Compare commits


1 Commit

Author SHA1 Message Date
Vinícius Lourenço
63bbfc3b88 fix(query-builder): filter with nil values should disable query 2026-03-23 18:03:45 -03:00
843 changed files with 20082 additions and 54092 deletions

View File

@@ -27,8 +27,8 @@ services:
- ${PWD}/fs/tmp/var/lib/clickhouse/user_scripts/:/var/lib/clickhouse/user_scripts/
- ${PWD}/../../../deploy/common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
ports:
- "127.0.0.1:8123:8123"
- "127.0.0.1:9000:9000"
- '127.0.0.1:8123:8123'
- '127.0.0.1:9000:9000'
tty: true
healthcheck:
test:
@@ -47,16 +47,13 @@ services:
condition: service_healthy
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
networks:
- default
- signoz-devenv
zookeeper:
image: signoz/zookeeper:3.7.1
container_name: zookeeper
volumes:
- ${PWD}/fs/tmp/zookeeper:/bitnami/zookeeper
ports:
- "127.0.0.1:2181:2181"
- '127.0.0.1:2181:2181'
environment:
- ALLOW_ANONYMOUS_LOGIN=yes
healthcheck:
@@ -77,19 +74,12 @@ services:
entrypoint:
- /bin/sh
command:
- -c
- |
/signoz-otel-collector migrate bootstrap &&
/signoz-otel-collector migrate sync up &&
/signoz-otel-collector migrate async up
- -c
- |
/signoz-otel-collector migrate bootstrap &&
/signoz-otel-collector migrate sync up &&
/signoz-otel-collector migrate async up
depends_on:
clickhouse:
condition: service_healthy
restart: on-failure
networks:
- default
- signoz-devenv
networks:
signoz-devenv:
name: signoz-devenv

View File

@@ -3,7 +3,7 @@ services:
image: signoz/signoz-otel-collector:v0.142.0
container_name: signoz-otel-collector-dev
entrypoint:
- /bin/sh
- /bin/sh
command:
- -c
- |
@@ -34,11 +34,4 @@ services:
retries: 3
restart: unless-stopped
extra_hosts:
- "host.docker.internal:host-gateway"
networks:
- default
- signoz-devenv
networks:
signoz-devenv:
name: signoz-devenv
- "host.docker.internal:host-gateway"

View File

@@ -12,10 +12,10 @@ receivers:
scrape_configs:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-collector
- targets:
- localhost:8888
labels:
job_name: otel-collector
processors:
batch:
@@ -29,26 +29,7 @@ processors:
signozspanmetrics/delta:
metrics_exporter: signozclickhousemetrics
metrics_flush_interval: 60s
latency_histogram_buckets:
[
100us,
1ms,
2ms,
6ms,
10ms,
50ms,
100ms,
250ms,
500ms,
1000ms,
1400ms,
2000ms,
5s,
10s,
20s,
40s,
60s,
]
latency_histogram_buckets: [100us, 1ms, 2ms, 6ms, 10ms, 50ms, 100ms, 250ms, 500ms, 1000ms, 1400ms, 2000ms, 5s, 10s, 20s, 40s, 60s ]
dimensions_cache_size: 100000
aggregation_temporality: AGGREGATION_TEMPORALITY_DELTA
enable_exp_histogram: true
@@ -79,13 +60,13 @@ extensions:
exporters:
clickhousetraces:
datasource: tcp://clickhouse:9000/signoz_traces
datasource: tcp://host.docker.internal:9000/signoz_traces
low_cardinal_exception_grouping: ${env:LOW_CARDINAL_EXCEPTION_GROUPING}
use_new_schema: true
signozclickhousemetrics:
dsn: tcp://clickhouse:9000/signoz_metrics
dsn: tcp://host.docker.internal:9000/signoz_metrics
clickhouselogsexporter:
dsn: tcp://clickhouse:9000/signoz_logs
dsn: tcp://host.docker.internal:9000/signoz_logs
timeout: 10s
use_new_schema: true
@@ -112,4 +93,4 @@ service:
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter]
exporters: [clickhouselogsexporter]

4
.github/CODEOWNERS vendored
View File

@@ -86,8 +86,6 @@ go.mod @therealpandey
/pkg/types/alertmanagertypes @srikanthccv
/pkg/alertmanager/ @srikanthccv
/pkg/ruler/ @srikanthccv
/pkg/modules/rulestatehistory/ @srikanthccv
/pkg/types/rulestatehistorytypes/ @srikanthccv
# Correlation-adjacent
@@ -107,7 +105,7 @@ go.mod @therealpandey
/pkg/modules/authdomain/ @vikrantgupta25
/pkg/modules/role/ @vikrantgupta25
# IdentN Owners
# IdentN Owners
/pkg/identn/ @vikrantgupta25
/pkg/http/middleware/identn.go @vikrantgupta25

View File

@@ -29,7 +29,6 @@ jobs:
- name: fmt
run: |
make py-fmt
git diff --exit-code -- tests/integration/
- name: lint
run: |
make py-lint
@@ -51,7 +50,6 @@ jobs:
- alerts
- ingestionkeys
- rootuser
- serviceaccount
sqlstore-provider:
- postgres
- sqlite

View File

@@ -6,14 +6,12 @@ linters:
- depguard
- errcheck
- forbidigo
- godot
- govet
- iface
- ineffassign
- misspell
- nilnil
- sloglint
- staticcheck
- wastedassign
- unparam
- unused
@@ -37,7 +35,7 @@ linters:
- identical
sloglint:
no-mixed-args: true
attr-only: true
kv-only: true
no-global: all
context: all
static-msg: true

View File

@@ -17,7 +17,5 @@
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
},
"python-envs.defaultEnvManager": "ms-python.python:system",
"python-envs.pythonProjects": []
}
}

View File

@@ -4,16 +4,12 @@ import (
"context"
"log/slog"
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
"github.com/SigNoz/signoz/pkg/authz/openfgaserver"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
@@ -32,17 +28,18 @@ import (
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/SigNoz/signoz/pkg/zeus/noopzeus"
"github.com/spf13/cobra"
)
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
var configFiles []string
var flags signoz.DeprecatedFlags
serverCmd := &cobra.Command{
Use: "server",
Short: "Run the SigNoz server",
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
RunE: func(currCmd *cobra.Command, args []string) error {
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, configFiles)
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
if err != nil {
return err
}
@@ -51,7 +48,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
},
}
serverCmd.Flags().StringArrayVar(&configFiles, "config", nil, "path to a YAML configuration file (can be specified multiple times, later files override earlier ones)")
flags.RegisterFlags(serverCmd)
parentCmd.AddCommand(serverCmd)
}
@@ -79,13 +76,8 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) (factory.ProviderFactory[authz.AuthZ, authz.Config], error) {
openfgaDataStore, err := openfgaserver.NewSQLStore(sqlstore)
if err != nil {
return nil, err
}
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), openfgaDataStore), nil
func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
@@ -98,37 +90,37 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}
server, err := app.NewServer(config, signoz)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err
}
if err := server.Start(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start server", "error", err)
return err
}
signoz.Start(ctx)
if err := signoz.Wait(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start signoz", "error", err)
return err
}
err = server.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop server", "error", err)
return err
}
err = signoz.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
return err
}

View File

@@ -10,23 +10,18 @@ import (
"github.com/SigNoz/signoz/pkg/signoz"
)
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, configFiles []string) (signoz.Config, error) {
uris := make([]string, 0, len(configFiles)+1)
for _, f := range configFiles {
uris = append(uris, "file:"+f)
}
uris = append(uris, "env:")
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) {
config, err := signoz.NewConfig(
ctx,
logger,
config.ResolverConfig{
Uris: uris,
Uris: []string{"env:"},
ProviderFactories: []config.ProviderFactory{
envprovider.NewFactory(),
fileprovider.NewFactory(),
},
},
flags,
)
if err != nil {
return signoz.Config{}, err

View File

@@ -1,86 +0,0 @@
package cmd
import (
"context"
"log/slog"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewSigNozConfig_NoConfigFiles(t *testing.T) {
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, nil)
require.NoError(t, err)
assert.NotZero(t, config)
}
func TestNewSigNozConfig_SingleConfigFile(t *testing.T) {
dir := t.TempDir()
configPath := filepath.Join(dir, "config.yaml")
err := os.WriteFile(configPath, []byte(`
cache:
provider: "redis"
`), 0644)
require.NoError(t, err)
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, []string{configPath})
require.NoError(t, err)
assert.Equal(t, "redis", config.Cache.Provider)
}
func TestNewSigNozConfig_MultipleConfigFiles_LaterOverridesEarlier(t *testing.T) {
dir := t.TempDir()
basePath := filepath.Join(dir, "base.yaml")
err := os.WriteFile(basePath, []byte(`
cache:
provider: "memory"
sqlstore:
provider: "sqlite"
`), 0644)
require.NoError(t, err)
overridePath := filepath.Join(dir, "override.yaml")
err = os.WriteFile(overridePath, []byte(`
cache:
provider: "redis"
`), 0644)
require.NoError(t, err)
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, []string{basePath, overridePath})
require.NoError(t, err)
// Later file overrides earlier
assert.Equal(t, "redis", config.Cache.Provider)
// Value from base file that wasn't overridden persists
assert.Equal(t, "sqlite", config.SQLStore.Provider)
}
func TestNewSigNozConfig_EnvOverridesConfigFile(t *testing.T) {
dir := t.TempDir()
configPath := filepath.Join(dir, "config.yaml")
err := os.WriteFile(configPath, []byte(`
cache:
provider: "fromfile"
`), 0644)
require.NoError(t, err)
t.Setenv("SIGNOZ_CACHE_PROVIDER", "fromenv")
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, []string{configPath})
require.NoError(t, err)
// Env should override file
assert.Equal(t, "fromenv", config.Cache.Provider)
}
func TestNewSigNozConfig_NonexistentFile(t *testing.T) {
logger := slog.New(slog.DiscardHandler)
_, err := NewSigNozConfig(context.Background(), logger, []string{"/nonexistent/config.yaml"})
assert.Error(t, err)
}

View File

@@ -5,19 +5,16 @@ import (
"log/slog"
"time"
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
eequerier "github.com/SigNoz/signoz/ee/querier"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/authz/openfgaserver"
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
eequerier "github.com/SigNoz/signoz/ee/querier"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -26,7 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/licensing"
@@ -42,17 +38,18 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/spf13/cobra"
)
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
var configFiles []string
var flags signoz.DeprecatedFlags
serverCmd := &cobra.Command{
Use: "server",
Short: "Run the SigNoz server",
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
RunE: func(currCmd *cobra.Command, args []string) error {
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, configFiles)
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
if err != nil {
return err
}
@@ -61,7 +58,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
},
}
serverCmd.Flags().StringArrayVar(&configFiles, "config", nil, "path to a YAML configuration file (can be specified multiple times, later files override earlier ones)")
flags.RegisterFlags(serverCmd)
parentCmd.AddCommand(serverCmd)
}
@@ -72,7 +69,7 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
// add enterprise sqlstore factories to the community sqlstore factories
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", errors.Attr(err))
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
return err
}
@@ -119,13 +116,8 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) (factory.ProviderFactory[authz.AuthZ, authz.Config], error) {
openfgaDataStore, err := openfgaserver.NewSQLStore(sqlstore)
if err != nil {
return nil, err
}
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), openfgaDataStore, licensing, dashboardModule), nil
func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
@@ -140,37 +132,37 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}
server, err := enterpriseapp.NewServer(config, signoz)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err
}
if err := server.Start(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start server", "error", err)
return err
}
signoz.Start(ctx)
if err := signoz.Wait(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start signoz", "error", err)
return err
}
err = server.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop server", "error", err)
return err
}
err = signoz.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
return err
}

View File

@@ -4,10 +4,8 @@ import (
"log/slog"
"os"
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/version"
"github.com/spf13/cobra"
)
var RootCmd = &cobra.Command{
@@ -22,7 +20,7 @@ var RootCmd = &cobra.Command{
func Execute(logger *slog.Logger) {
err := RootCmd.Execute()
if err != nil {
logger.ErrorContext(RootCmd.Context(), "error running command", errors.Attr(err))
logger.ErrorContext(RootCmd.Context(), "error running command", "error", err)
os.Exit(1)
}
}

4
conf/cache-config.yml Normal file
View File

@@ -0,0 +1,4 @@
provider: "inmemory"
inmemory:
ttl: 60m
cleanupInterval: 10m

View File

@@ -39,13 +39,6 @@ instrumentation:
host: "0.0.0.0"
port: 9090
##################### PProf #####################
pprof:
# Whether to enable the pprof server.
enabled: true
# The address on which the pprof server listens.
address: 0.0.0.0:6060
##################### Web #####################
web:
# Whether to enable the web frontend
@@ -85,12 +78,10 @@ sqlstore:
sqlite:
# The path to the SQLite database file.
path: /var/lib/signoz/signoz.db
# The journal mode for the sqlite database. Supported values: delete, wal.
# Mode is the mode to use for the sqlite database.
mode: delete
# The timeout for the sqlite database to wait for a lock.
# BusyTimeout is the timeout for the sqlite database to wait for a lock.
busy_timeout: 10s
# The default transaction locking behavior. Supported values: deferred, immediate, exclusive.
transaction_mode: deferred
##################### APIServer #####################
apiserver:
@@ -146,8 +137,6 @@ telemetrystore:
##################### Prometheus #####################
prometheus:
# The maximum time a PromQL query is allowed to run before being aborted.
timeout: 2m
active_query_tracker:
# Whether to enable the active query tracker.
enabled: true
@@ -354,13 +343,3 @@ identn:
impersonation:
# toggle impersonation identN, when enabled, all requests will impersonate the root user
enabled: false
##################### Service Account #####################
serviceaccount:
email:
# email domain for the service account principal
domain: signozserviceaccount.com
analytics:
# toggle service account analytics
enabled: true

25
conf/prometheus.yml Normal file
View File

@@ -0,0 +1,25 @@
# my global config
global:
scrape_interval: 5s # Set the scrape interval to every 5 seconds. Default is every 1 minute.
evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
# scrape_timeout is set to the global default (10s).
# Alertmanager configuration
alerting:
alertmanagers:
- static_configs:
- targets:
- 127.0.0.1:9093
# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
rule_files:
# - "first_rules.yml"
# - "second_rules.yml"
- 'alerts.yml'
# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
scrape_configs: []
remote_read:
- url: tcp://localhost:9000/signoz_metrics

View File

@@ -190,7 +190,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.117.1
image: signoz/signoz:v0.116.1
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port

View File

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.117.1
image: signoz/signoz:v0.116.1
ports:
- "8080:8080" # signoz port
volumes:

View File

@@ -181,7 +181,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.117.1}
image: signoz/signoz:${VERSION:-v0.116.1}
container_name: signoz
ports:
- "8080:8080" # signoz port

View File

@@ -109,7 +109,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.117.1}
image: signoz/signoz:${VERSION:-v0.116.1}
container_name: signoz
ports:
- "8080:8080" # signoz port

File diff suppressed because it is too large

View File

@@ -123,7 +123,6 @@ if err := router.Handle("/api/v1/things", handler.New(
Description: "This endpoint creates a thing",
Request: new(types.PostableThing),
RequestContentType: "application/json",
RequestQuery: new(types.QueryableThing),
Response: new(types.GettableThing),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
@@ -156,8 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
- `Request` is a Go type that describes the request body or form.
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestQuery**:
- `RequestQuery` is a Go type that describes query URL params.
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
- `Response` is the Go type for the successful response payload.
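For orientation, `Request` and `Response` are plain Go structs with JSON tags; the shapes below are purely illustrative (the real `types.PostableThing` / `types.GettableThing` definitions are not part of this diff):

```go
package types

// Illustrative only: stand-in shapes for the request and response types
// referenced by handler.New above.
type PostableThing struct {
	Name string `json:"name"`
}

type GettableThing struct {
	ID   string `json:"id"`
	Name string `json:"name"`
}
```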

View File

@@ -273,7 +273,6 @@ Options can be simple (direct link) or nested (with another question):
- Place logo files in `public/Logos/`
- Use SVG format
- Reference as `"/Logos/your-logo.svg"`
- **Fetching Icons**: New icons can be easily fetched from [OpenBrand](https://openbrand.sh/). Use the pattern `https://openbrand.sh/?url=<TARGET_URL>`, where `<TARGET_URL>` is the URL-encoded link to the service's website. For example, to get Render's logo, use [https://openbrand.sh/?url=https%3A%2F%2Frender.com](https://openbrand.sh/?url=https%3A%2F%2Frender.com).
- **Optimize new SVGs**: Run any newly downloaded SVGs through an optimizer like [SVGOMG (svgo)](https://svgomg.net/) or use `npx svgo public/Logos/your-logo.svg` to minimise their size before committing.
### 4. Links

View File

@@ -16,7 +16,7 @@ func (hp *HourlyProvider) GetBaseSeasonalProvider() *BaseSeasonalProvider {
return &hp.BaseSeasonalProvider
}
// NewHourlyProvider now uses the generic option type.
// NewHourlyProvider now uses the generic option type
func NewHourlyProvider(opts ...GenericProviderOption[*HourlyProvider]) *HourlyProvider {
hp := &HourlyProvider{
BaseSeasonalProvider: BaseSeasonalProvider{},

View File

@@ -47,7 +47,7 @@ type AnomaliesResponse struct {
// | |
// (rounded value for past period) + (seasonal growth)
//
// score = abs(value - prediction) / stddev (current_season_query).
// score = abs(value - prediction) / stddev (current_season_query)
type anomalyQueryParams struct {
// CurrentPeriodQuery is the query range params for period user is looking at or eval window
// Example: (now-5m, now), (now-30m, now), (now-1h, now)
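Expressed as code, the score described in the comment above reduces to a one-liner (illustrative sketch only; the guard against a zero standard deviation is an assumption, not necessarily how the provider handles it):

```go
package anomaly

import "math"

// anomalyScore mirrors the documented formula:
// abs(value - prediction) / stddev(current season query)
func anomalyScore(value, prediction, stdDev float64) float64 {
	if stdDev == 0 {
		return 0 // assumed guard for a flat season
	}
	return math.Abs(value-prediction) / stdDev
}
```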

View File

@@ -18,12 +18,12 @@ var (
movingAvgWindowSize = 7
)
// BaseProvider is an interface that includes common methods for all provider types.
// BaseProvider is an interface that includes common methods for all provider types
type BaseProvider interface {
GetBaseSeasonalProvider() *BaseSeasonalProvider
}
// GenericProviderOption is a generic type for provider options.
// GenericProviderOption is a generic type for provider options
type GenericProviderOption[T BaseProvider] func(T)
func WithQuerier[T BaseProvider](querier querier.Querier) GenericProviderOption[T] {
@@ -74,37 +74,37 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
instrumentationtypes.CodeFunctionName: "getResults",
})
// TODO(srikanthccv): parallelize this?
p.logger.InfoContext(ctx, "fetching results for current period", slog.Any("anomaly_current_period_query", params.CurrentPeriodQuery))
p.logger.InfoContext(ctx, "fetching results for current period", "anomaly_current_period_query", params.CurrentPeriodQuery)
currentPeriodResults, err := p.querier.QueryRange(ctx, orgID, &params.CurrentPeriodQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past period", slog.Any("anomaly_past_period_query", params.PastPeriodQuery))
p.logger.InfoContext(ctx, "fetching results for past period", "anomaly_past_period_query", params.PastPeriodQuery)
pastPeriodResults, err := p.querier.QueryRange(ctx, orgID, &params.PastPeriodQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for current season", slog.Any("anomaly_current_season_query", params.CurrentSeasonQuery))
p.logger.InfoContext(ctx, "fetching results for current season", "anomaly_current_season_query", params.CurrentSeasonQuery)
currentSeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.CurrentSeasonQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past season", slog.Any("anomaly_past_season_query", params.PastSeasonQuery))
p.logger.InfoContext(ctx, "fetching results for past season", "anomaly_past_season_query", params.PastSeasonQuery)
pastSeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.PastSeasonQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past 2 season", slog.Any("anomaly_past_2season_query", params.Past2SeasonQuery))
p.logger.InfoContext(ctx, "fetching results for past 2 season", "anomaly_past_2season_query", params.Past2SeasonQuery)
past2SeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.Past2SeasonQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past 3 season", slog.Any("anomaly_past_3season_query", params.Past3SeasonQuery))
p.logger.InfoContext(ctx, "fetching results for past 3 season", "anomaly_past_3season_query", params.Past3SeasonQuery)
past3SeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.Past3SeasonQuery)
if err != nil {
return nil, err
@@ -121,7 +121,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
}
// getMatchingSeries gets the matching series from the query result
// for the given series.
// for the given series
func (p *BaseSeasonalProvider) getMatchingSeries(_ context.Context, queryResult *qbtypes.TimeSeriesData, series *qbtypes.TimeSeries) *qbtypes.TimeSeries {
if queryResult == nil || len(queryResult.Aggregations) == 0 || len(queryResult.Aggregations[0].Series) == 0 {
return nil
@@ -155,14 +155,13 @@ func (p *BaseSeasonalProvider) getStdDev(series *qbtypes.TimeSeries) float64 {
avg := p.getAvg(series)
var sum float64
for _, smpl := range series.Values {
d := smpl.Value - avg
sum += d * d
sum += math.Pow(smpl.Value-avg, 2)
}
return math.Sqrt(sum / float64(len(series.Values)))
}
// getMovingAvg gets the moving average for the given series
// for the given window size and start index.
// for the given window size and start index
func (p *BaseSeasonalProvider) getMovingAvg(series *qbtypes.TimeSeries, movingAvgWindowSize, startIdx int) float64 {
if series == nil || len(series.Values) == 0 {
return 0
@@ -213,17 +212,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
if predictedValue < 0 {
// this should not happen (except when the data has extreme outliers)
// we will use the moving avg of the previous period series in this case
p.logger.WarnContext(ctx, "predicted value is less than 0 for series", slog.Float64("anomaly_predicted_value", predictedValue), slog.Any("anomaly_labels", series.Labels))
p.logger.WarnContext(ctx, "predicted value is less than 0 for series", "anomaly_predicted_value", predictedValue, "anomaly_labels", series.Labels)
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
}
p.logger.DebugContext(ctx, "predicted value for series",
slog.Float64("anomaly_moving_avg", movingAvg),
slog.Float64("anomaly_avg", avg),
slog.Float64("anomaly_mean", mean),
slog.Any("anomaly_labels", series.Labels),
slog.Float64("anomaly_predicted_value", predictedValue),
slog.Float64("anomaly_curr", curr.Value),
"anomaly_moving_avg", movingAvg,
"anomaly_avg", avg,
"anomaly_mean", mean,
"anomaly_labels", series.Labels,
"anomaly_predicted_value", predictedValue,
"anomaly_curr", curr.Value,
)
predictedSeries.Values = append(predictedSeries.Values, &qbtypes.TimeSeriesValue{
Timestamp: curr.Timestamp,
@@ -237,7 +236,7 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
// getBounds gets the upper and lower bounds for the given series
// for the given z score threshold
// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series.
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
series, predictedSeries, weekSeries *qbtypes.TimeSeries,
zScoreThreshold float64,
@@ -270,7 +269,7 @@ func (p *BaseSeasonalProvider) getBounds(
// getExpectedValue gets the expected value for the given series
// for the given index
// prevSeriesAvg + currentSeasonSeriesAvg - mean of past season series, past2 season series and past3 season series.
// prevSeriesAvg + currentSeasonSeriesAvg - mean of past season series, past2 season series and past3 season series
func (p *BaseSeasonalProvider) getExpectedValue(
_, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *qbtypes.TimeSeries, idx int,
) float64 {
@@ -284,7 +283,7 @@ func (p *BaseSeasonalProvider) getExpectedValue(
// getScore gets the anomaly score for the given series
// for the given index
// (value - expectedValue) / std dev of the series.
// (value - expectedValue) / std dev of the series
func (p *BaseSeasonalProvider) getScore(
series, prevSeries, weekSeries, weekPrevSeries, past2SeasonSeries, past3SeasonSeries *qbtypes.TimeSeries, value float64, idx int,
) float64 {
@@ -297,7 +296,7 @@ func (p *BaseSeasonalProvider) getScore(
// getAnomalyScores gets the anomaly scores for the given series
// for the given index
// (value - expectedValue) / std dev of the series.
// (value - expectedValue) / std dev of the series
func (p *BaseSeasonalProvider) getAnomalyScores(
series, prevSeries, currentSeasonSeries, pastSeasonSeries, past2SeasonSeries, past3SeasonSeries *qbtypes.TimeSeries,
) *qbtypes.TimeSeries {
@@ -413,7 +412,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
stdDev := p.getStdDev(currentSeasonSeries)
p.logger.InfoContext(ctx, "calculated standard deviation for series", slog.Float64("anomaly_std_dev", stdDev), slog.Any("anomaly_labels", series.Labels))
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
prevSeriesAvg := p.getAvg(pastPeriodSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
@@ -421,12 +420,12 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
p.logger.InfoContext(ctx, "calculated mean for series",
slog.Float64("anomaly_prev_series_avg", prevSeriesAvg),
slog.Float64("anomaly_current_season_series_avg", currentSeasonSeriesAvg),
slog.Float64("anomaly_past_season_series_avg", pastSeasonSeriesAvg),
slog.Float64("anomaly_past_2season_series_avg", past2SeasonSeriesAvg),
slog.Float64("anomaly_past_3season_series_avg", past3SeasonSeriesAvg),
slog.Any("anomaly_labels", series.Labels),
"anomaly_prev_series_avg", prevSeriesAvg,
"anomaly_current_season_series_avg", currentSeasonSeriesAvg,
"anomaly_past_season_series_avg", pastSeasonSeriesAvg,
"anomaly_past_2season_series_avg", past2SeasonSeriesAvg,
"anomaly_past_3season_series_avg", past3SeasonSeriesAvg,
"anomaly_labels", series.Labels,
)
predictedSeries := p.getPredictedSeries(

View File

@@ -1,143 +0,0 @@
package otlphttpauditor
import (
"bytes"
"context"
"io"
"log/slog"
"net/http"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/audittypes"
collogspb "go.opentelemetry.io/proto/otlp/collector/logs/v1"
"google.golang.org/protobuf/proto"
spb "google.golang.org/genproto/googleapis/rpc/status"
)
const (
maxHTTPResponseReadBytes int64 = 64 * 1024
protobufContentType string = "application/x-protobuf"
)
func (provider *provider) export(ctx context.Context, events []audittypes.AuditEvent) error {
logs := audittypes.NewPLogsFromAuditEvents(events, "signoz", provider.build.Version(), "signoz.audit")
request, err := provider.marshaler.MarshalLogs(logs)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, auditor.ErrCodeAuditExportFailed, "failed to marshal audit logs")
}
if err := provider.send(ctx, request); err != nil {
provider.settings.Logger().ErrorContext(ctx, "audit export failed", errors.Attr(err), slog.Int("dropped_log_records", len(events)))
return err
}
return nil
}
// Posts a protobuf-encoded OTLP request to the configured endpoint.
// Retries are handled by the underlying heimdall HTTP client.
// Ref: https://github.com/open-telemetry/opentelemetry-collector/blob/main/exporter/otlphttpexporter/otlp.go
func (provider *provider) send(ctx context.Context, body []byte) error {
req, err := http.NewRequestWithContext(ctx, http.MethodPost, provider.config.OTLPHTTP.Endpoint.String(), bytes.NewReader(body))
if err != nil {
return err
}
req.Header.Set("Content-Type", protobufContentType)
res, err := provider.httpClient.Do(req)
if err != nil {
return err
}
defer func() {
_, _ = io.CopyN(io.Discard, res.Body, maxHTTPResponseReadBytes)
res.Body.Close()
}()
if res.StatusCode >= 200 && res.StatusCode <= 299 {
provider.onSuccess(ctx, res)
return nil
}
return provider.onErr(res)
}
// Ref: https://github.com/open-telemetry/opentelemetry-collector/blob/01b07fcbb7a253bd996c290dcae6166e71d13732/exporter/otlphttpexporter/otlp.go#L403.
func (provider *provider) onSuccess(ctx context.Context, res *http.Response) {
resBytes, err := readResponseBody(res)
if err != nil || resBytes == nil {
return
}
exportResponse := &collogspb.ExportLogsServiceResponse{}
if err := proto.Unmarshal(resBytes, exportResponse); err != nil {
return
}
ps := exportResponse.GetPartialSuccess()
if ps == nil {
return
}
if ps.GetErrorMessage() != "" || ps.GetRejectedLogRecords() != 0 {
provider.settings.Logger().WarnContext(ctx, "partial success response", slog.String("message", ps.GetErrorMessage()), slog.Int64("dropped_log_records", ps.GetRejectedLogRecords()))
}
}
func (provider *provider) onErr(res *http.Response) error {
status := readResponseStatus(res)
if status != nil {
return errors.Newf(errors.TypeInternal, auditor.ErrCodeAuditExportFailed, "request to %s responded with status code %d, Message=%s, Details=%v", provider.config.OTLPHTTP.Endpoint.String(), res.StatusCode, status.Message, status.Details)
}
return errors.Newf(errors.TypeInternal, auditor.ErrCodeAuditExportFailed, "request to %s responded with status code %d", provider.config.OTLPHTTP.Endpoint.String(), res.StatusCode)
}
// Reads at most maxHTTPResponseReadBytes from the response body.
// Ref: https://github.com/open-telemetry/opentelemetry-collector/blob/01b07fcbb7a253bd996c290dcae6166e71d13732/exporter/otlphttpexporter/otlp.go#L275.
func readResponseBody(resp *http.Response) ([]byte, error) {
if resp.ContentLength == 0 {
return nil, nil
}
maxRead := resp.ContentLength
if maxRead == -1 || maxRead > maxHTTPResponseReadBytes {
maxRead = maxHTTPResponseReadBytes
}
protoBytes := make([]byte, maxRead)
n, err := io.ReadFull(resp.Body, protoBytes)
if n == 0 && (err == nil || errors.Is(err, io.EOF)) {
return nil, nil
}
if err != nil && !errors.Is(err, io.ErrUnexpectedEOF) {
return nil, err
}
return protoBytes[:n], nil
}
// Decodes a protobuf-encoded Status from 4xx/5xx response bodies. Returns nil if the response is empty or cannot be decoded.
// Ref: https://github.com/open-telemetry/opentelemetry-collector/blob/01b07fcbb7a253bd996c290dcae6166e71d13732/exporter/otlphttpexporter/otlp.go#L310.
func readResponseStatus(resp *http.Response) *spb.Status {
if resp.StatusCode < 400 || resp.StatusCode > 599 {
return nil
}
respBytes, err := readResponseBody(resp)
if err != nil || respBytes == nil {
return nil
}
respStatus := &spb.Status{}
if err := proto.Unmarshal(respBytes, respStatus); err != nil {
return nil
}
return respStatus
}

View File

@@ -1,97 +0,0 @@
package otlphttpauditor
import (
"context"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/auditor/auditorserver"
"github.com/SigNoz/signoz/pkg/factory"
client "github.com/SigNoz/signoz/pkg/http/client"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/types/audittypes"
"github.com/SigNoz/signoz/pkg/version"
"go.opentelemetry.io/collector/pdata/plog"
)
var _ auditor.Auditor = (*provider)(nil)
type provider struct {
settings factory.ScopedProviderSettings
config auditor.Config
licensing licensing.Licensing
build version.Build
server *auditorserver.Server
marshaler plog.ProtoMarshaler
httpClient *client.Client
}
func NewFactory(licensing licensing.Licensing, build version.Build) factory.ProviderFactory[auditor.Auditor, auditor.Config] {
return factory.NewProviderFactory(factory.MustNewName("otlphttp"), func(ctx context.Context, providerSettings factory.ProviderSettings, config auditor.Config) (auditor.Auditor, error) {
return newProvider(ctx, providerSettings, config, licensing, build)
})
}
func newProvider(_ context.Context, providerSettings factory.ProviderSettings, config auditor.Config, licensing licensing.Licensing, build version.Build) (auditor.Auditor, error) {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/auditor/otlphttpauditor")
httpClient, err := client.New(
settings.Logger(),
providerSettings.TracerProvider,
providerSettings.MeterProvider,
client.WithTimeout(config.OTLPHTTP.Timeout),
client.WithRetryCount(retryCountFromConfig(config.OTLPHTTP.Retry)),
retrierOption(config.OTLPHTTP.Retry),
)
if err != nil {
return nil, err
}
provider := &provider{
settings: settings,
config: config,
licensing: licensing,
build: build,
marshaler: plog.ProtoMarshaler{},
httpClient: httpClient,
}
server, err := auditorserver.New(settings,
auditorserver.Config{
BufferSize: config.BufferSize,
BatchSize: config.BatchSize,
FlushInterval: config.FlushInterval,
},
provider.export,
)
if err != nil {
return nil, err
}
provider.server = server
return provider, nil
}
func (provider *provider) Start(ctx context.Context) error {
return provider.server.Start(ctx)
}
func (provider *provider) Audit(ctx context.Context, event audittypes.AuditEvent) {
if event.PrincipalOrgID.IsZero() {
provider.settings.Logger().WarnContext(ctx, "audit event dropped as org_id is zero")
return
}
if _, err := provider.licensing.GetActive(ctx, event.PrincipalOrgID); err != nil {
return
}
provider.server.Add(ctx, event)
}
func (provider *provider) Healthy() <-chan struct{} {
return provider.server.Healthy()
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.server.Stop(ctx)
}

View File

@@ -1,52 +0,0 @@
package otlphttpauditor
import (
"time"
"github.com/SigNoz/signoz/pkg/auditor"
client "github.com/SigNoz/signoz/pkg/http/client"
)
// retrier implements client.Retriable with exponential backoff
// derived from auditor.RetryConfig.
type retrier struct {
initialInterval time.Duration
maxInterval time.Duration
}
func newRetrier(cfg auditor.RetryConfig) *retrier {
return &retrier{
initialInterval: cfg.InitialInterval,
maxInterval: cfg.MaxInterval,
}
}
// NextInterval returns the backoff duration for the given retry attempt.
// Uses exponential backoff: initialInterval * 2^retry, capped at maxInterval.
func (r *retrier) NextInterval(retry int) time.Duration {
interval := r.initialInterval
for range retry {
interval *= 2
}
return min(interval, r.maxInterval)
}
func retrierOption(cfg auditor.RetryConfig) client.Option {
return client.WithRetriable(newRetrier(cfg))
}
func retryCountFromConfig(cfg auditor.RetryConfig) int {
if !cfg.Enabled || cfg.MaxElapsedTime <= 0 {
return 0
}
count := 0
elapsed := time.Duration(0)
interval := cfg.InitialInterval
for elapsed < cfg.MaxElapsedTime {
elapsed += interval
interval = min(interval*2, cfg.MaxInterval)
count++
}
return count
}
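To make the retry-count loop above concrete, here is a minimal standalone sketch using assumed values (5s initial interval, 30s max interval, 2m max elapsed time); it reproduces the same accumulation and doubling:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Assumed config values, not taken from the repository defaults.
	initial, maxInterval, maxElapsed := 5*time.Second, 30*time.Second, 2*time.Minute
	count, elapsed, interval := 0, time.Duration(0), initial
	for elapsed < maxElapsed {
		elapsed += interval
		interval = min(interval*2, maxInterval)
		count++
	}
	fmt.Println(count) // intervals 5s, 10s, 20s, 30s, 30s, 30s -> prints 6
}
```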

View File

@@ -3,7 +3,6 @@ package oidccallbackauthn
import (
"context"
"fmt"
"log/slog"
"net/url"
"github.com/SigNoz/signoz/pkg/authn"
@@ -151,7 +150,7 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
// Some IDPs return a single group as a string instead of an array
groups = append(groups, g)
default:
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", slog.String("type", fmt.Sprintf("%T", claimValue)))
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
}
}
}

View File

@@ -16,7 +16,6 @@ import (
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
"github.com/openfga/openfga/pkg/storage"
)
type provider struct {
@@ -27,14 +26,14 @@ type provider struct {
registry []authz.RegisterTypeable
}
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, openfgaDataStore storage.OpenFGADatastore, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, openfgaDataStore, licensing, registry)
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
})
}
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, openfgaDataStore storage.OpenFGADatastore, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema, openfgaDataStore)
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
if err != nil {
return nil, err
@@ -58,10 +57,6 @@ func (provider *provider) Start(ctx context.Context) error {
return provider.openfgaServer.Start(ctx)
}
func (provider *provider) Healthy() <-chan struct{} {
return provider.openfgaServer.Healthy()
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.openfgaServer.Stop(ctx)
}

View File

@@ -16,6 +16,7 @@ type Server struct {
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
@@ -25,31 +26,14 @@ func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Healthy() <-chan struct{} {
return server.pkgAuthzService.Healthy()
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
subject := ""
switch claims.Principal {
case authtypes.PrincipalUser:
user, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
subject = user
case authtypes.PrincipalServiceAccount:
serviceAccount, err := authtypes.NewSubject(authtypes.TypeableServiceAccount, claims.ServiceAccountID, orgID, nil)
if err != nil {
return err
}
subject = serviceAccount
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
if err != nil {
return err
}
tupleSlice, err := typeable.Tuples(subject, relation, selectors, orgID)

View File

@@ -1,32 +0,0 @@
package openfgaserver
import (
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/openfga/openfga/pkg/storage"
"github.com/openfga/openfga/pkg/storage/postgres"
"github.com/openfga/openfga/pkg/storage/sqlcommon"
"github.com/openfga/openfga/pkg/storage/sqlite"
)
func NewSQLStore(store sqlstore.SQLStore) (storage.OpenFGADatastore, error) {
switch store.BunDB().Dialect().Name().String() {
case "sqlite":
return sqlite.NewWithDB(store.SQLDB(), &sqlcommon.Config{
MaxTuplesPerWriteField: 100,
MaxTypesPerModelField: 100,
})
case "pg":
pgStore, ok := store.(postgressqlstore.Pooler)
if !ok {
panic(errors.New(errors.TypeInternal, errors.CodeInternal, "postgressqlstore should implement Pooler"))
}
return postgres.NewWithDB(pgStore.Pool(), nil, &sqlcommon.Config{
MaxTuplesPerWriteField: 100,
MaxTypesPerModelField: 100,
})
}
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid store type: %s", store.BunDB().Dialect().Name().String())
}

View File

@@ -13,7 +13,7 @@ var (
once sync.Once
)
// Config initializes the licensing configuration.
// initializes the licensing configuration
func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
once.Do(func() {
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}

View File

@@ -3,11 +3,8 @@ package httplicensing
import (
"context"
"encoding/json"
"log/slog"
"time"
"github.com/tidwall/gjson"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
@@ -19,6 +16,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
type provider struct {
@@ -57,7 +55,7 @@ func (provider *provider) Start(ctx context.Context) error {
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
for {
@@ -67,7 +65,7 @@ func (provider *provider) Start(ctx context.Context) error {
case <-tick.C:
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
}
}
@@ -135,7 +133,7 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
if errors.Ast(err, errors.TypeNotFound) {
return nil
}
provider.settings.Logger().ErrorContext(ctx, "license validation failed", slog.String("org_id", organizationID.StringValue()))
provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
return err
}

View File

@@ -213,8 +213,8 @@ func (module *module) Update(ctx context.Context, orgID valuer.UUID, id valuer.U
return module.pkgDashboardModule.Update(ctx, orgID, id, updatedBy, data, diff)
}
func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, isAdmin bool, lock bool) error {
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, isAdmin, lock)
func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, role types.Role, lock bool) error {
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}
func (module *module) MustGetTypeables() []authtypes.Typeable {

View File

@@ -6,6 +6,7 @@ import (
"encoding/json"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
@@ -53,6 +54,26 @@ func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return
@@ -79,7 +100,7 @@ func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
// Build step intervals from the anomaly query
stepIntervals := make(map[string]uint64)
if anomalyQuery.StepInterval.Duration > 0 {
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Seconds())
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
}
finalResp := &qbtypes.QueryRangeResponse{

View File

@@ -10,15 +10,15 @@ import (
"strings"
"time"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"log/slog"
)
type CloudIntegrationConnectionParamsResponse struct {
@@ -49,7 +49,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
return
}
apiKey, apiErr := ah.getOrCreateCloudIntegrationFactorAPIKey(r.Context(), valuer.MustNewUUID(claims.OrgID), cloudProvider)
apiKey, apiErr := ah.getOrCreateCloudIntegrationPAT(r.Context(), claims.OrgID, cloudProvider)
if apiErr != nil {
RespondError(w, basemodel.WrapApiError(
apiErr, "couldn't provision PAT for cloud integration:",
@@ -109,44 +109,79 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
ah.Respond(w, result)
}
func (ah *APIHandler) getOrCreateCloudIntegrationFactorAPIKey(ctx context.Context, orgID valuer.UUID, cloudProvider string) (
func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId string, cloudProvider string) (
string, *basemodel.ApiError,
) {
integrationPATName := fmt.Sprintf("%s", cloudProvider)
serviceAccount, apiErr := ah.getOrCreateCloudIntegrationServiceAccount(ctx, orgID)
integrationPATName := fmt.Sprintf("%s integration", cloudProvider)
integrationUser, apiErr := ah.getOrCreateCloudIntegrationUser(ctx, orgId, cloudProvider)
if apiErr != nil {
return "", apiErr
}
factorAPIKey, err := serviceAccount.NewFactorAPIKey(integrationPATName, 0)
orgIdUUID, err := valuer.NewUUID(orgId)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't parse orgId: %w", err,
))
}
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
))
}
for _, p := range allPats {
if p.UserID == integrationUser.ID && p.Name == integrationPATName {
return p.Token, nil
}
}
slog.InfoContext(ctx, "no PAT found for cloud integration, creating a new one",
"cloud_provider", cloudProvider,
)
newPAT, err := types.NewStorableAPIKey(
integrationPATName,
integrationUser.ID,
types.RoleViewer,
0,
)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
factorAPIKey, err = ah.Signoz.Modules.ServiceAccount.GetOrCreateFactorAPIKey(ctx, factorAPIKey)
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
))
}
return factorAPIKey.Key, nil
return newPAT.Token, nil
}
func (ah *APIHandler) getOrCreateCloudIntegrationServiceAccount(ctx context.Context, orgId valuer.UUID) (*serviceaccounttypes.ServiceAccount, *basemodel.ApiError) {
domain := ah.Signoz.Modules.ServiceAccount.Config().Email.Domain
cloudIntegrationServiceAccount := serviceaccounttypes.NewServiceAccount("integration", domain, serviceaccounttypes.ServiceAccountStatusActive, orgId)
cloudIntegrationServiceAccount, err := ah.Signoz.Modules.ServiceAccount.GetOrCreate(ctx, orgId, cloudIntegrationServiceAccount)
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId), types.UserStatusActive)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration service account: %w", err))
}
err = ah.Signoz.Modules.ServiceAccount.SetRoleByName(ctx, orgId, cloudIntegrationServiceAccount.ID, authtypes.SigNozViewerRoleName)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration service account: %w", err))
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
return cloudIntegrationServiceAccount, nil
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
}
return cloudIntegrationUser, nil
}
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (

View File

@@ -4,15 +4,10 @@ import (
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"time"
signozerrors "github.com/SigNoz/signoz/pkg/errors"
"log/slog"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -20,6 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"log/slog"
)
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
@@ -42,7 +38,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
slog.DebugContext(ctx, "fetching license")
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
slog.ErrorContext(ctx, "failed to fetch license", signozerrors.Attr(err))
slog.ErrorContext(ctx, "failed to fetch license", "error", err)
} else if license == nil {
slog.DebugContext(ctx, "no active license found")
} else {
@@ -55,7 +51,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
// merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
featureSet = MergeFeatureSets(zeusFeatures, featureSet)
} else {
slog.ErrorContext(ctx, "failed to fetch zeus features", signozerrors.Attr(err))
slog.ErrorContext(ctx, "failed to fetch zeus features", "error", err)
}
}
}

View File

@@ -7,7 +7,6 @@ import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -36,7 +35,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
if apiErrorObj != nil {
slog.ErrorContext(r.Context(), "error parsing metric query range params", errors.Attr(apiErrorObj.Err))
slog.ErrorContext(r.Context(), "error parsing metric query range params", "error", apiErrorObj.Err)
RespondError(w, apiErrorObj, nil)
return
}
@@ -45,7 +44,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
// add temporality for each metric
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
if temporalityErr != nil {
slog.ErrorContext(r.Context(), "error while adding temporality for metrics", errors.Attr(temporalityErr))
slog.ErrorContext(r.Context(), "error while adding temporality for metrics", "error", temporalityErr)
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
return
}

View File

@@ -5,23 +5,19 @@ import (
"fmt"
"net"
"net/http"
_ "net/http/pprof" // http profiler
"slices"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
"github.com/gorilla/handlers"
"github.com/rs/cors"
"github.com/soheilhy/cmux"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
@@ -29,15 +25,14 @@ import (
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/http/middleware"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/web"
"log/slog"
"github.com/rs/cors"
"github.com/soheilhy/cmux"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -52,6 +47,7 @@ import (
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"log/slog"
)
// Server runs HTTP, Mux and a grpc server
@@ -107,7 +103,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
signoz.TelemetryMetadataStore,
signoz.Prometheus,
signoz.Modules.OrgGetter,
signoz.Modules.RuleStateHistory,
signoz.Querier,
signoz.Instrumentation.ToProviderSettings(),
signoz.QueryParser,
@@ -138,7 +133,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
logParsingPipelineController, err := logparsingpipeline.NewLogParsingPipelinesController(
signoz.SQLStore,
integrationsController.GetPipelinesForInstalledIntegrations,
reader,
)
if err != nil {
return nil, err
@@ -213,7 +207,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
r.Use(middleware.NewRecovery(s.signoz.Instrumentation.Logger()).Wrap)
r.Use(otelmux.Middleware(
"apiserver",
otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
@@ -222,6 +215,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
otelmux.WithFilter(func(r *http.Request) bool {
return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
}),
otelmux.WithPublicEndpoint(),
))
r.Use(middleware.NewIdentN(s.signoz.IdentNResolver, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
@@ -242,6 +236,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
apiHandler.RegisterWebSocketPaths(r, am)
apiHandler.RegisterMessagingQueuesRoutes(r, am)
apiHandler.RegisterThirdPartyApiRoutes(r, am)
apiHandler.MetricExplorerRoutes(r, am)
apiHandler.RegisterTraceFunnelsRoutes(r, am)
err := s.signoz.APIServer.AddToRouter(r)
@@ -309,16 +304,25 @@ func (s *Server) Start(ctx context.Context) error {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
default:
slog.Error("Could not start HTTP server", errors.Attr(err))
slog.Error("Could not start HTTP server", "error", err)
}
s.unavailableChannel <- healthcheck.Unavailable
}()
go func() {
slog.Info("Starting pprof server", "addr", baseconst.DebugHttpPort)
err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
if err != nil {
slog.Error("Could not start pprof server", "error", err)
}
}()
go func() {
slog.Info("Starting OpAmp Websocket server", "addr", baseconst.OpAmpWsEndpoint)
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
if err != nil {
slog.Error("opamp ws server failed to start", errors.Attr(err))
slog.Error("opamp ws server failed to start", "error", err)
s.unavailableChannel <- healthcheck.Unavailable
}
}()
@@ -345,29 +349,28 @@ func (s *Server) Stop(ctx context.Context) error {
return nil
}
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, metadataStore telemetrytypes.MetadataStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, ruleStateHistoryModule rulestatehistory.Module, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, metadataStore telemetrytypes.MetadataStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
// create manager opts
managerOpts := &baserules.ManagerOptions{
TelemetryStore: telemetryStore,
MetadataStore: metadataStore,
Prometheus: prometheus,
Context: context.Background(),
Reader: ch,
Querier: querier,
Logger: providerSettings.Logger,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,
PrepareTestRuleFunc: rules.TestNotification,
Alertmanager: alertmanager,
OrgGetter: orgGetter,
RuleStore: ruleStore,
MaintenanceStore: maintenanceStore,
SqlStore: sqlstore,
QueryParser: queryParser,
RuleStateHistoryModule: ruleStateHistoryModule,
TelemetryStore: telemetryStore,
MetadataStore: metadataStore,
Prometheus: prometheus,
Context: context.Background(),
Reader: ch,
Querier: querier,
Logger: providerSettings.Logger,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,
PrepareTestRuleFunc: rules.TestNotification,
Alertmanager: alertmanager,
OrgGetter: orgGetter,
RuleStore: ruleStore,
MaintenanceStore: maintenanceStore,
SqlStore: sqlstore,
QueryParser: queryParser,
}
// create Manager

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
@@ -309,7 +308,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series, ", errors.Attr(filterErr), "rule_name", r.Name())
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
} else {
seriesToProcess = filteredSeries
}
@@ -392,7 +391,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
r.logger.ErrorContext(ctx, "Expanding alert template failed", errors.Attr(err), "data", tmplData, "rule_name", r.Name())
r.logger.ErrorContext(ctx, "Expanding alert template failed", "error", err, "data", tmplData, "rule_name", r.Name())
}
return result
}
@@ -468,7 +467,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
if err != nil {
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "labels", a.Labels)
r.logger.ErrorContext(ctx, "error marshaling labels", "error", err, "labels", a.Labels)
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given

View File

@@ -6,16 +6,14 @@ import (
"time"
"log/slog"
"github.com/google/uuid"
"github.com/SigNoz/signoz/pkg/errors"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"log/slog"
)
func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
@@ -28,7 +26,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
if err != nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
}
if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
@@ -42,7 +39,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
baserules.WithSQLStore(opts.SQLStore),
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
baserules.WithRuleStateHistoryModule(opts.ManagerOpts.RuleStateHistoryModule),
)
if err != nil {
@@ -67,7 +63,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
baserules.WithSQLStore(opts.SQLStore),
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
baserules.WithRuleStateHistoryModule(opts.ManagerOpts.RuleStateHistoryModule),
)
if err != nil {
@@ -93,7 +88,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
baserules.WithSQLStore(opts.SQLStore),
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
baserules.WithRuleStateHistoryModule(opts.ManagerOpts.RuleStateHistoryModule),
)
if err != nil {
return task, err
@@ -157,7 +151,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
slog.Error("failed to prepare a new threshold rule for test", "name", alertname, errors.Attr(err))
slog.Error("failed to prepare a new threshold rule for test", "name", alertname, "error", err)
return 0, basemodel.BadRequest(err)
}
@@ -179,7 +173,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
slog.Error("failed to prepare a new promql rule for test", "name", alertname, errors.Attr(err))
slog.Error("failed to prepare a new promql rule for test", "name", alertname, "error", err)
return 0, basemodel.BadRequest(err)
}
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
@@ -199,7 +193,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
)
if err != nil {
slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, errors.Attr(err))
slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, "error", err)
return 0, basemodel.BadRequest(err)
}
} else {
@@ -211,7 +205,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
alertsFound, err := rule.Eval(ctx, ts)
if err != nil {
slog.Error("evaluating rule failed", "rule", rule.Name(), errors.Attr(err))
slog.Error("evaluating rule failed", "rule", rule.Name(), "error", err)
return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
}
rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)

View File

@@ -257,7 +257,7 @@ func TestManager_TestNotification_SendUnmatched_PromRule(t *testing.T) {
WillReturnRows(samplesRows)
// Create Prometheus provider for this test
promProvider = prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{Timeout: 2 * time.Minute}, store)
promProvider = prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, store)
},
ManagerOptionsHook: func(opts *rules.ManagerOptions) {
// Set Prometheus provider for PromQL queries

View File

@@ -15,7 +15,6 @@ import (
"github.com/google/uuid"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
@@ -77,14 +76,14 @@ func (lm *Manager) Start(ctx context.Context) error {
func (lm *Manager) UploadUsage(ctx context.Context) {
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
if err != nil {
slog.ErrorContext(ctx, "failed to get organizations", errors.Attr(err))
slog.ErrorContext(ctx, "failed to get organizations", "error", err)
return
}
for _, organization := range organizations {
// check if license is present or not
license, err := lm.licenseService.GetActive(ctx, organization.ID)
if err != nil {
slog.ErrorContext(ctx, "failed to get active license", errors.Attr(err))
slog.ErrorContext(ctx, "failed to get active license", "error", err)
return
}
if license == nil {
@@ -116,7 +115,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
slog.ErrorContext(ctx, "failed to get usage from clickhouse", errors.Attr(err))
slog.ErrorContext(ctx, "failed to get usage from clickhouse", "error", err)
return
}
for _, u := range dbusages {
@@ -136,14 +135,14 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
slog.ErrorContext(ctx, "error while decrypting usage data", errors.Attr(err))
slog.ErrorContext(ctx, "error while decrypting usage data", "error", err)
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
slog.ErrorContext(ctx, "error while unmarshalling usage data", errors.Attr(err))
slog.ErrorContext(ctx, "error while unmarshalling usage data", "error", err)
return
}
@@ -164,13 +163,13 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
body, errv2 := json.Marshal(payload)
if errv2 != nil {
slog.ErrorContext(ctx, "error while marshalling usage payload", errors.Attr(errv2))
slog.ErrorContext(ctx, "error while marshalling usage payload", "error", errv2)
return
}
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
if errv2 != nil {
slog.ErrorContext(ctx, "failed to upload usage", errors.Attr(errv2))
slog.ErrorContext(ctx, "failed to upload usage", "error", errv2)
// not returning error here since it is captured in the failed count
return
}

View File

@@ -4,12 +4,11 @@ import (
"context"
"database/sql"
"github.com/uptrace/bun"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type provider struct {
@@ -114,7 +113,7 @@ WHERE
defer func() {
if err := constraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
@@ -175,7 +174,7 @@ WHERE
defer func() {
if err := foreignKeyConstraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
@@ -244,7 +243,7 @@ ORDER BY index_name, column_position`, string(name))
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()

View File

@@ -336,10 +336,9 @@ func (dialect *dialect) UpdatePrimaryKey(ctx context.Context, bun bun.IDB, oldMo
}
fkReference := ""
switch reference {
case Org:
if reference == Org {
fkReference = OrgReference
case User:
} else if reference == User {
fkReference = UserReference
}
@@ -393,10 +392,9 @@ func (dialect *dialect) AddPrimaryKey(ctx context.Context, bun bun.IDB, oldModel
}
fkReference := ""
switch reference {
case Org:
if reference == Org {
fkReference = OrgReference
case User:
} else if reference == User {
fkReference = UserReference
}

View File

@@ -14,21 +14,14 @@ import (
"github.com/uptrace/bun/dialect/pgdialect"
)
var _ Pooler = new(provider)
type provider struct {
settings factory.ScopedProviderSettings
sqldb *sql.DB
bundb *sqlstore.BunDB
pgxPool *pgxpool.Pool
dialect *dialect
formatter sqlstore.SQLFormatter
}
type Pooler interface {
Pool() *pgxpool.Pool
}
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
return factory.NewProviderFactory(factory.MustNewName("postgres"), func(ctx context.Context, providerSettings factory.ProviderSettings, config sqlstore.Config) (sqlstore.SQLStore, error) {
hooks := make([]sqlstore.SQLStoreHook, len(hookFactories))
@@ -69,7 +62,6 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
settings: settings,
sqldb: sqldb,
bundb: bunDB,
pgxPool: pool,
dialect: new(dialect),
formatter: newFormatter(bunDB.Dialect()),
}, nil
@@ -83,10 +75,6 @@ func (provider *provider) SQLDB() *sql.DB {
return provider.sqldb
}
func (provider *provider) Pool() *pgxpool.Pool {
return provider.pgxPool
}
func (provider *provider) Dialect() sqlstore.SQLDialect {
return provider.dialect
}

View File

@@ -19,7 +19,7 @@ var (
once sync.Once
)
// initializes the Zeus configuration.
// initializes the Zeus configuration
func Config() zeus.Config {
once.Do(func() {
parsedURL, err := neturl.Parse(url)

View File

@@ -189,7 +189,7 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
}
// This can be taken down to the client package.
// This can be taken down to the client package
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
switch statusCode {
case http.StatusBadRequest:

View File

@@ -1,97 +0,0 @@
---
globs: **/*.store.ts
alwaysApply: false
---
# State Management: React Query, nuqs, Zustand
Use the following stack. Do **not** introduce or recommend Redux or React Context for shared/global state.
## Server state → React Query
- **Use for:** API responses, time-series data, caching, background refetch, retries, stale/refresh.
- **Do not use Redux/Context** to store or mirror data that comes from React Query (e.g. do not dispatch API results into Redux).
- Prefer generated React Query hooks from `frontend/src/api/generated` when available.
- Keep server state in React Query; expose it via hooks that return the query result (and optionally memoized derived values). Do not duplicate it in Redux or Context.
```tsx
// ✅ GOOD: single source of truth from React Query
export function useAppStateHook() {
const { data, isError } = useQuery(...)
const memoizedConfigs = useMemo(() => ({ ... }), [data?.configs])
return { configs: memoizedConfigs, isError, ... }
}
// ❌ BAD: copying React Query result into Redux
dispatch({ type: UPDATE_LATEST_VERSION, payload: queryResponse.data })
```
## URL state → nuqs
- **Use for:** shareable state, filters, time range, selected values, pagination, view state that belongs in the URL.
- **Do not use Redux/Context** for state that should be shareable or reflected in the URL.
- Use [nuqs](https://nuqs.dev/docs/basic-usage) for type-safe URL search params. Avoid ad-hoc `useSearchParams` encoding/decoding.
- Keep URL payload small; respect browser URL length limits (e.g. Chrome ~2k chars). Do not put large datasets or sensitive data in query params.
```tsx
// ✅ GOOD: nuqs for filters / time range / selection
const [timeRange, setTimeRange] = useQueryState('timeRange', parseAsString.withDefault('1h'))
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
// ❌ BAD: Redux/Context for shareable or URL-synced state
const { timeRange } = useContext(SomeContext)
```
## Client state → Zustand
- **Use for:** global/client state, cross-component state, feature flags, complex or large client objects (e.g. dashboard state, query builder state).
- **Do not use Redux or React Context** for global or feature-level client state.
- Prefer small, domain-scoped stores (e.g. DashboardStore, QueryBuilderStore).
### Zustand best practices (align with eslint-plugin-zustand-rules)
- **One store per module.** Do not define multiple `create()` calls in the same file; use one store per module (or compose slices into one store).
- **Always use selectors.** Call the store hook with a selector so only the used slice triggers re-renders. Never use `useStore()` with no selector.
```tsx
// ✅ GOOD: selector — re-renders only when isDashboardLocked changes
const isLocked = useDashboardStore(state => state.isDashboardLocked)
// ❌ BAD: no selector — re-renders on any store change
const state = useDashboardStore()
```
- **Never mutate state directly.** Update only via `set` or `setState` (or `getState()` + `set` for reads). No `state.foo = x` or `state.bears += 1` inside actions.
```tsx
// ✅ GOOD: use set
increment: () => set(state => ({ bears: state.bears + 1 }))
// ❌ BAD: direct mutation
increment: () => { state.bears += 1 }
```
- **State properties before actions.** In the store object, list all state fields first, then action functions.
- **Split into slices when state is large.** If a store has many top-level properties (e.g. more than 5-10), split into slice factories and combine with one `create()`.
```tsx
// ✅ GOOD: slices for large state
const createBearSlice = set => ({ bears: 0, addBear: () => set(s => ({ bears: s.bears + 1 })) })
const createFishSlice = set => ({ fish: 0, addFish: () => set(s => ({ fish: s.fish + 1 })) })
const useStore = create(set => ({ ...createBearSlice(set), ...createFishSlice(set) }))
```
- **In projects using Zustand:** add `eslint-plugin-zustand-rules` and extend `plugin:zustand-rules/recommended` to enforce these rules automatically.
## Local state → React state only
- **Use useState/useReducer** for: component-local UI state, form inputs, toggles, hover state, data that never leaves the component.
- Do not use Zustand, Redux, or Context for state that is purely local to one component or a small subtree.
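A minimal sketch of the rule above, assuming a hypothetical `CollapsiblePanel` component whose toggle never leaves it:
```tsx
// ✅ GOOD: purely local toggle → useState in the owning component (illustrative names)
import { ReactNode, useState } from 'react'

function CollapsiblePanel({ title, children }: { title: string; children: ReactNode }): JSX.Element {
  const [isOpen, setIsOpen] = useState(false)
  return (
    <section>
      <button type="button" onClick={() => setIsOpen(open => !open)}>
        {title}
      </button>
      {isOpen && children}
    </section>
  )
}

// ❌ BAD: putting the same toggle in a Zustand store or Context just for this one component
```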
## Summary
| State type | Use | Avoid |
|-------------------|------------------|--------------------|
| Server / API | React Query | Redux, Context |
| URL / shareable | nuqs | Redux, Context |
| Global client | Zustand | Redux, Context |
| Local UI | useState/useReducer | Zustand, Redux, Context |

View File

@@ -1,150 +0,0 @@
---
name: migrate-state-management
description: Migrate Redux or React Context to the correct state option (React Query for server state, nuqs for URL/shareable state, Zustand for global client state). Use when refactoring away from Redux/Context, moving state to the right store, or when the user asks to migrate state management.
---
# Migrate State: Redux/Context → React Query, nuqs, Zustand
Do **not** introduce or recommend Redux or React Context. Migrate existing usage to the stack below.
## 1. Classify the state
Before changing code, classify what the state represents:
| If the state is… | Migrate to | Do not use |
|------------------|------------|------------|
| From API / server (versions, configs, fetched lists, time-series) | **React Query** | Redux, Context |
| Shareable via URL (filters, time range, page, selected ids) | **nuqs** | Redux, Context |
| Global/client UI (dashboard lock, query builder, feature flags, large client objects) | **Zustand** | Redux, Context |
| Local to one component (inputs, toggles, hover) | **useState / useReducer** | Zustand, Redux, Context |
If one slice mixes concerns (e.g. Redux has both API data and pagination), split: API → React Query, pagination → nuqs, rest → Zustand or local state.
## 2. Migrate to React Query (server state)
**When:** State comes from or mirrors an API response (e.g. `currentVersion`, `latestVersion`, `configs`, lists).
**Steps:**
1. Find where the data is fetched (existing `useQuery`/API call) and where it is dispatched or set in Context/Redux.
2. Remove the dispatch/set that writes API results into Redux/Context.
3. Expose a single hook that uses the query and returns the same shape consumers expect (use `useMemo` for derived objects like `configs` to avoid unnecessary re-renders).
4. Replace Redux/Context consumption with the new hook. Prefer generated React Query hooks from `frontend/src/api/generated` when available.
5. Configure cache/refetch (e.g. `refetchOnMount: false`, `staleTime`) so behavior matches previous “single source” expectations.
**Before (Redux mirroring React Query):**
```tsx
if (getUserLatestVersionResponse.isFetched && getUserLatestVersionResponse.isSuccess && getUserLatestVersionResponse.data?.payload) {
dispatch({ type: UPDATE_LATEST_VERSION, payload: { latestVersion: getUserLatestVersionResponse.data.payload.tag_name } })
}
```
**After (single source in React Query):**
```tsx
export function useAppStateHook() {
const { data, isError } = useQuery(...)
const memoizedConfigs = useMemo(() => ({ ... }), [data?.configs])
return {
latestVersion: data?.payload?.tag_name,
configs: memoizedConfigs,
isError,
}
}
```
Consumers use `useAppStateHook()` instead of `useSelector` or Context. Do not copy React Query result into Redux or Context.
## 3. Migrate to nuqs (URL / shareable state)
**When:** State should be in the URL: filters, time range, pagination, selected values, view state. Keep payload small (e.g. Chrome ~2k chars); no large datasets or sensitive data.
**Steps:**
1. Identify which Redux/Context fields are shareable or already reflected in the URL (e.g. `currentPage`, `timeRange`, `selectedFilter`).
2. Add nuqs (or use existing): `useQueryState('param', parseAsString.withDefault('…'))` (or `parseAsInteger`, etc.).
3. Replace reads/writes of those fields with nuqs hooks. Use typed parsers; avoid ad-hoc `useSearchParams` encoding/decoding.
4. Remove the same fields from Redux/Context and their reducers/providers.
**Before (Context/Redux):**
```tsx
const { timeRange } = useContext(SomeContext)
const [page, setPage] = useDispatch(...)
```
**After (nuqs):**
```tsx
const [timeRange, setTimeRange] = useQueryState('timeRange', parseAsString.withDefault('1h'))
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
```
## 4. Migrate to Zustand (global client state)
**When:** State is global or cross-component client state: feature flags, dashboard state, query builder state, complex/large client objects (e.g. up to ~1.5-2 MB). Not for server cache or local-only UI.
**Steps:**
1. Create one store per domain (e.g. `DashboardStore`, `QueryBuilderStore`). One `create()` per module; for large state use slice factories and combine.
2. Put state properties first, then actions. Use `set` (or `setState` / `getState()` + `set`) for updates; never mutate state directly.
3. Replace Context/Redux consumption with the store hook **and a selector** so only the used slice triggers re-renders.
4. Remove the old Context provider / Redux slice and related dispatches.
**Selector (required):**
```tsx
const isLocked = useDashboardStore(state => state.isDashboardLocked)
```
Never use `useStore()` with no selector. Never do `state.foo = x` inside actions; use `set(state => ({ ... }))`.
**Before (Context/Redux):**
```tsx
const { isDashboardLocked, setLocked } = useContext(DashboardContext)
```
**After (Zustand):**
```tsx
const isLocked = useDashboardStore(state => state.isDashboardLocked)
const setLocked = useDashboardStore(state => state.setLocked)
```
For large stores (many top-level fields), split into slices and combine:
```tsx
const createBearSlice = set => ({ bears: 0, addBear: () => set(s => ({ bears: s.bears + 1 })) })
const useStore = create(set => ({ ...createBearSlice(set), ...createFishSlice(set) }))
```
Add `eslint-plugin-zustand-rules` with `plugin:zustand-rules/recommended` to enforce selectors and no direct mutation.
## 5. Migrate to local state (useState / useReducer)
**When:** State is used only inside one component or a small subtree (form inputs, toggles, hover, panel selection). No URL sync, no cross-feature sharing.
**Steps:**
1. Move the state into the component that owns it (or the smallest common parent).
2. Use `useState` or `useReducer` (useReducer when multiple related fields change together).
3. Remove from Redux/Context and any provider/slice.
Do not use Zustand, Redux, or Context for purely local UI state.
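A small sketch, assuming a hypothetical panel whose open/tab state previously lived in a `PanelContext`:
**Before (Context):**
```tsx
const { isOpen, activeTab, setTab } = useContext(PanelContext)
```
**After (local `useReducer`, since the fields change together):**
```tsx
import { useReducer } from 'react'

type PanelState = { isOpen: boolean; activeTab: 'logs' | 'traces' }
type PanelAction = { type: 'toggle' } | { type: 'setTab'; tab: PanelState['activeTab'] }

function panelReducer(state: PanelState, action: PanelAction): PanelState {
  switch (action.type) {
    case 'toggle':
      return { ...state, isOpen: !state.isOpen }
    case 'setTab':
      return { ...state, activeTab: action.tab }
    default:
      return state
  }
}

// inside the component that owns this UI state
const [panel, dispatch] = useReducer(panelReducer, { isOpen: false, activeTab: 'logs' })
dispatch({ type: 'setTab', tab: 'traces' })
```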
## 6. Migration checklist
- [ ] Classify each piece of state (server / URL / global client / local).
- [ ] Server state: move to React Query; expose via hook; remove Redux/Context mirroring.
- [ ] URL state: move to nuqs; remove from Redux/Context; keep URL payload small.
- [ ] Global client state: move to Zustand with selectors and immutable updates; one store per domain.
- [ ] Local state: move to useState/useReducer in the owning component.
- [ ] Remove old Redux slices / Context providers and all dispatches/consumers for migrated state.
- [ ] Do not duplicate the same data in multiple places (e.g. React Query + Redux).
## Additional resources
- Project rule: [.cursor/rules/state-management.mdc](../../rules/state-management.mdc)
- Detailed patterns and rationale: [reference.md](reference.md)

View File

@@ -1,50 +0,0 @@
# State migration reference
## Why migrate
- **Context:** Re-renders all consumers on any change; no granular subscriptions; becomes brittle at scale.
- **Redux:** Heavy boilerplate (actions, reducers, selectors, Provider); slower onboarding; often used to mirror React Query or URL state.
- **Goal:** Fewer mechanisms, domain isolation, granular subscriptions, single source of truth per state type.
## React Query migration (server state)
Typical anti-pattern: API is called via React Query, then result is dispatched to Redux. Flow becomes: Component → useQueries → API → dispatch → Reducer → Redux state → useSelector.
Correct flow: Component → useQuery (or custom hook wrapping it) → same component reads from hook. No Redux/Context in between.
- Prefer generated hooks from `frontend/src/api/generated`.
- For “app state” that is just API data (versions, configs), one hook that returns `{ ...data, configs: useMemo(...) }` is enough. No selectors needed for plain data; useMemo only where the value is used as a dependency (e.g. in useState).
- Set `staleTime` / `refetchOnMount` etc. so refetch behavior matches previous expectations.
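A sketch of that single hook, with an illustrative `fetchAppConfig` standing in for the real API call and response field names assumed:
```tsx
import { useMemo } from 'react'
import { useQuery } from 'react-query'

// hypothetical API call; in the codebase this would come from frontend/src/api
const fetchAppConfig = (): Promise<{ payload?: { tag_name?: string; configs?: Record<string, unknown> } }> =>
  fetch('/api/v1/version').then(res => res.json())

export function useAppState() {
  const { data, isError } = useQuery(['app-config'], fetchAppConfig, {
    staleTime: 5 * 60 * 1000, // keep the previous "fetch once, reuse" behavior
    refetchOnMount: false,
  })

  // memoize only because consumers pass this object into dependency arrays
  const configs = useMemo(() => data?.payload?.configs ?? {}, [data?.payload?.configs])

  return { latestVersion: data?.payload?.tag_name, configs, isError }
}
```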
## nuqs migration (URL state)
Redux/Context often hold pagination, filters, time range, selected values that are shareable. Those belong in the URL.
- Use [nuqs](https://nuqs.dev/docs/basic-usage) for typed search params. Avoid ad-hoc `useSearchParams` + manual encoding.
- Browser limits: Chrome ~2k chars practical; keep payload small; no large datasets or secrets in query params.
- If the app uses TanStack Router, search params can be handled there; otherwise nuqs is the standard.
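For example (param names are illustrative), a service filter and page number that previously sat in Redux become typed search params:
```tsx
import { parseAsInteger, parseAsString, useQueryState } from 'nuqs'

// inside a component; reads and writes go straight to the URL, so the view stays shareable
const [service, setService] = useQueryState('service', parseAsString.withDefault(''))
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))

setService('frontend') // → ?service=frontend
setPage(2) // → ?service=frontend&page=2
```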
## Zustand migration (client state)
- One store per domain (e.g. DashboardStore, QueryBuilderStore). Multiple `create()` calls in one file are disallowed; use one store or composed slices.
- Always use a selector: `useStore(s => s.field)` so only that field drives re-renders.
- Never mutate: update only via `set(state => ({ ... }))` or `setState` / `getState()` + `set`.
- State properties first, then actions. For 5-10+ top-level fields, split into slice factories and combine with one `create()`.
- Large client objects: Zustand is for “large” in the ~1.5-2 MB range; above that, optimize at API/store design.
- Testing: no Provider; stores are plain functions; easy to reset and mock.
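A compact sketch of one domain store consumed through selectors (a hypothetical `useQueryBuilderStore`; field names are illustrative):
```tsx
import { create } from 'zustand'

type QueryBuilderState = {
  // state first
  queries: string[]
  isDirty: boolean
  // then actions
  addQuery: (query: string) => void
  reset: () => void
}

const useQueryBuilderStore = create<QueryBuilderState>(set => ({
  queries: [],
  isDirty: false,
  addQuery: query => set(s => ({ queries: [...s.queries, query], isDirty: true })),
  reset: () => set({ queries: [], isDirty: false }),
}))

// inside components: subscribe only to the slice that is used
const isDirty = useQueryBuilderStore(s => s.isDirty)
const addQuery = useQueryBuilderStore(s => s.addQuery)
```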
## What not to use
- **Redux / Context** for new or migrated shared/global state.
- **Redux / Context** to store or mirror React Query results.
- **Redux / Context** for state that should live in the URL (use nuqs).
- **Zustand / Redux / Context** for component-local UI (use useState/useReducer).
## Summary table
| State type | Use | Avoid |
|-------------|--------------------|-----------------|
| Server/API | React Query | Redux, Context |
| URL/shareable | nuqs | Redux, Context |
| Global client | Zustand | Redux, Context |
| Local UI | useState/useReducer | Zustand, Redux, Context |

View File

@@ -205,25 +205,6 @@ module.exports = {
],
},
overrides: [
{
files: ['src/**/*.{jsx,tsx,ts}'],
excludedFiles: [
'**/*.test.{js,jsx,ts,tsx}',
'**/*.spec.{js,jsx,ts,tsx}',
'**/__tests__/**/*.{js,jsx,ts,tsx}',
],
rules: {
'no-restricted-properties': [
'error',
{
object: 'navigator',
property: 'clipboard',
message:
'Do not use navigator.clipboard directly since it does not work well with specific browsers. Use hook useCopyToClipboard from react-use library. https://streamich.github.io/react-use/?path=/story/side-effects-usecopytoclipboard--docs',
},
],
},
},
{
files: [
'**/*.test.{js,jsx,ts,tsx}',

View File

@@ -12,7 +12,7 @@
or
`docker build . -t tagname`
**Tag to remote url- Introduce versioning later on**
**Tag to remote url- Introduce versinoing later on**
```
docker tag signoz/frontend:latest 7296823551/signoz:latest

View File

@@ -2,7 +2,6 @@
interface SafeNavigateOptions {
replace?: boolean;
state?: unknown;
newTab?: boolean;
}
interface SafeNavigateTo {
@@ -21,7 +20,9 @@ interface UseSafeNavigateReturn {
export const useSafeNavigate = (): UseSafeNavigateReturn => ({
safeNavigate: jest.fn(
(_to: SafeNavigateToType, _options?: SafeNavigateOptions) => {},
(to: SafeNavigateToType, options?: SafeNavigateOptions) => {
console.log(`Mock safeNavigate called with:`, to, options);
},
) as jest.MockedFunction<
(to: SafeNavigateToType, options?: SafeNavigateOptions) => void
>,

View File

@@ -137,7 +137,6 @@
"react-full-screen": "1.1.1",
"react-grid-layout": "^1.3.4",
"react-helmet-async": "1.3.0",
"react-hook-form": "7.71.2",
"react-i18next": "^11.16.1",
"react-lottie": "1.2.10",
"react-markdown": "8.0.7",
@@ -164,7 +163,6 @@
"vite-plugin-html": "3.2.2",
"web-vitals": "^0.2.4",
"xstate": "^4.31.0",
"zod": "4.3.6",
"zustand": "5.0.11"
},
"browserslist": {
@@ -287,4 +285,4 @@
"tmp": "0.2.4",
"vite": "npm:rolldown-vite@7.3.1"
}
}
}

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#fa520f" viewBox="0 0 24 24"><title>Mistral AI</title><path d="M17.143 3.429v3.428h-3.429v3.429h-3.428V6.857H6.857V3.43H3.43v13.714H0v3.428h10.286v-3.428H6.857v-3.429h3.429v3.429h3.429v-3.429h3.428v3.429h-3.428v3.428H24v-3.428h-3.43V3.429z"/></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 120 120"><defs><linearGradient id="a" x1="0%" x2="100%" y1="0%" y2="100%"><stop offset="0%" stop-color="#ff4d4d"/><stop offset="100%" stop-color="#991b1b"/></linearGradient></defs><path fill="url(#a)" d="M60 10c-30 0-45 25-45 45s15 40 30 45v10h10v-10s5 2 10 0v10h10v-10c15-5 30-25 30-45S90 10 60 10"/><path fill="url(#a)" d="M20 45C5 40 0 50 5 60s15 5 20-5c3-7 0-10-5-10"/><path fill="url(#a)" d="M100 45c15-5 20 5 15 15s-15 5-20-5c-3-7 0-10 5-10"/><path stroke="#ff4d4d" stroke-linecap="round" stroke-width="3" d="M45 15Q35 5 30 8M75 15Q85 5 90 8"/><circle cx="45" cy="35" r="6" fill="#050810"/><circle cx="75" cy="35" r="6" fill="#050810"/><circle cx="46" cy="34" r="2.5" fill="#00e5cc"/><circle cx="76" cy="34" r="2.5" fill="#00e5cc"/></svg>


View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><title>Render</title><path d="M18.263.007c-3.121-.147-5.744 2.109-6.192 5.082-.018.138-.045.272-.067.405-.696 3.703-3.936 6.507-7.827 6.507a7.9 7.9 0 0 1-3.825-.979.202.202 0 0 0-.302.178V24H12v-8.999c0-1.656 1.338-3 2.987-3h2.988c3.382 0 6.103-2.817 5.97-6.244-.12-3.084-2.61-5.603-5.682-5.75"/></svg>


View File

@@ -15,6 +15,5 @@
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles",
"role_details": "Role Details",
"members": "Members",
"service_accounts": "Service Accounts"
"members": "Members"
}

View File

@@ -50,8 +50,5 @@
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
"METER_EXPLORER": "SigNoz | Meter Explorer",
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
"METER": "SigNoz | Meter",
"ROLES_SETTINGS": "SigNoz | Roles",
"MEMBERS_SETTINGS": "SigNoz | Members",
"SERVICE_ACCOUNTS_SETTINGS": "SigNoz | Service Accounts"
"METER": "SigNoz | Meter"
}

View File

@@ -15,6 +15,5 @@
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles",
"role_details": "Role Details",
"members": "Members",
"service_accounts": "Service Accounts"
"members": "Members"
}

View File

@@ -75,6 +75,5 @@
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
"METER": "SigNoz | Meter",
"ROLES_SETTINGS": "SigNoz | Roles",
"MEMBERS_SETTINGS": "SigNoz | Members",
"SERVICE_ACCOUNTS_SETTINGS": "SigNoz | Service Accounts"
"MEMBERS_SETTINGS": "SigNoz | Members"
}

View File

@@ -101,22 +101,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
)?.value;
// Don't redirect to onboarding if workspace has issues (blocked, suspended, or restricted)
// User needs access to settings/billing to fix payment issues
const isWorkspaceBlocked = trialInfo?.workSpaceBlock;
const isWorkspaceSuspended = activeLicense?.state === LicenseState.DEFAULTED;
const isWorkspaceAccessRestricted =
activeLicense?.state === LicenseState.TERMINATED ||
activeLicense?.state === LicenseState.EXPIRED ||
activeLicense?.state === LicenseState.CANCELLED;
const hasWorkspaceIssue =
isWorkspaceBlocked || isWorkspaceSuspended || isWorkspaceAccessRestricted;
if (hasWorkspaceIssue) {
return;
}
const isFirstUser = checkFirstTimeUser();
if (
isFirstUser &&
@@ -135,36 +119,40 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
orgPreferences,
usersData,
pathname,
trialInfo?.workSpaceBlock,
activeLicense?.state,
]);
const navigateToWorkSpaceBlocked = useCallback((): void => {
const navigateToWorkSpaceBlocked = (route: any): void => {
const { path } = route;
const isRouteEnabledForWorkspaceBlockedState =
isAdmin &&
(pathname === ROUTES.SETTINGS ||
pathname === ROUTES.ORG_SETTINGS ||
pathname === ROUTES.MEMBERS_SETTINGS ||
pathname === ROUTES.BILLING ||
pathname === ROUTES.MY_SETTINGS);
(path === ROUTES.SETTINGS ||
path === ROUTES.ORG_SETTINGS ||
path === ROUTES.MEMBERS_SETTINGS ||
path === ROUTES.BILLING ||
path === ROUTES.MY_SETTINGS);
if (
pathname &&
pathname !== ROUTES.WORKSPACE_LOCKED &&
path &&
path !== ROUTES.WORKSPACE_LOCKED &&
!isRouteEnabledForWorkspaceBlockedState
) {
history.push(ROUTES.WORKSPACE_LOCKED);
}
}, [isAdmin, pathname]);
};
const navigateToWorkSpaceAccessRestricted = useCallback((): void => {
if (pathname && pathname !== ROUTES.WORKSPACE_ACCESS_RESTRICTED) {
const navigateToWorkSpaceAccessRestricted = (route: any): void => {
const { path } = route;
if (path && path !== ROUTES.WORKSPACE_ACCESS_RESTRICTED) {
history.push(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
}
}, [pathname]);
};
useEffect(() => {
if (!isFetchingActiveLicense && activeLicense) {
const currentRoute = mapRoutes.get('current');
const isTerminated = activeLicense.state === LicenseState.TERMINATED;
const isExpired = activeLicense.state === LicenseState.EXPIRED;
const isCancelled = activeLicense.state === LicenseState.CANCELLED;
@@ -173,53 +161,61 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
const { platform } = activeLicense;
if (isWorkspaceAccessRestricted && platform === LicensePlatform.CLOUD) {
navigateToWorkSpaceAccessRestricted();
if (
isWorkspaceAccessRestricted &&
platform === LicensePlatform.CLOUD &&
currentRoute
) {
navigateToWorkSpaceAccessRestricted(currentRoute);
}
}
}, [
isFetchingActiveLicense,
activeLicense,
navigateToWorkSpaceAccessRestricted,
]);
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
useEffect(() => {
if (!isFetchingActiveLicense) {
const currentRoute = mapRoutes.get('current');
const shouldBlockWorkspace = trialInfo?.workSpaceBlock;
if (
shouldBlockWorkspace &&
currentRoute &&
activeLicense?.platform === LicensePlatform.CLOUD
) {
navigateToWorkSpaceBlocked();
navigateToWorkSpaceBlocked(currentRoute);
}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
isFetchingActiveLicense,
trialInfo?.workSpaceBlock,
activeLicense?.platform,
navigateToWorkSpaceBlocked,
mapRoutes,
pathname,
]);
const navigateToWorkSpaceSuspended = useCallback((): void => {
if (pathname && pathname !== ROUTES.WORKSPACE_SUSPENDED) {
const navigateToWorkSpaceSuspended = (route: any): void => {
const { path } = route;
if (path && path !== ROUTES.WORKSPACE_SUSPENDED) {
history.push(ROUTES.WORKSPACE_SUSPENDED);
}
}, [pathname]);
};
useEffect(() => {
if (!isFetchingActiveLicense && activeLicense) {
const currentRoute = mapRoutes.get('current');
const shouldSuspendWorkspace =
activeLicense.state === LicenseState.DEFAULTED;
if (
shouldSuspendWorkspace &&
currentRoute &&
activeLicense.platform === LicensePlatform.CLOUD
) {
navigateToWorkSpaceSuspended();
navigateToWorkSpaceSuspended(currentRoute);
}
}
}, [isFetchingActiveLicense, activeLicense, navigateToWorkSpaceSuspended]);
}, [isFetchingActiveLicense, activeLicense, mapRoutes, pathname]);
useEffect(() => {
if (org && org.length > 0 && org[0].id !== undefined) {

File diff suppressed because it is too large

View File

@@ -157,6 +157,10 @@ export const IngestionSettings = Loadable(
() => import(/* webpackChunkName: "Ingestion Settings" */ 'pages/Settings'),
);
export const APIKeys = Loadable(
() => import(/* webpackChunkName: "All Settings" */ 'pages/Settings'),
);
export const MySettings = Loadable(
() => import(/* webpackChunkName: "All MySettings" */ 'pages/Settings'),
);

View File

@@ -513,7 +513,6 @@ export const oldRoutes = [
'/logs-save-views',
'/traces-save-views',
'/settings/access-tokens',
'/settings/api-keys',
'/messaging-queues',
'/alerts/edit',
];
@@ -524,8 +523,7 @@ export const oldNewRoutesMapping: Record<string, string> = {
'/logs-explorer/live': '/logs/logs-explorer/live',
'/logs-save-views': '/logs/saved-views',
'/traces-save-views': '/traces/saved-views',
'/settings/access-tokens': '/settings/service-accounts',
'/settings/api-keys': '/settings/service-accounts',
'/settings/access-tokens': '/settings/api-keys',
'/messaging-queues': '/messaging-queues/overview',
'/alerts/edit': '/alerts/overview',
};

View File

@@ -1,7 +1,6 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { DEFAULT_TIME_RANGE } from 'container/TopNav/DateTimeSelectionV2/constants';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { CreatePublicDashboardProps } from 'types/api/dashboard/public/create';
@@ -9,7 +8,7 @@ const createPublicDashboard = async (
props: CreatePublicDashboardProps,
): Promise<SuccessResponseV2<CreatePublicDashboardProps>> => {
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = DEFAULT_TIME_RANGE } = props;
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = '30m' } = props;
try {
const response = await axios.post(

View File

@@ -1,7 +1,6 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { DEFAULT_TIME_RANGE } from 'container/TopNav/DateTimeSelectionV2/constants';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { UpdatePublicDashboardProps } from 'types/api/dashboard/public/update';
@@ -9,7 +8,7 @@ const updatePublicDashboard = async (
props: UpdatePublicDashboardProps,
): Promise<SuccessResponseV2<UpdatePublicDashboardProps>> => {
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = DEFAULT_TIME_RANGE } = props;
const { dashboardId, timeRangeEnabled = false, defaultTimeRange = '30m' } = props;
try {
const response = await axios.put(

File diff suppressed because it is too large

View File

@@ -1,250 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
QueryClient,
QueryFunction,
QueryKey,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useQuery } from 'react-query';
import type { ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
Healthz200,
Healthz503,
Livez200,
Readyz200,
Readyz503,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
/**
* @summary Health check
*/
export const healthz = (signal?: AbortSignal) => {
return GeneratedAPIInstance<Healthz200>({
url: `/api/v2/healthz`,
method: 'GET',
signal,
});
};
export const getHealthzQueryKey = () => {
return [`/api/v2/healthz`] as const;
};
export const getHealthzQueryOptions = <
TData = Awaited<ReturnType<typeof healthz>>,
TError = ErrorType<Healthz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof healthz>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getHealthzQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof healthz>>> = ({
signal,
}) => healthz(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof healthz>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type HealthzQueryResult = NonNullable<
Awaited<ReturnType<typeof healthz>>
>;
export type HealthzQueryError = ErrorType<Healthz503>;
/**
* @summary Health check
*/
export function useHealthz<
TData = Awaited<ReturnType<typeof healthz>>,
TError = ErrorType<Healthz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof healthz>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getHealthzQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Health check
*/
export const invalidateHealthz = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getHealthzQueryKey() },
options,
);
return queryClient;
};
/**
* @summary Liveness check
*/
export const livez = (signal?: AbortSignal) => {
return GeneratedAPIInstance<Livez200>({
url: `/api/v2/livez`,
method: 'GET',
signal,
});
};
export const getLivezQueryKey = () => {
return [`/api/v2/livez`] as const;
};
export const getLivezQueryOptions = <
TData = Awaited<ReturnType<typeof livez>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof livez>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getLivezQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof livez>>> = ({
signal,
}) => livez(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof livez>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type LivezQueryResult = NonNullable<Awaited<ReturnType<typeof livez>>>;
export type LivezQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Liveness check
*/
export function useLivez<
TData = Awaited<ReturnType<typeof livez>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof livez>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getLivezQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Liveness check
*/
export const invalidateLivez = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries({ queryKey: getLivezQueryKey() }, options);
return queryClient;
};
/**
* @summary Readiness check
*/
export const readyz = (signal?: AbortSignal) => {
return GeneratedAPIInstance<Readyz200>({
url: `/api/v2/readyz`,
method: 'GET',
signal,
});
};
export const getReadyzQueryKey = () => {
return [`/api/v2/readyz`] as const;
};
export const getReadyzQueryOptions = <
TData = Awaited<ReturnType<typeof readyz>>,
TError = ErrorType<Readyz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof readyz>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getReadyzQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof readyz>>> = ({
signal,
}) => readyz(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof readyz>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ReadyzQueryResult = NonNullable<Awaited<ReturnType<typeof readyz>>>;
export type ReadyzQueryError = ErrorType<Readyz503>;
/**
* @summary Readiness check
*/
export function useReadyz<
TData = Awaited<ReturnType<typeof readyz>>,
TError = ErrorType<Readyz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof readyz>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getReadyzQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Readiness check
*/
export const invalidateReadyz = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getReadyzQueryKey() },
options,
);
return queryClient;
};

View File

@@ -20,113 +20,11 @@ import { useMutation, useQuery } from 'react-query';
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
HandleExportRawDataPOSTParams,
ListPromotedAndIndexedPaths200,
PromotetypesPromotePathDTO,
Querybuildertypesv5QueryRangeRequestDTO,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
/**
* This endpoints allows complex query exporting raw data for traces and logs
* @summary Export raw data
*/
export const handleExportRawDataPOST = (
querybuildertypesv5QueryRangeRequestDTO: BodyType<Querybuildertypesv5QueryRangeRequestDTO>,
params?: HandleExportRawDataPOSTParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/export_raw_data`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
params,
signal,
});
};
export const getHandleExportRawDataPOSTMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
> => {
const mutationKey = ['handleExportRawDataPOST'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
}
> = (props) => {
const { data, params } = props ?? {};
return handleExportRawDataPOST(data, params);
};
return { mutationFn, ...mutationOptions };
};
export type HandleExportRawDataPOSTMutationResult = NonNullable<
Awaited<ReturnType<typeof handleExportRawDataPOST>>
>;
export type HandleExportRawDataPOSTMutationBody = BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
export type HandleExportRawDataPOSTMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Export raw data
*/
export const useHandleExportRawDataPOST = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
> => {
const mutationOptions = getHandleExportRawDataPOSTMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoints promotes and indexes paths
* @summary Promote and index paths

View File

@@ -31,13 +31,10 @@ import type {
GetMetricHighlightsPathParameters,
GetMetricMetadata200,
GetMetricMetadataPathParameters,
GetMetricsOnboardingStatus200,
GetMetricsStats200,
GetMetricsTreemap200,
InspectMetrics200,
ListMetrics200,
ListMetricsParams,
MetricsexplorertypesInspectMetricsRequestDTO,
MetricsexplorertypesStatsRequestDTO,
MetricsexplorertypesTreemapRequestDTO,
MetricsexplorertypesUpdateMetricMetadataRequestDTO,
@@ -781,176 +778,6 @@ export const useUpdateMetricMetadata = <
return useMutation(mutationOptions);
};
/**
* Returns raw time series data points for a metric within a time range (max 30 minutes). Each series includes labels and timestamp/value pairs.
* @summary Inspect raw metric data points
*/
export const inspectMetrics = (
metricsexplorertypesInspectMetricsRequestDTO: BodyType<MetricsexplorertypesInspectMetricsRequestDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<InspectMetrics200>({
url: `/api/v2/metrics/inspect`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: metricsexplorertypesInspectMetricsRequestDTO,
signal,
});
};
export const getInspectMetricsMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof inspectMetrics>>,
TError,
{ data: BodyType<MetricsexplorertypesInspectMetricsRequestDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof inspectMetrics>>,
TError,
{ data: BodyType<MetricsexplorertypesInspectMetricsRequestDTO> },
TContext
> => {
const mutationKey = ['inspectMetrics'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof inspectMetrics>>,
{ data: BodyType<MetricsexplorertypesInspectMetricsRequestDTO> }
> = (props) => {
const { data } = props ?? {};
return inspectMetrics(data);
};
return { mutationFn, ...mutationOptions };
};
export type InspectMetricsMutationResult = NonNullable<
Awaited<ReturnType<typeof inspectMetrics>>
>;
export type InspectMetricsMutationBody = BodyType<MetricsexplorertypesInspectMetricsRequestDTO>;
export type InspectMetricsMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Inspect raw metric data points
*/
export const useInspectMetrics = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof inspectMetrics>>,
TError,
{ data: BodyType<MetricsexplorertypesInspectMetricsRequestDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof inspectMetrics>>,
TError,
{ data: BodyType<MetricsexplorertypesInspectMetricsRequestDTO> },
TContext
> => {
const mutationOptions = getInspectMetricsMutationOptions(options);
return useMutation(mutationOptions);
};
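A hedged usage sketch for useInspectMetrics; the wrapper name is illustrative, and the request body is assumed to be a valid MetricsexplorertypesInspectMetricsRequestDTO built by the caller:
// Illustrative wrapper around the generated mutation hook.
function useInspectMetric(request: BodyType<MetricsexplorertypesInspectMetricsRequestDTO>) {
  const inspectMutation = useInspectMetrics();
  const inspect = (): void => {
    inspectMutation.mutate({ data: request });
  };
  return { inspect, result: inspectMutation.data, isInspecting: inspectMutation.isLoading };
}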
/**
* Lightweight endpoint that checks if any non-SigNoz metrics have been ingested, used for onboarding status detection
* @summary Check if non-SigNoz metrics have been received
*/
export const getMetricsOnboardingStatus = (signal?: AbortSignal) => {
return GeneratedAPIInstance<GetMetricsOnboardingStatus200>({
url: `/api/v2/metrics/onboarding`,
method: 'GET',
signal,
});
};
export const getGetMetricsOnboardingStatusQueryKey = () => {
return [`/api/v2/metrics/onboarding`] as const;
};
export const getGetMetricsOnboardingStatusQueryOptions = <
TData = Awaited<ReturnType<typeof getMetricsOnboardingStatus>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricsOnboardingStatus>>,
TError,
TData
>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetMetricsOnboardingStatusQueryKey();
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMetricsOnboardingStatus>>
> = ({ signal }) => getMetricsOnboardingStatus(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMetricsOnboardingStatus>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMetricsOnboardingStatusQueryResult = NonNullable<
Awaited<ReturnType<typeof getMetricsOnboardingStatus>>
>;
export type GetMetricsOnboardingStatusQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Check if non-SigNoz metrics have been received
*/
export function useGetMetricsOnboardingStatus<
TData = Awaited<ReturnType<typeof getMetricsOnboardingStatus>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMetricsOnboardingStatus>>,
TError,
TData
>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMetricsOnboardingStatusQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Check if non-SigNoz metrics have been received
*/
export const invalidateGetMetricsOnboardingStatus = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMetricsOnboardingStatusQueryKey() },
options,
);
return queryClient;
};
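A short sketch pairing the onboarding query hook with its invalidation helper; the polling interval and the wrapper hook name are assumptions, only the generated functions above are real:
import { useQueryClient } from 'react-query';
// Illustrative wiring: poll the onboarding endpoint and expose a manual refresh.
function useMetricsOnboarding() {
  const queryClient = useQueryClient();
  const onboardingQuery = useGetMetricsOnboardingStatus({
    query: { refetchInterval: 30_000 }, // poll until the first non-SigNoz metric arrives (interval is illustrative)
  });
  const refresh = () => invalidateGetMetricsOnboardingStatus(queryClient);
  return { onboardingQuery, refresh };
}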
/**
* This endpoint provides list of metrics with their number of samples and timeseries for the given time range
* @summary Get metrics statistics

View File

@@ -1,744 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
QueryClient,
QueryFunction,
QueryKey,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useQuery } from 'react-query';
import type { ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
GetRuleHistoryFilterKeys200,
GetRuleHistoryFilterKeysParams,
GetRuleHistoryFilterKeysPathParameters,
GetRuleHistoryFilterValues200,
GetRuleHistoryFilterValuesParams,
GetRuleHistoryFilterValuesPathParameters,
GetRuleHistoryOverallStatus200,
GetRuleHistoryOverallStatusParams,
GetRuleHistoryOverallStatusPathParameters,
GetRuleHistoryStats200,
GetRuleHistoryStatsParams,
GetRuleHistoryStatsPathParameters,
GetRuleHistoryTimeline200,
GetRuleHistoryTimelineParams,
GetRuleHistoryTimelinePathParameters,
GetRuleHistoryTopContributors200,
GetRuleHistoryTopContributorsParams,
GetRuleHistoryTopContributorsPathParameters,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
/**
* Returns distinct label keys from rule history entries for the selected range.
* @summary Get rule history filter keys
*/
export const getRuleHistoryFilterKeys = (
{ id }: GetRuleHistoryFilterKeysPathParameters,
params?: GetRuleHistoryFilterKeysParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetRuleHistoryFilterKeys200>({
url: `/api/v2/rules/${id}/history/filter_keys`,
method: 'GET',
params,
signal,
});
};
export const getGetRuleHistoryFilterKeysQueryKey = (
{ id }: GetRuleHistoryFilterKeysPathParameters,
params?: GetRuleHistoryFilterKeysParams,
) => {
return [
`/api/v2/rules/${id}/history/filter_keys`,
...(params ? [params] : []),
] as const;
};
export const getGetRuleHistoryFilterKeysQueryOptions = <
TData = Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryFilterKeysPathParameters,
params?: GetRuleHistoryFilterKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetRuleHistoryFilterKeysQueryKey({ id }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>
> = ({ signal }) => getRuleHistoryFilterKeys({ id }, params, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetRuleHistoryFilterKeysQueryResult = NonNullable<
Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>
>;
export type GetRuleHistoryFilterKeysQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get rule history filter keys
*/
export function useGetRuleHistoryFilterKeys<
TData = Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryFilterKeysPathParameters,
params?: GetRuleHistoryFilterKeysParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryFilterKeys>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetRuleHistoryFilterKeysQueryOptions(
{ id },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get rule history filter keys
*/
export const invalidateGetRuleHistoryFilterKeys = async (
queryClient: QueryClient,
{ id }: GetRuleHistoryFilterKeysPathParameters,
params?: GetRuleHistoryFilterKeysParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetRuleHistoryFilterKeysQueryKey({ id }, params) },
options,
);
return queryClient;
};
/**
* Returns distinct label values for a given key from rule history entries.
* @summary Get rule history filter values
*/
export const getRuleHistoryFilterValues = (
{ id }: GetRuleHistoryFilterValuesPathParameters,
params?: GetRuleHistoryFilterValuesParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetRuleHistoryFilterValues200>({
url: `/api/v2/rules/${id}/history/filter_values`,
method: 'GET',
params,
signal,
});
};
export const getGetRuleHistoryFilterValuesQueryKey = (
{ id }: GetRuleHistoryFilterValuesPathParameters,
params?: GetRuleHistoryFilterValuesParams,
) => {
return [
`/api/v2/rules/${id}/history/filter_values`,
...(params ? [params] : []),
] as const;
};
export const getGetRuleHistoryFilterValuesQueryOptions = <
TData = Awaited<ReturnType<typeof getRuleHistoryFilterValues>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryFilterValuesPathParameters,
params?: GetRuleHistoryFilterValuesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryFilterValues>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getGetRuleHistoryFilterValuesQueryKey({ id }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getRuleHistoryFilterValues>>
> = ({ signal }) => getRuleHistoryFilterValues({ id }, params, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryFilterValues>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetRuleHistoryFilterValuesQueryResult = NonNullable<
Awaited<ReturnType<typeof getRuleHistoryFilterValues>>
>;
export type GetRuleHistoryFilterValuesQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get rule history filter values
*/
export function useGetRuleHistoryFilterValues<
TData = Awaited<ReturnType<typeof getRuleHistoryFilterValues>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryFilterValuesPathParameters,
params?: GetRuleHistoryFilterValuesParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryFilterValues>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetRuleHistoryFilterValuesQueryOptions(
{ id },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get rule history filter values
*/
export const invalidateGetRuleHistoryFilterValues = async (
queryClient: QueryClient,
{ id }: GetRuleHistoryFilterValuesPathParameters,
params?: GetRuleHistoryFilterValuesParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetRuleHistoryFilterValuesQueryKey({ id }, params) },
options,
);
return queryClient;
};
/**
* Returns overall firing/inactive intervals for a rule in the selected time range.
* @summary Get rule overall status timeline
*/
export const getRuleHistoryOverallStatus = (
{ id }: GetRuleHistoryOverallStatusPathParameters,
params: GetRuleHistoryOverallStatusParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetRuleHistoryOverallStatus200>({
url: `/api/v2/rules/${id}/history/overall_status`,
method: 'GET',
params,
signal,
});
};
export const getGetRuleHistoryOverallStatusQueryKey = (
{ id }: GetRuleHistoryOverallStatusPathParameters,
params?: GetRuleHistoryOverallStatusParams,
) => {
return [
`/api/v2/rules/${id}/history/overall_status`,
...(params ? [params] : []),
] as const;
};
export const getGetRuleHistoryOverallStatusQueryOptions = <
TData = Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryOverallStatusPathParameters,
params: GetRuleHistoryOverallStatusParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getGetRuleHistoryOverallStatusQueryKey({ id }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>
> = ({ signal }) => getRuleHistoryOverallStatus({ id }, params, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetRuleHistoryOverallStatusQueryResult = NonNullable<
Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>
>;
export type GetRuleHistoryOverallStatusQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get rule overall status timeline
*/
export function useGetRuleHistoryOverallStatus<
TData = Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryOverallStatusPathParameters,
params: GetRuleHistoryOverallStatusParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryOverallStatus>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetRuleHistoryOverallStatusQueryOptions(
{ id },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get rule overall status timeline
*/
export const invalidateGetRuleHistoryOverallStatus = async (
queryClient: QueryClient,
{ id }: GetRuleHistoryOverallStatusPathParameters,
params: GetRuleHistoryOverallStatusParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetRuleHistoryOverallStatusQueryKey({ id }, params) },
options,
);
return queryClient;
};
/**
* Returns trigger and resolution statistics for a rule in the selected time range.
* @summary Get rule history stats
*/
export const getRuleHistoryStats = (
{ id }: GetRuleHistoryStatsPathParameters,
params: GetRuleHistoryStatsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetRuleHistoryStats200>({
url: `/api/v2/rules/${id}/history/stats`,
method: 'GET',
params,
signal,
});
};
export const getGetRuleHistoryStatsQueryKey = (
{ id }: GetRuleHistoryStatsPathParameters,
params?: GetRuleHistoryStatsParams,
) => {
return [
`/api/v2/rules/${id}/history/stats`,
...(params ? [params] : []),
] as const;
};
export const getGetRuleHistoryStatsQueryOptions = <
TData = Awaited<ReturnType<typeof getRuleHistoryStats>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryStatsPathParameters,
params: GetRuleHistoryStatsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryStats>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetRuleHistoryStatsQueryKey({ id }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getRuleHistoryStats>>
> = ({ signal }) => getRuleHistoryStats({ id }, params, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryStats>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetRuleHistoryStatsQueryResult = NonNullable<
Awaited<ReturnType<typeof getRuleHistoryStats>>
>;
export type GetRuleHistoryStatsQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get rule history stats
*/
export function useGetRuleHistoryStats<
TData = Awaited<ReturnType<typeof getRuleHistoryStats>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryStatsPathParameters,
params: GetRuleHistoryStatsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryStats>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetRuleHistoryStatsQueryOptions(
{ id },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get rule history stats
*/
export const invalidateGetRuleHistoryStats = async (
queryClient: QueryClient,
{ id }: GetRuleHistoryStatsPathParameters,
params: GetRuleHistoryStatsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetRuleHistoryStatsQueryKey({ id }, params) },
options,
);
return queryClient;
};
/**
* Returns paginated timeline entries for rule state transitions.
* @summary Get rule history timeline
*/
export const getRuleHistoryTimeline = (
{ id }: GetRuleHistoryTimelinePathParameters,
params: GetRuleHistoryTimelineParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetRuleHistoryTimeline200>({
url: `/api/v2/rules/${id}/history/timeline`,
method: 'GET',
params,
signal,
});
};
export const getGetRuleHistoryTimelineQueryKey = (
{ id }: GetRuleHistoryTimelinePathParameters,
params?: GetRuleHistoryTimelineParams,
) => {
return [
`/api/v2/rules/${id}/history/timeline`,
...(params ? [params] : []),
] as const;
};
export const getGetRuleHistoryTimelineQueryOptions = <
TData = Awaited<ReturnType<typeof getRuleHistoryTimeline>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryTimelinePathParameters,
params: GetRuleHistoryTimelineParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryTimeline>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetRuleHistoryTimelineQueryKey({ id }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getRuleHistoryTimeline>>
> = ({ signal }) => getRuleHistoryTimeline({ id }, params, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryTimeline>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetRuleHistoryTimelineQueryResult = NonNullable<
Awaited<ReturnType<typeof getRuleHistoryTimeline>>
>;
export type GetRuleHistoryTimelineQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get rule history timeline
*/
export function useGetRuleHistoryTimeline<
TData = Awaited<ReturnType<typeof getRuleHistoryTimeline>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryTimelinePathParameters,
params: GetRuleHistoryTimelineParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryTimeline>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetRuleHistoryTimelineQueryOptions(
{ id },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get rule history timeline
*/
export const invalidateGetRuleHistoryTimeline = async (
queryClient: QueryClient,
{ id }: GetRuleHistoryTimelinePathParameters,
params: GetRuleHistoryTimelineParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetRuleHistoryTimelineQueryKey({ id }, params) },
options,
);
return queryClient;
};
/**
* Returns top label combinations contributing to rule firing in the selected time range.
* @summary Get top contributors to rule firing
*/
export const getRuleHistoryTopContributors = (
{ id }: GetRuleHistoryTopContributorsPathParameters,
params: GetRuleHistoryTopContributorsParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetRuleHistoryTopContributors200>({
url: `/api/v2/rules/${id}/history/top_contributors`,
method: 'GET',
params,
signal,
});
};
export const getGetRuleHistoryTopContributorsQueryKey = (
{ id }: GetRuleHistoryTopContributorsPathParameters,
params?: GetRuleHistoryTopContributorsParams,
) => {
return [
`/api/v2/rules/${id}/history/top_contributors`,
...(params ? [params] : []),
] as const;
};
export const getGetRuleHistoryTopContributorsQueryOptions = <
TData = Awaited<ReturnType<typeof getRuleHistoryTopContributors>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryTopContributorsPathParameters,
params: GetRuleHistoryTopContributorsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryTopContributors>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ??
getGetRuleHistoryTopContributorsQueryKey({ id }, params);
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getRuleHistoryTopContributors>>
> = ({ signal }) => getRuleHistoryTopContributors({ id }, params, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryTopContributors>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetRuleHistoryTopContributorsQueryResult = NonNullable<
Awaited<ReturnType<typeof getRuleHistoryTopContributors>>
>;
export type GetRuleHistoryTopContributorsQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get top contributors to rule firing
*/
export function useGetRuleHistoryTopContributors<
TData = Awaited<ReturnType<typeof getRuleHistoryTopContributors>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetRuleHistoryTopContributorsPathParameters,
params: GetRuleHistoryTopContributorsParams,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getRuleHistoryTopContributors>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetRuleHistoryTopContributorsQueryOptions(
{ id },
params,
options,
);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get top contributors to rule firing
*/
export const invalidateGetRuleHistoryTopContributors = async (
queryClient: QueryClient,
{ id }: GetRuleHistoryTopContributorsPathParameters,
params: GetRuleHistoryTopContributorsParams,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetRuleHistoryTopContributorsQueryKey({ id }, params) },
options,
);
return queryClient;
};

View File

@@ -23,15 +23,9 @@ import type {
CreateServiceAccount201,
CreateServiceAccountKey201,
CreateServiceAccountKeyPathParameters,
CreateServiceAccountRole201,
CreateServiceAccountRolePathParameters,
DeleteServiceAccountPathParameters,
DeleteServiceAccountRolePathParameters,
GetMyServiceAccount200,
GetServiceAccount200,
GetServiceAccountPathParameters,
GetServiceAccountRoles200,
GetServiceAccountRolesPathParameters,
ListServiceAccountKeys200,
ListServiceAccountKeysPathParameters,
ListServiceAccounts200,
@@ -39,10 +33,12 @@ import type {
RevokeServiceAccountKeyPathParameters,
ServiceaccounttypesPostableFactorAPIKeyDTO,
ServiceaccounttypesPostableServiceAccountDTO,
ServiceaccounttypesPostableServiceAccountRoleDTO,
ServiceaccounttypesUpdatableFactorAPIKeyDTO,
ServiceaccounttypesUpdatableServiceAccountDTO,
ServiceaccounttypesUpdatableServiceAccountStatusDTO,
UpdateServiceAccountKeyPathParameters,
UpdateServiceAccountPathParameters,
UpdateServiceAccountStatusPathParameters,
} from '../sigNoz.schemas';
/**
@@ -403,13 +399,13 @@ export const invalidateGetServiceAccount = async (
*/
export const updateServiceAccount = (
{ id }: UpdateServiceAccountPathParameters,
serviceaccounttypesPostableServiceAccountDTO: BodyType<ServiceaccounttypesPostableServiceAccountDTO>,
serviceaccounttypesUpdatableServiceAccountDTO: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/service_accounts/${id}`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: serviceaccounttypesPostableServiceAccountDTO,
data: serviceaccounttypesUpdatableServiceAccountDTO,
});
};
@@ -422,7 +418,7 @@ export const getUpdateServiceAccountMutationOptions = <
TError,
{
pathParams: UpdateServiceAccountPathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
},
TContext
>;
@@ -431,7 +427,7 @@ export const getUpdateServiceAccountMutationOptions = <
TError,
{
pathParams: UpdateServiceAccountPathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
},
TContext
> => {
@@ -448,7 +444,7 @@ export const getUpdateServiceAccountMutationOptions = <
Awaited<ReturnType<typeof updateServiceAccount>>,
{
pathParams: UpdateServiceAccountPathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
@@ -462,7 +458,7 @@ export const getUpdateServiceAccountMutationOptions = <
export type UpdateServiceAccountMutationResult = NonNullable<
Awaited<ReturnType<typeof updateServiceAccount>>
>;
export type UpdateServiceAccountMutationBody = BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
export type UpdateServiceAccountMutationBody = BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
export type UpdateServiceAccountMutationError = ErrorType<RenderErrorResponseDTO>;
/**
@@ -477,7 +473,7 @@ export const useUpdateServiceAccount = <
TError,
{
pathParams: UpdateServiceAccountPathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
},
TContext
>;
@@ -486,7 +482,7 @@ export const useUpdateServiceAccount = <
TError,
{
pathParams: UpdateServiceAccountPathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountDTO>;
},
TContext
> => {
@@ -875,150 +871,44 @@ export const useUpdateServiceAccountKey = <
return useMutation(mutationOptions);
};
/**
* This endpoint gets all the roles for the existing service account
* @summary Gets service account roles
* This endpoint updates an existing service account status
* @summary Updates a service account status
*/
export const getServiceAccountRoles = (
{ id }: GetServiceAccountRolesPathParameters,
signal?: AbortSignal,
export const updateServiceAccountStatus = (
{ id }: UpdateServiceAccountStatusPathParameters,
serviceaccounttypesUpdatableServiceAccountStatusDTO: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>,
) => {
return GeneratedAPIInstance<GetServiceAccountRoles200>({
url: `/api/v1/service_accounts/${id}/roles`,
method: 'GET',
signal,
});
};
export const getGetServiceAccountRolesQueryKey = ({
id,
}: GetServiceAccountRolesPathParameters) => {
return [`/api/v1/service_accounts/${id}/roles`] as const;
};
export const getGetServiceAccountRolesQueryOptions = <
TData = Awaited<ReturnType<typeof getServiceAccountRoles>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetServiceAccountRolesPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getServiceAccountRoles>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetServiceAccountRolesQueryKey({ id });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getServiceAccountRoles>>
> = ({ signal }) => getServiceAccountRoles({ id }, signal);
return {
queryKey,
queryFn,
enabled: !!id,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getServiceAccountRoles>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetServiceAccountRolesQueryResult = NonNullable<
Awaited<ReturnType<typeof getServiceAccountRoles>>
>;
export type GetServiceAccountRolesQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Gets service account roles
*/
export function useGetServiceAccountRoles<
TData = Awaited<ReturnType<typeof getServiceAccountRoles>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ id }: GetServiceAccountRolesPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getServiceAccountRoles>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetServiceAccountRolesQueryOptions({ id }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Gets service account roles
*/
export const invalidateGetServiceAccountRoles = async (
queryClient: QueryClient,
{ id }: GetServiceAccountRolesPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetServiceAccountRolesQueryKey({ id }) },
options,
);
return queryClient;
};
/**
* This endpoint assigns a role to a service account
* @summary Create service account role
*/
export const createServiceAccountRole = (
{ id }: CreateServiceAccountRolePathParameters,
serviceaccounttypesPostableServiceAccountRoleDTO: BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<CreateServiceAccountRole201>({
url: `/api/v1/service_accounts/${id}/roles`,
method: 'POST',
return GeneratedAPIInstance<string>({
url: `/api/v1/service_accounts/${id}/status`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: serviceaccounttypesPostableServiceAccountRoleDTO,
signal,
data: serviceaccounttypesUpdatableServiceAccountStatusDTO,
});
};
export const getCreateServiceAccountRoleMutationOptions = <
export const getUpdateServiceAccountStatusMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof createServiceAccountRole>>,
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
TError,
{
pathParams: CreateServiceAccountRolePathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>;
pathParams: UpdateServiceAccountStatusPathParameters;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof createServiceAccountRole>>,
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
TError,
{
pathParams: CreateServiceAccountRolePathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>;
pathParams: UpdateServiceAccountStatusPathParameters;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
},
TContext
> => {
const mutationKey = ['createServiceAccountRole'];
const mutationKey = ['updateServiceAccountStatus'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
@@ -1028,299 +918,52 @@ export const getCreateServiceAccountRoleMutationOptions = <
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof createServiceAccountRole>>,
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
{
pathParams: CreateServiceAccountRolePathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>;
pathParams: UpdateServiceAccountStatusPathParameters;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
return createServiceAccountRole(pathParams, data);
return updateServiceAccountStatus(pathParams, data);
};
return { mutationFn, ...mutationOptions };
};
export type CreateServiceAccountRoleMutationResult = NonNullable<
Awaited<ReturnType<typeof createServiceAccountRole>>
export type UpdateServiceAccountStatusMutationResult = NonNullable<
Awaited<ReturnType<typeof updateServiceAccountStatus>>
>;
export type CreateServiceAccountRoleMutationBody = BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>;
export type CreateServiceAccountRoleMutationError = ErrorType<RenderErrorResponseDTO>;
export type UpdateServiceAccountStatusMutationBody = BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
export type UpdateServiceAccountStatusMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Create service account role
* @summary Updates a service account status
*/
export const useCreateServiceAccountRole = <
export const useUpdateServiceAccountStatus = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof createServiceAccountRole>>,
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
TError,
{
pathParams: CreateServiceAccountRolePathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>;
pathParams: UpdateServiceAccountStatusPathParameters;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof createServiceAccountRole>>,
Awaited<ReturnType<typeof updateServiceAccountStatus>>,
TError,
{
pathParams: CreateServiceAccountRolePathParameters;
data: BodyType<ServiceaccounttypesPostableServiceAccountRoleDTO>;
pathParams: UpdateServiceAccountStatusPathParameters;
data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>;
},
TContext
> => {
const mutationOptions = getCreateServiceAccountRoleMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint revokes a role from service account
* @summary Delete service account role
*/
export const deleteServiceAccountRole = ({
id,
rid,
}: DeleteServiceAccountRolePathParameters) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/service_accounts/${id}/roles/${rid}`,
method: 'DELETE',
});
};
export const getDeleteServiceAccountRoleMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteServiceAccountRole>>,
TError,
{ pathParams: DeleteServiceAccountRolePathParameters },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof deleteServiceAccountRole>>,
TError,
{ pathParams: DeleteServiceAccountRolePathParameters },
TContext
> => {
const mutationKey = ['deleteServiceAccountRole'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof deleteServiceAccountRole>>,
{ pathParams: DeleteServiceAccountRolePathParameters }
> = (props) => {
const { pathParams } = props ?? {};
return deleteServiceAccountRole(pathParams);
};
return { mutationFn, ...mutationOptions };
};
export type DeleteServiceAccountRoleMutationResult = NonNullable<
Awaited<ReturnType<typeof deleteServiceAccountRole>>
>;
export type DeleteServiceAccountRoleMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Delete service account role
*/
export const useDeleteServiceAccountRole = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteServiceAccountRole>>,
TError,
{ pathParams: DeleteServiceAccountRolePathParameters },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof deleteServiceAccountRole>>,
TError,
{ pathParams: DeleteServiceAccountRolePathParameters },
TContext
> => {
const mutationOptions = getDeleteServiceAccountRoleMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint gets my service account
* @summary Gets my service account
*/
export const getMyServiceAccount = (signal?: AbortSignal) => {
return GeneratedAPIInstance<GetMyServiceAccount200>({
url: `/api/v1/service_accounts/me`,
method: 'GET',
signal,
});
};
export const getGetMyServiceAccountQueryKey = () => {
return [`/api/v1/service_accounts/me`] as const;
};
export const getGetMyServiceAccountQueryOptions = <
TData = Awaited<ReturnType<typeof getMyServiceAccount>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMyServiceAccount>>,
TError,
TData
>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetMyServiceAccountQueryKey();
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getMyServiceAccount>>
> = ({ signal }) => getMyServiceAccount(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof getMyServiceAccount>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetMyServiceAccountQueryResult = NonNullable<
Awaited<ReturnType<typeof getMyServiceAccount>>
>;
export type GetMyServiceAccountQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Gets my service account
*/
export function useGetMyServiceAccount<
TData = Awaited<ReturnType<typeof getMyServiceAccount>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getMyServiceAccount>>,
TError,
TData
>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetMyServiceAccountQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Gets my service account
*/
export const invalidateGetMyServiceAccount = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetMyServiceAccountQueryKey() },
options,
);
return queryClient;
};
/**
* This endpoint gets my service account
* @summary Updates my service account
*/
export const updateMyServiceAccount = (
serviceaccounttypesPostableServiceAccountDTO: BodyType<ServiceaccounttypesPostableServiceAccountDTO>,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/service_accounts/me`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: serviceaccounttypesPostableServiceAccountDTO,
});
};
export const getUpdateMyServiceAccountMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateMyServiceAccount>>,
TError,
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof updateMyServiceAccount>>,
TError,
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
TContext
> => {
const mutationKey = ['updateMyServiceAccount'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof updateMyServiceAccount>>,
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> }
> = (props) => {
const { data } = props ?? {};
return updateMyServiceAccount(data);
};
return { mutationFn, ...mutationOptions };
};
export type UpdateMyServiceAccountMutationResult = NonNullable<
Awaited<ReturnType<typeof updateMyServiceAccount>>
>;
export type UpdateMyServiceAccountMutationBody = BodyType<ServiceaccounttypesPostableServiceAccountDTO>;
export type UpdateMyServiceAccountMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Updates my service account
*/
export const useUpdateMyServiceAccount = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateMyServiceAccount>>,
TError,
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof updateMyServiceAccount>>,
TError,
{ data: BodyType<ServiceaccounttypesPostableServiceAccountDTO> },
TContext
> => {
const mutationOptions = getUpdateMyServiceAccountMutationOptions(options);
const mutationOptions = getUpdateServiceAccountStatusMutationOptions(options);
return useMutation(mutationOptions);
};
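A hedged sketch of calling the new status-update hook introduced in this diff; only the variable shape ({ pathParams, data }) comes from the generated types, and the contents of the status DTO are left to the caller:
// Illustrative wrapper; `id` is the service account id from the route or table row.
function useToggleServiceAccountStatus(id: string) {
  const statusMutation = useUpdateServiceAccountStatus();
  const setStatus = (
    data: BodyType<ServiceaccounttypesUpdatableServiceAccountStatusDTO>,
  ): void => {
    statusMutation.mutate({ pathParams: { id }, data });
  };
  return { setStatus, isUpdating: statusMutation.isLoading };
}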

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,54 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
export interface InspectMetricsRequest {
metricName: string;
start: number;
end: number;
filters: TagFilter;
}
export interface InspectMetricsResponse {
status: string;
data: {
series: InspectMetricsSeries[];
};
}
export interface InspectMetricsSeries {
title?: string;
strokeColor?: string;
labels: Record<string, string>;
labelsArray: Array<Record<string, string>>;
values: InspectMetricsTimestampValue[];
}
interface InspectMetricsTimestampValue {
timestamp: number;
value: string;
}
export const getInspectMetricsDetails = async (
request: InspectMetricsRequest,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<InspectMetricsResponse> | ErrorResponse> => {
try {
const response = await axios.post(`/metrics/inspect`, request, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
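An illustrative call site for getInspectMetricsDetails; the metric name, the 30-minute window, and the empty TagFilter are placeholder values:
// Sketch only: logs how many series came back for a hypothetical metric.
async function logInspectSeriesCount(): Promise<void> {
  const now = Date.now();
  const result = await getInspectMetricsDetails({
    metricName: 'http_server_duration', // placeholder metric
    start: now - 30 * 60 * 1000, // the endpoint caps the range at 30 minutes
    end: now,
    filters: { items: [], op: 'AND' },
  });
  if (result.error === null) {
    console.log(`received ${result.payload?.data.series.length ?? 0} series`);
  }
}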

View File

@@ -0,0 +1,75 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { MetricType } from './getMetricsList';
export interface MetricDetails {
name: string;
description: string;
type: string;
unit: string;
timeseries: number;
samples: number;
timeSeriesTotal: number;
timeSeriesActive: number;
lastReceived: string;
attributes: MetricDetailsAttribute[] | null;
metadata?: {
metric_type: MetricType;
description: string;
unit: string;
temporality?: Temporality;
};
alerts: MetricDetailsAlert[] | null;
dashboards: MetricDetailsDashboard[] | null;
}
export enum Temporality {
CUMULATIVE = 'Cumulative',
DELTA = 'Delta',
}
export interface MetricDetailsAttribute {
key: string;
value: string[];
valueCount: number;
}
export interface MetricDetailsAlert {
alert_name: string;
alert_id: string;
}
export interface MetricDetailsDashboard {
dashboard_name: string;
dashboard_id: string;
}
export interface MetricDetailsResponse {
status: string;
data: MetricDetails;
}
export const getMetricDetails = async (
metricName: string,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<MetricDetailsResponse> | ErrorResponse> => {
try {
const response = await axios.get(`/metrics/${metricName}/metadata`, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: 'Success',
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
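A hedged sketch that wraps the fetcher above in a react-query hook; the query key naming is an assumption:
import { useQuery } from 'react-query';
// Illustrative hook; react-query passes its abort signal through to the fetcher.
function useMetricDetails(metricName: string) {
  return useQuery(['metric-details', metricName], ({ signal }) =>
    getMetricDetails(metricName, signal),
  );
}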

View File

@@ -0,0 +1,67 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import {
OrderByPayload,
TreemapViewType,
} from 'container/MetricsExplorer/Summary/types';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
export interface MetricsListPayload {
filters: TagFilter;
groupBy?: BaseAutocompleteData[];
offset?: number;
limit?: number;
orderBy?: OrderByPayload;
}
export enum MetricType {
SUM = 'Sum',
GAUGE = 'Gauge',
HISTOGRAM = 'Histogram',
SUMMARY = 'Summary',
EXPONENTIAL_HISTOGRAM = 'ExponentialHistogram',
}
export interface MetricsListItemData {
metric_name: string;
description: string;
type: MetricType;
unit: string;
[TreemapViewType.TIMESERIES]: number;
[TreemapViewType.SAMPLES]: number;
lastReceived: string;
}
export interface MetricsListResponse {
status: string;
data: {
metrics: MetricsListItemData[];
total?: number;
};
}
export const getMetricsList = async (
props: MetricsListPayload,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<MetricsListResponse> | ErrorResponse> => {
try {
const response = await axios.post('/metrics', props, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
params: props,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
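An illustrative first-page request against getMetricsList; the pagination values and the empty filter are placeholders:
// Sketch only: fetch one page and log the metric names.
async function fetchFirstMetricsPage(): Promise<void> {
  const listResult = await getMetricsList({
    filters: { items: [], op: 'AND' },
    offset: 0,
    limit: 50,
  });
  if (listResult.error === null) {
    console.log(listResult.payload?.data.metrics.map((m) => m.metric_name));
  }
}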

View File

@@ -0,0 +1,44 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
export interface MetricsListFilterKeysResponse {
status: string;
data: {
metricColumns: string[];
attributeKeys: BaseAutocompleteData[];
};
}
export interface GetMetricsListFilterKeysParams {
searchText: string;
limit?: number;
}
export const getMetricsListFilterKeys = async (
params: GetMetricsListFilterKeysParams,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<MetricsListFilterKeysResponse> | ErrorResponse> => {
try {
const response = await axios.get('/metrics/filters/keys', {
params: {
searchText: params.searchText,
limit: params.limit,
},
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

View File

@@ -0,0 +1,43 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
export interface MetricsListFilterValuesPayload {
filterAttributeKeyDataType: string;
filterKey: string;
searchText: string;
limit: number;
}
export interface MetricsListFilterValuesResponse {
status: string;
data: {
filterValues: string[];
};
}
export const getMetricsListFilterValues = async (
props: MetricsListFilterValuesPayload,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<
SuccessResponse<MetricsListFilterValuesResponse> | ErrorResponse
> => {
try {
const response = await axios.post('/metrics/filters/values', props, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
params: props,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};

View File

@@ -0,0 +1,60 @@
import axios from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
export interface RelatedMetricsPayload {
start: number;
end: number;
currentMetricName: string;
}
export interface RelatedMetricDashboard {
dashboard_name: string;
dashboard_id: string;
widget_id: string;
widget_name: string;
}
export interface RelatedMetricAlert {
alert_name: string;
alert_id: string;
}
export interface RelatedMetric {
name: string;
query: IBuilderQuery;
dashboards: RelatedMetricDashboard[];
alerts: RelatedMetricAlert[];
}
export interface RelatedMetricsResponse {
status: 'success';
data: {
related_metrics: RelatedMetric[];
};
}
export const getRelatedMetrics = async (
props: RelatedMetricsPayload,
signal?: AbortSignal,
headers?: Record<string, string>,
): Promise<SuccessResponse<RelatedMetricsResponse> | ErrorResponse> => {
try {
const response = await axios.post('/metrics/related', props, {
signal,
headers,
});
return {
statusCode: 200,
error: null,
message: response.data.status,
payload: response.data,
params: props,
};
} catch (error) {
return ErrorResponseHandler(error as AxiosError);
}
};
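An illustrative call for getRelatedMetrics; the one-hour window and the metric name are placeholders supplied by the caller:
// Sketch only: list the names of metrics related to the currently inspected one.
async function logRelatedMetricNames(): Promise<void> {
  const related = await getRelatedMetrics({
    start: Date.now() - 60 * 60 * 1000,
    end: Date.now(),
    currentMetricName: 'http_server_duration', // placeholder metric
  });
  if (related.error === null) {
    console.log(related.payload?.data.related_metrics.map((r) => r.name));
  }
}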

View File

@@ -8,32 +8,42 @@ export const downloadExportData = async (
props: ExportRawDataProps,
): Promise<void> => {
try {
const response = await axios.post<Blob>(
`export_raw_data?format=${encodeURIComponent(props.format)}`,
props.body,
{
responseType: 'blob',
decompress: true,
headers: {
Accept: 'application/octet-stream',
'Content-Type': 'application/json',
},
timeout: 0,
},
);
const queryParams = new URLSearchParams();
queryParams.append('start', String(props.start));
queryParams.append('end', String(props.end));
queryParams.append('filter', props.filter);
props.columns.forEach((col) => {
queryParams.append('columns', col);
});
queryParams.append('order_by', props.orderBy);
queryParams.append('limit', String(props.limit));
queryParams.append('format', props.format);
const response = await axios.get<Blob>(`export_raw_data?${queryParams}`, {
responseType: 'blob', // Important: tell axios to handle response as blob
decompress: true, // Enable automatic decompression
headers: {
Accept: 'application/octet-stream', // Tell server we expect binary data
},
timeout: 0,
});
// Only proceed if the response status is 200
if (response.status !== 200) {
throw new Error(
`Failed to download data: server returned status ${response.status}`,
);
}
// Create blob URL from response data
const blob = new Blob([response.data], { type: 'application/octet-stream' });
const url = window.URL.createObjectURL(blob);
// Create and configure download link
const link = document.createElement('a');
link.href = url;
// Get filename from Content-Disposition header or generate timestamped default
const filename =
response.headers['content-disposition']
?.split('filename=')[1]
@@ -41,6 +51,7 @@ export const downloadExportData = async (
link.setAttribute('download', filename);
// Trigger download
document.body.appendChild(link);
link.click();
link.remove();

View File

@@ -0,0 +1,19 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, PendingInvite } from 'types/api/user/getPendingInvites';
const get = async (): Promise<SuccessResponseV2<PendingInvite[]>> => {
try {
const response = await axios.get<PayloadProps>(`/invite`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default get;

View File

@@ -0,0 +1,22 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/accept';
import { UserResponse } from 'types/api/user/getUser';
const accept = async (
props: Props,
): Promise<SuccessResponseV2<UserResponse>> => {
try {
const response = await axios.post<PayloadProps>(`/invite/accept`, props);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default accept;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/user/deleteInvite';
const del = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete(`/invite/${props.id}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default del;

View File

@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
InviteDetails,
PayloadProps,
Props,
} from 'types/api/user/getInviteDetails';
const getInviteDetails = async (
props: Props,
): Promise<SuccessResponseV2<InviteDetails>> => {
try {
const response = await axios.get<PayloadProps>(
`/invite/${props.inviteId}?ref=${window.location.href}`,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getInviteDetails;

View File

@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
APIKeyProps,
CreateAPIKeyProps,
CreatePayloadProps,
} from 'types/api/pat/types';
const create = async (
props: CreateAPIKeyProps,
): Promise<SuccessResponseV2<APIKeyProps>> => {
try {
const response = await axios.post<CreatePayloadProps>('/pats', {
...props,
});
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default create;

View File

@@ -0,0 +1,19 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
const deleteAPIKey = async (id: string): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete(`/pats/${id}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteAPIKey;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { AllAPIKeyProps, APIKeyProps } from 'types/api/pat/types';
const list = async (): Promise<SuccessResponseV2<APIKeyProps[]>> => {
try {
const response = await axios.get<AllAPIKeyProps>('/pats');
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default list;
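An illustrative consumer of the PAT helpers in this group of files; error handling is delegated to ErrorResponseHandlerV2, which is assumed to throw on failure:
// Sketch only: count the existing API keys.
async function logAPIKeyCount(): Promise<void> {
  const keys = await list();
  console.log(`found ${keys.data.length} API keys`);
}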

View File

@@ -0,0 +1,24 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { UpdateAPIKeyProps } from 'types/api/pat/types';
const updateAPIKey = async (
props: UpdateAPIKeyProps,
): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.put(`/pats/${props.id}`, {
...props.data,
});
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default updateAPIKey;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/user/deleteUser';
const deleteUser = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete(`/user/${props.userId}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteUser;

View File

@@ -1,97 +0,0 @@
.announcement-banner {
display: flex;
align-items: center;
justify-content: space-between;
gap: var(--spacing-4);
padding: var(--padding-2) var(--padding-4);
height: 40px;
font-family: var(--font-sans), sans-serif;
font-size: var(--label-base-500-font-size);
line-height: var(--label-base-500-line-height);
font-weight: var(--label-base-500-font-weight);
letter-spacing: -0.065px;
&--warning {
background-color: var(--callout-warning-background);
color: var(--callout-warning-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-warning-border);
}
}
&--info {
background-color: var(--callout-primary-background);
color: var(--callout-primary-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-primary-border);
}
}
&--error {
background-color: var(--callout-error-background);
color: var(--callout-error-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-error-border);
}
}
&--success {
background-color: var(--callout-success-background);
color: var(--callout-success-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-success-border);
}
}
&__body {
display: flex;
align-items: center;
gap: var(--spacing-4);
flex: 1;
min-width: 0;
}
&__icon {
display: flex;
align-items: center;
flex-shrink: 0;
}
&__message {
flex: 1;
min-width: 0;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
line-height: var(--line-height-normal);
strong {
font-weight: var(--font-weight-semibold);
}
}
&__action {
height: 24px;
font-size: var(--label-small-500-font-size);
color: currentColor;
&:hover {
opacity: 0.8;
}
}
&__dismiss {
width: 24px;
height: 24px;
padding: 0;
color: currentColor;
&:hover {
opacity: 0.8;
}
}
}

View File

@@ -1,89 +0,0 @@
import { render, screen, userEvent } from 'tests/test-utils';
import {
AnnouncementBanner,
AnnouncementBannerProps,
PersistedAnnouncementBanner,
} from './index';
const STORAGE_KEY = 'test-banner-dismissed';
function renderBanner(props: Partial<AnnouncementBannerProps> = {}): void {
render(<AnnouncementBanner message="Test message" {...props} />);
}
afterEach(() => {
localStorage.removeItem(STORAGE_KEY);
});
describe('AnnouncementBanner', () => {
it('renders message and default warning variant', () => {
renderBanner({ message: <strong>Heads up</strong> });
const alert = screen.getByRole('alert');
expect(alert).toHaveClass('announcement-banner--warning');
expect(alert).toHaveTextContent('Heads up');
});
it.each(['warning', 'info', 'success', 'error'] as const)(
'renders %s variant correctly',
(type) => {
renderBanner({ type, message: 'Test message' });
const alert = screen.getByRole('alert');
expect(alert).toHaveClass(`announcement-banner--${type}`);
},
);
it('calls action onClick when action button is clicked', async () => {
const onClick = jest.fn() as jest.MockedFunction<() => void>;
renderBanner({ action: { label: 'Go to Settings', onClick } });
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /go to settings/i }));
expect(onClick).toHaveBeenCalledTimes(1);
});
it('hides dismiss button when onClose is not provided and hides icon when icon is null', () => {
renderBanner({ onClose: undefined, icon: null });
expect(
screen.queryByRole('button', { name: /dismiss/i }),
).not.toBeInTheDocument();
expect(
screen.queryByRole('alert')?.querySelector('.announcement-banner__icon'),
).not.toBeInTheDocument();
});
});
describe('PersistedAnnouncementBanner', () => {
it('dismisses on click, calls onDismiss, and persists to localStorage', async () => {
const onDismiss = jest.fn() as jest.MockedFunction<() => void>;
render(
<PersistedAnnouncementBanner
message="Test message"
storageKey={STORAGE_KEY}
onDismiss={onDismiss}
/>,
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /dismiss/i }));
expect(screen.queryByRole('alert')).not.toBeInTheDocument();
expect(onDismiss).toHaveBeenCalledTimes(1);
expect(localStorage.getItem(STORAGE_KEY)).toBe('true');
});
it('does not render when storageKey is already set in localStorage', () => {
localStorage.setItem(STORAGE_KEY, 'true');
render(
<PersistedAnnouncementBanner
message="Test message"
storageKey={STORAGE_KEY}
/>,
);
expect(screen.queryByRole('alert')).not.toBeInTheDocument();
});
});

View File

@@ -1,84 +0,0 @@
import { ReactNode } from 'react';
import { Button } from '@signozhq/button';
import {
CircleAlert,
CircleCheckBig,
Info,
TriangleAlert,
X,
} from '@signozhq/icons';
import cx from 'classnames';
import './AnnouncementBanner.styles.scss';
export type AnnouncementBannerType = 'warning' | 'info' | 'error' | 'success';
export interface AnnouncementBannerAction {
label: string;
onClick: () => void;
}
export interface AnnouncementBannerProps {
message: ReactNode;
type?: AnnouncementBannerType;
icon?: ReactNode | null;
action?: AnnouncementBannerAction;
onClose?: () => void;
className?: string;
}
const DEFAULT_ICONS: Record<AnnouncementBannerType, ReactNode> = {
warning: <TriangleAlert size={14} />,
info: <Info size={14} />,
error: <CircleAlert size={14} />,
success: <CircleCheckBig size={14} />,
};
export default function AnnouncementBanner({
message,
type = 'warning',
icon,
action,
onClose,
className,
}: AnnouncementBannerProps): JSX.Element {
const resolvedIcon = icon === null ? null : icon ?? DEFAULT_ICONS[type];
return (
<div
role="alert"
className={cx(
'announcement-banner',
`announcement-banner--${type}`,
className,
)}
>
<div className="announcement-banner__body">
{resolvedIcon && (
<span className="announcement-banner__icon">{resolvedIcon}</span>
)}
<span className="announcement-banner__message">{message}</span>
{action && (
<Button
type="button"
className="announcement-banner__action"
onClick={action.onClick}
>
{action.label}
</Button>
)}
</div>
{onClose && (
<Button
type="button"
aria-label="Dismiss"
className="announcement-banner__dismiss"
onClick={onClose}
>
<X size={14} />
</Button>
)}
</div>
);
}

View File

@@ -1,34 +0,0 @@
import { useState } from 'react';
import AnnouncementBanner, {
AnnouncementBannerProps,
} from './AnnouncementBanner';
interface PersistedAnnouncementBannerProps extends AnnouncementBannerProps {
storageKey: string;
onDismiss?: () => void;
}
function isDismissed(storageKey: string): boolean {
return localStorage.getItem(storageKey) === 'true';
}
export default function PersistedAnnouncementBanner({
storageKey,
onDismiss,
...props
}: PersistedAnnouncementBannerProps): JSX.Element | null {
const [visible, setVisible] = useState(() => !isDismissed(storageKey));
if (!visible) {
return null;
}
const handleClose = (): void => {
localStorage.setItem(storageKey, 'true');
setVisible(false);
onDismiss?.();
};
return <AnnouncementBanner {...props} onClose={handleClose} />;
}

View File

@@ -1,12 +0,0 @@
import AnnouncementBanner from './AnnouncementBanner';
import PersistedAnnouncementBanner from './PersistedAnnouncementBanner';
export type {
AnnouncementBannerAction,
AnnouncementBannerProps,
AnnouncementBannerType,
} from './AnnouncementBanner';
export { AnnouncementBanner, PersistedAnnouncementBanner };
export default AnnouncementBanner;
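
Taken together, the files above define a standalone banner and a localStorage-backed variant that stays hidden once dismissed. A minimal usage sketch, assuming an import path of 'components/AnnouncementBanner'; the wrapper component, copy, and action handler are illustrative only.

// Sketch only: rendering both banner variants exported by the barrel file above.
// Props follow the AnnouncementBannerProps / PersistedAnnouncementBannerProps
// interfaces; the import path and component name are assumptions.
import { AnnouncementBanner, PersistedAnnouncementBanner } from 'components/AnnouncementBanner';

function MaintenanceNotice(): JSX.Element {
  return (
    <>
      <AnnouncementBanner
        type="info"
        message={<strong>Scheduled maintenance at 02:00 UTC</strong>}
        action={{
          label: 'Learn more',
          onClick: (): void => {
            console.log('open maintenance docs'); // placeholder action
          },
        }}
      />
      <PersistedAnnouncementBanner
        type="warning"
        storageKey="maintenance-banner-dismissed"
        message="This warning stays hidden once dismissed."
      />
    </>
  );
}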

View File

@@ -7,7 +7,7 @@ import ROUTES from 'constants/routes';
import useUpdatedQuery from 'container/GridCardLayout/useResolveQuery';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useNotifications } from 'hooks/useNotifications';
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { AppState } from 'store/reducers';
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';
@@ -79,7 +79,7 @@ export function useNavigateToExplorer(): (
);
const { getUpdatedQuery } = useUpdatedQuery();
const { selectedDashboard } = useDashboardStore();
const { selectedDashboard } = useDashboard();
const { notifications } = useNotifications();
return useCallback(
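
The hunk above switches the selected-dashboard source from the store hook to the Dashboard provider hook. A minimal sketch of the consuming pattern after the change; only useDashboard and selectedDashboard come from the diff, the wrapper hook below is an assumption.

// Sketch only: reading the selected dashboard via the provider hook,
// mirroring the change above. The wrapper hook is illustrative.
import { useDashboard } from 'providers/Dashboard/Dashboard';

export function useHasSelectedDashboard(): boolean {
  const { selectedDashboard } = useDashboard();
  return Boolean(selectedDashboard);
}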

View File

@@ -1,106 +0,0 @@
.create-sa-modal {
max-width: 530px;
background: var(--popover);
border: 1px solid var(--secondary);
border-radius: 4px;
box-shadow: 0 4px 9px 0 rgba(0, 0, 0, 0.04);
[data-slot='dialog-header'] {
padding: var(--padding-4);
border-bottom: 1px solid var(--secondary);
flex-shrink: 0;
background: transparent;
margin: 0;
}
[data-slot='dialog-title'] {
font-size: var(--label-base-400-font-size);
font-weight: var(--label-base-400-font-weight);
line-height: var(--label-base-400-line-height);
letter-spacing: -0.065px;
color: var(--bg-base-white);
margin: 0;
}
[data-slot='dialog-description'] {
padding: 0;
.create-sa-modal__content {
padding: var(--padding-4);
}
}
}
.create-sa-form {
display: flex;
flex-direction: column;
gap: var(--spacing-2);
&__item {
display: flex;
flex-direction: column;
gap: var(--spacing-1);
margin-bottom: var(--spacing-4);
> label {
font-size: var(--paragraph-base-400-font-size);
font-weight: var(--paragraph-base-400-font-weight);
color: var(--foreground);
letter-spacing: -0.07px;
}
}
&__input {
height: 32px;
color: var(--l1-foreground);
background-color: var(--l2-background);
border-color: var(--border);
font-size: var(--paragraph-base-400-font-size);
border-radius: 2px;
width: 100%;
&::placeholder {
color: var(--l3-foreground);
}
&:focus {
border-color: var(--primary);
box-shadow: none;
}
}
&__error {
font-size: var(--paragraph-small-400-font-size);
color: var(--destructive);
line-height: var(--paragraph-small-400-line-height);
margin: 0;
}
&__helper {
font-size: var(--paragraph-small-400-font-size);
color: var(--l3-foreground);
margin: calc(var(--spacing-2) * -1) 0 var(--spacing-4) 0;
line-height: var(--paragraph-small-400-line-height);
}
}
.create-sa-modal__footer {
display: flex;
flex-direction: row;
align-items: center;
justify-content: flex-end;
padding: 0 var(--padding-4);
height: 56px;
min-height: 56px;
border-top: 1px solid var(--secondary);
gap: var(--spacing-4);
flex-shrink: 0;
}
.lightMode {
.create-sa-modal {
[data-slot='dialog-title'] {
color: var(--bg-base-black);
}
}
}

Some files were not shown because too many files have changed in this diff.