Compare commits

..

34 Commits

Author  SHA1        Date                        Message
aks07   a06046612a  2026-03-17 18:45:11 +05:30  feat: connector line ux
aks07   31c9d4309b  2026-03-17 17:06:05 +05:30  feat: move to service worker
aks07   7bef8b86c4  2026-03-17 14:39:57 +05:30  feat: subtree segregated tree
aks07   d26acd36a3  2026-03-17 13:55:58 +05:30  feat: subtree segregated tree
aks07   1cee595135  2026-03-17 13:29:07 +05:30  feat: subtree segregated tree
aks07   dd1868fcbc  2026-03-16 23:27:31 +05:30  feat: row based flamegraph
aks07   a20beb8ba2  2026-03-16 14:18:12 +05:30  feat: span hover popover sync
aks07   998d652feb  2026-03-16 10:19:42 +05:30  feat: fix hover option overflow
aks07   3695d3c180  2026-03-13 15:53:46 +05:30  feat: match span style
aks07   da175bafbc  2026-03-12 16:12:49 +05:30  feat: add TimelineV3 ruler to waterfall header with padding fix

        Add the TimelineV3 component to the sticky header of the waterfall's
        right panel so timeline tick marks are visible. Add horizontal padding
        to both the timeline header and span duration bars to prevent label
        overflow/clipping at the edges.

        Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

aks07   021b187cbc  2026-03-12 16:04:32 +05:30  feat: decouple waterfall left (span tree) and right (timeline bars) panels

        Split the waterfall into two independent panels with a shared virtualizer
        so deeply nested span names are visible via horizontal scroll in the left
        panel. Left panel uses useReactTable + <table> for future column
        extensibility; right panel uses plain divs for timeline bars. A draggable
        resize handle separates the two panels.

        Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

aks07   f42b468597  2026-03-11 17:15:27 +05:30  feat: waterfall init
aks07   7e2cf57819  2026-03-11 12:24:06 +05:30  Merge branch 'main' of github.com:SigNoz/signoz into feat/trace-flamegraph
aks07   dc9ebc5b26  2026-03-11 03:31:10 +05:30  feat: add test utils
aks07   398ab6e9d9  2026-03-11 03:28:35 +05:30  feat: add test cases for flamegraph
aks07   fec60671d8  2026-03-11 03:03:49 +05:30  feat: minor comment added
aks07   99259cc4e8  2026-03-10 16:01:24 +05:30  feat: remove unnecessary props
aks07   ca311717c2  2026-03-06 23:16:35 +05:30  feat: bg color for selected and hover spans
aks07   a614da2c65  2026-03-06 20:22:23 +05:30  fix: update color
aks07   ce18709002  2026-03-06 20:03:42 +05:30  fix: style fix
aks07   2b6977e891  2026-03-06 20:01:50 +05:30  feat: reduce timeline intervals
aks07   3e6eedbcab  2026-03-06 19:57:19 +05:30  feat: fix style
aks07   fd9e3f0411  2026-03-06 19:26:23 +05:30  feat: fix style
aks07   e99465e030  2026-03-06 18:45:18 +05:30  Merge branch 'main' of github.com:SigNoz/signoz into feat/trace-flamegraph
aks07   9ad2db4b99  2026-03-06 16:00:48 +05:30  feat: scroll to selected span
aks07   07fd5f70ef  2026-03-06 12:55:34 +05:30  feat: fix timerange unit selection when zoomed
aks07   ba79121795  2026-03-06 12:49:06 +05:30  feat: temp change
aks07   6e4e419b5e  2026-03-06 10:30:41 +05:30  Merge branch 'main' of github.com:SigNoz/signoz into feat/trace-flamegraph
aks07   2f06afaf27  2026-03-05 23:25:10 +05:30  feat: handle click and hover with tooltip
aks07   f77c3cb23c  2026-03-05 22:40:06 +05:30  feat: update span colors
aks07   9e3a8efcfc  2026-03-05 18:22:55 +05:30  feat: zoom and drag added
aks07   8e325ba8b3  2026-03-05 12:31:17 +05:30  feat: added timeline v3
aks07   884f516766  2026-03-03 12:20:06 +05:30  feat: add text to spans
aks07   4bcbb4ffc3  2026-03-02 19:21:23 +05:30  feat: flamegraph canvas init
711 changed files with 19228 additions and 34159 deletions


@@ -1,22 +1,4 @@
services:
init-clickhouse:
image: clickhouse/clickhouse-server:25.5.6
container_name: init-clickhouse
command:
- bash
- -c
- |
version="v0.0.1"
node_os=$$(uname -s | tr '[:upper:]' '[:lower:]')
node_arch=$$(uname -m | sed s/aarch64/arm64/ | sed s/x86_64/amd64/)
echo "Fetching histogram-binary for $${node_os}/$${node_arch}"
cd /tmp
wget -O histogram-quantile.tar.gz "https://github.com/SigNoz/signoz/releases/download/histogram-quantile%2F$${version}/histogram-quantile_$${node_os}_$${node_arch}.tar.gz"
tar -xvzf histogram-quantile.tar.gz
mv histogram-quantile /var/lib/clickhouse/user_scripts/histogramQuantile
restart: on-failure
volumes:
- ${PWD}/fs/tmp/var/lib/clickhouse/user_scripts/:/var/lib/clickhouse/user_scripts/
clickhouse:
image: clickhouse/clickhouse-server:25.5.6
container_name: clickhouse
@@ -25,7 +7,6 @@ services:
- ${PWD}/fs/etc/clickhouse-server/users.d/users.xml:/etc/clickhouse-server/users.d/users.xml
- ${PWD}/fs/tmp/var/lib/clickhouse/:/var/lib/clickhouse/
- ${PWD}/fs/tmp/var/lib/clickhouse/user_scripts/:/var/lib/clickhouse/user_scripts/
- ${PWD}/../../../deploy/common/clickhouse/custom-function.xml:/etc/clickhouse-server/custom-function.xml
ports:
- '127.0.0.1:8123:8123'
- '127.0.0.1:9000:9000'
@@ -41,10 +22,7 @@ services:
timeout: 5s
retries: 3
depends_on:
init-clickhouse:
condition: service_completed_successfully
zookeeper:
condition: service_healthy
- zookeeper
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
zookeeper:


@@ -44,6 +44,4 @@
<shard>01</shard>
<replica>01</replica>
</macros>
<user_defined_executable_functions_config>*function.xml</user_defined_executable_functions_config>
<user_scripts_path>/var/lib/clickhouse/user_scripts/</user_scripts_path>
</clickhouse>

.github/CODEOWNERS (vendored, 19 lines changed)

@@ -1,6 +1,8 @@
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
# Owners are automatically requested for review for PRs that changes code that they own.
# Owners are automatically requested for review for PRs that changes code
# that they own.
/frontend/ @SigNoz/frontend-maintainers
@@ -9,10 +11,8 @@
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
# CI
/deploy/ @therealpandey
.github @therealpandey
go.mod @therealpandey
/deploy/ @SigNoz/devops
.github @SigNoz/devops
# Scaffold Owners
@@ -105,10 +105,6 @@ go.mod @therealpandey
/pkg/modules/authdomain/ @vikrantgupta25
/pkg/modules/role/ @vikrantgupta25
# IdentN Owners
/pkg/identn/ @vikrantgupta25
/pkg/http/middleware/identn.go @vikrantgupta25
# Integration tests
/tests/integration/ @vikrantgupta25
@@ -131,15 +127,12 @@ go.mod @therealpandey
/frontend/src/pages/DashboardsListPage/ @SigNoz/pulse-frontend
/frontend/src/container/ListOfDashboard/ @SigNoz/pulse-frontend
# Dashboard Widget Page
/frontend/src/pages/DashboardWidget/ @SigNoz/pulse-frontend
/frontend/src/container/NewWidget/ @SigNoz/pulse-frontend
## Dashboard Page
/frontend/src/pages/DashboardPage/ @SigNoz/pulse-frontend
/frontend/src/container/DashboardContainer/ @SigNoz/pulse-frontend
/frontend/src/container/GridCardLayout/ @SigNoz/pulse-frontend
/frontend/src/container/NewWidget/ @SigNoz/pulse-frontend
## Public Dashboard Page


@@ -102,3 +102,13 @@ jobs:
run: |
go run cmd/enterprise/*.go generate openapi
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
- name: node-install
uses: actions/setup-node@v5
with:
node-version: "22"
- name: install-frontend
run: cd frontend && yarn install
- name: generate-api-clients
run: |
cd frontend && yarn generate:api
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)


@@ -29,7 +29,6 @@ jobs:
- name: fmt
run: |
make py-fmt
git diff --exit-code -- tests/integration/
- name: lint
run: |
make py-lint
@@ -50,7 +49,6 @@ jobs:
- ttl
- alerts
- ingestionkeys
- rootuser
sqlstore-provider:
- postgres
- sqlite


@@ -52,16 +52,16 @@ jobs:
with:
PRIMUS_REF: main
JS_SRC: frontend
languages:
md-languages:
if: |
github.event_name == 'merge_group' ||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
runs-on: ubuntu-latest
steps:
- name: self-checkout
- name: checkout
uses: actions/checkout@v4
- name: run
- name: validate md languages
run: bash frontend/scripts/validate-md-languages.sh
authz:
if: |
@@ -70,55 +70,44 @@ jobs:
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
runs-on: ubuntu-latest
steps:
- name: self-checkout
- name: Checkout code
uses: actions/checkout@v5
- name: node-install
- name: Set up Node.js
uses: actions/setup-node@v5
with:
node-version: "22"
- name: deps-install
- name: Install frontend dependencies
working-directory: ./frontend
run: |
yarn install
- name: uv-install
- name: Install uv
uses: astral-sh/setup-uv@v5
- name: uv-deps
- name: Install Python dependencies
working-directory: ./tests/integration
run: |
uv sync
- name: setup-test
- name: Start test environment
run: |
make py-test-setup
- name: generate
- name: Generate permissions.type.ts
working-directory: ./frontend
run: |
yarn generate:permissions-type
- name: teardown-test
- name: Teardown test environment
if: always()
run: |
make py-test-teardown
- name: validate
- name: Check for changes
run: |
if ! git diff --exit-code frontend/src/hooks/useAuthZ/permissions.type.ts; then
echo "::error::frontend/src/hooks/useAuthZ/permissions.type.ts is out of date. Please run the generator locally and commit the changes: npm run generate:permissions-type (from the frontend directory)"
exit 1
fi
openapi:
if: |
github.event_name == 'merge_group' ||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
runs-on: ubuntu-latest
steps:
- name: self-checkout
uses: actions/checkout@v4
- name: node-install
uses: actions/setup-node@v5
with:
node-version: "22"
- name: install-frontend
run: cd frontend && yarn install
- name: generate-api-clients
run: |
cd frontend && yarn generate:api
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)


@@ -1,60 +0,0 @@
name: mergequeueci
on:
pull_request:
types:
- dequeued
jobs:
notify:
runs-on: ubuntu-latest
if: github.event.pull_request.merged == false
steps:
- name: alert
uses: slackapi/slack-github-action@v2.1.1
with:
webhook: ${{ secrets.SLACK_MERGE_QUEUE_WEBHOOK }}
webhook-type: incoming-webhook
payload: |
{
"text": ":x: PR removed from merge queue",
"blocks": [
{
"type": "header",
"text": {
"type": "plain_text",
"text": ":x: PR Removed from Merge Queue"
}
},
{
"type": "section",
"text": {
"type": "mrkdwn",
"text": "*<${{ github.event.pull_request.html_url }}|PR #${{ github.event.pull_request.number }}: ${{ github.event.pull_request.title }}>*"
}
},
{
"type": "divider"
},
{
"type": "section",
"fields": [
{
"type": "mrkdwn",
"text": "*Author*\n@${{ github.event.pull_request.user.login }}"
}
]
}
]
}
- name: comment
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
PR_URL: ${{ github.event.pull_request.html_url }}
run: |
gh api repos/${{ github.repository }}/issues/$PR_NUMBER/comments \
-f body="> :x: **PR removed from merge queue**
>
> @$PR_AUTHOR your PR was removed from the merge queue. Fix the issue and re-queue when ready."


@@ -35,7 +35,7 @@ linters:
- identical
sloglint:
no-mixed-args: true
attr-only: true
kv-only: true
no-global: all
context: all
static-msg: true
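
This sloglint switch from attr-only to kv-only is what drives the errors.Attr(err) -> "error", err rewrites throughout the Go diffs below. A minimal sketch of the two styles using stock log/slog (illustrative, not part of the diff; errors.Attr is a SigNoz-specific helper for the attr style):

package main

import (
	"context"
	"errors"
	"log/slog"
	"os"
)

func main() {
	logger := slog.New(slog.NewJSONHandler(os.Stdout, nil))
	ctx := context.Background()
	err := errors.New("connection refused")

	// attr-only style (previously enforced): arguments must be slog.Attr
	// values built via helpers such as slog.Any or slog.Float64.
	logger.ErrorContext(ctx, "failed to create signoz", slog.Any("error", err))

	// kv-only style (now enforced): alternating key-value arguments.
	logger.ErrorContext(ctx, "failed to create signoz", "error", err)
}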


@@ -17,7 +17,5 @@
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
},
"python-envs.defaultEnvManager": "ms-python.python:system",
"python-envs.pythonProjects": []
}
}


@@ -4,15 +4,12 @@ import (
"context"
"log/slog"
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
@@ -31,17 +28,18 @@ import (
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/SigNoz/signoz/pkg/zeus/noopzeus"
"github.com/spf13/cobra"
)
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
var configFiles []string
var flags signoz.DeprecatedFlags
serverCmd := &cobra.Command{
Use: "server",
Short: "Run the SigNoz server",
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
RunE: func(currCmd *cobra.Command, args []string) error {
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, configFiles)
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
if err != nil {
return err
}
@@ -50,7 +48,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
},
}
serverCmd.Flags().StringArrayVar(&configFiles, "config", nil, "path to a YAML configuration file (can be specified multiple times, later files override earlier ones)")
flags.RegisterFlags(serverCmd)
parentCmd.AddCommand(serverCmd)
}
@@ -92,37 +90,37 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
},
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}
server, err := app.NewServer(config, signoz)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err
}
if err := server.Start(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start server", "error", err)
return err
}
signoz.Start(ctx)
if err := signoz.Wait(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start signoz", "error", err)
return err
}
err = server.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop server", "error", err)
return err
}
err = signoz.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
return err
}


@@ -10,23 +10,18 @@ import (
"github.com/SigNoz/signoz/pkg/signoz"
)
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, configFiles []string) (signoz.Config, error) {
uris := make([]string, 0, len(configFiles)+1)
for _, f := range configFiles {
uris = append(uris, "file:"+f)
}
uris = append(uris, "env:")
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) {
config, err := signoz.NewConfig(
ctx,
logger,
config.ResolverConfig{
Uris: uris,
Uris: []string{"env:"},
ProviderFactories: []config.ProviderFactory{
envprovider.NewFactory(),
fileprovider.NewFactory(),
},
},
flags,
)
if err != nil {
return signoz.Config{}, err


@@ -1,86 +0,0 @@
package cmd
import (
"context"
"log/slog"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewSigNozConfig_NoConfigFiles(t *testing.T) {
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, nil)
require.NoError(t, err)
assert.NotZero(t, config)
}
func TestNewSigNozConfig_SingleConfigFile(t *testing.T) {
dir := t.TempDir()
configPath := filepath.Join(dir, "config.yaml")
err := os.WriteFile(configPath, []byte(`
cache:
provider: "redis"
`), 0644)
require.NoError(t, err)
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, []string{configPath})
require.NoError(t, err)
assert.Equal(t, "redis", config.Cache.Provider)
}
func TestNewSigNozConfig_MultipleConfigFiles_LaterOverridesEarlier(t *testing.T) {
dir := t.TempDir()
basePath := filepath.Join(dir, "base.yaml")
err := os.WriteFile(basePath, []byte(`
cache:
provider: "memory"
sqlstore:
provider: "sqlite"
`), 0644)
require.NoError(t, err)
overridePath := filepath.Join(dir, "override.yaml")
err = os.WriteFile(overridePath, []byte(`
cache:
provider: "redis"
`), 0644)
require.NoError(t, err)
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, []string{basePath, overridePath})
require.NoError(t, err)
// Later file overrides earlier
assert.Equal(t, "redis", config.Cache.Provider)
// Value from base file that wasn't overridden persists
assert.Equal(t, "sqlite", config.SQLStore.Provider)
}
func TestNewSigNozConfig_EnvOverridesConfigFile(t *testing.T) {
dir := t.TempDir()
configPath := filepath.Join(dir, "config.yaml")
err := os.WriteFile(configPath, []byte(`
cache:
provider: "fromfile"
`), 0644)
require.NoError(t, err)
t.Setenv("SIGNOZ_CACHE_PROVIDER", "fromenv")
logger := slog.New(slog.DiscardHandler)
config, err := NewSigNozConfig(context.Background(), logger, []string{configPath})
require.NoError(t, err)
// Env should override file
assert.Equal(t, "fromenv", config.Cache.Provider)
}
func TestNewSigNozConfig_NonexistentFile(t *testing.T) {
logger := slog.New(slog.DiscardHandler)
_, err := NewSigNozConfig(context.Background(), logger, []string{"/nonexistent/config.yaml"})
assert.Error(t, err)
}


@@ -5,18 +5,16 @@ import (
"log/slog"
"time"
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
eequerier "github.com/SigNoz/signoz/ee/querier"
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
eequerier "github.com/SigNoz/signoz/ee/querier"
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
@@ -25,7 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/gateway"
"github.com/SigNoz/signoz/pkg/licensing"
@@ -41,17 +38,18 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/version"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/spf13/cobra"
)
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
var configFiles []string
var flags signoz.DeprecatedFlags
serverCmd := &cobra.Command{
Use: "server",
Short: "Run the SigNoz server",
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
RunE: func(currCmd *cobra.Command, args []string) error {
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, configFiles)
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
if err != nil {
return err
}
@@ -60,7 +58,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
},
}
serverCmd.Flags().StringArrayVar(&configFiles, "config", nil, "path to a YAML configuration file (can be specified multiple times, later files override earlier ones)")
flags.RegisterFlags(serverCmd)
parentCmd.AddCommand(serverCmd)
}
@@ -71,7 +69,7 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
// add enterprise sqlstore factories to the community sqlstore factories
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", errors.Attr(err))
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
return err
}
@@ -134,37 +132,37 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
)
if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}
server, err := enterpriseapp.NewServer(config, signoz)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err
}
if err := server.Start(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start server", "error", err)
return err
}
signoz.Start(ctx)
if err := signoz.Wait(ctx); err != nil {
logger.ErrorContext(ctx, "failed to start signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to start signoz", "error", err)
return err
}
err = server.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop server", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop server", "error", err)
return err
}
err = signoz.Stop(ctx)
if err != nil {
logger.ErrorContext(ctx, "failed to stop signoz", errors.Attr(err))
logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
return err
}


@@ -4,10 +4,9 @@ import (
"log/slog"
"os"
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/version"
"github.com/spf13/cobra"
"go.uber.org/zap" //nolint:depguard
)
var RootCmd = &cobra.Command{
@@ -20,9 +19,15 @@ var RootCmd = &cobra.Command{
}
func Execute(logger *slog.Logger) {
zapLogger := newZapLogger()
zap.ReplaceGlobals(zapLogger)
defer func() {
_ = zapLogger.Sync()
}()
err := RootCmd.Execute()
if err != nil {
logger.ErrorContext(RootCmd.Context(), "error running command", errors.Attr(err))
logger.ErrorContext(RootCmd.Context(), "error running command", "error", err)
os.Exit(1)
}
}

cmd/zap.go (new file, 110 lines)

@@ -0,0 +1,110 @@
package cmd
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"go.uber.org/zap" //nolint:depguard
"go.uber.org/zap/zapcore" //nolint:depguard
)
// Deprecated: Use `NewLogger` from `pkg/instrumentation` instead.
func newZapLogger() *zap.Logger {
config := zap.NewProductionConfig()
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
// Extract sampling config before building the logger.
// We need to disable sampling in the config and apply it manually later
// to ensure correct core ordering. See filteringCore documentation for details.
samplerConfig := config.Sampling
config.Sampling = nil
logger, _ := config.Build()
// Wrap with custom core wrapping to filter certain log entries.
// The order of wrapping is important:
// 1. First wrap with filteringCore
// 2. Then wrap with sampler
//
// This creates the call chain: sampler -> filteringCore -> ioCore
//
// During logging:
// - sampler.Check decides whether to sample the log entry
// - If sampled, filteringCore.Check is called
// - filteringCore adds itself to CheckedEntry.cores
// - All cores in CheckedEntry.cores have their Write method called
// - filteringCore.Write can now filter the entry before passing to ioCore
//
// If we didn't disable the sampler above, filteringCore would have wrapped
// sampler. By calling sampler.Check we would have allowed it to call
// ioCore.Check that adds itself to CheckedEntry.cores. Then ioCore.Write
// would have bypassed our checks, making filtering impossible.
return logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
core = &filteringCore{core}
if samplerConfig != nil {
core = zapcore.NewSamplerWithOptions(
core,
time.Second,
samplerConfig.Initial,
samplerConfig.Thereafter,
)
}
return core
}))
}
// filteringCore wraps a zapcore.Core to filter out log entries based on a
// custom logic.
//
// Note: This core must be positioned before the sampler in the core chain
// to ensure Write is called. See newZapLogger for ordering details.
type filteringCore struct {
zapcore.Core
}
// filter determines whether a log entry should be written based on its fields.
// Returns false if the entry should be suppressed, true otherwise.
//
// Current filters:
// - context.Canceled: These are expected errors from cancelled operations,
// and create noise in logs.
func (c *filteringCore) filter(fields []zapcore.Field) bool {
for _, field := range fields {
if field.Type == zapcore.ErrorType {
if loggedErr, ok := field.Interface.(error); ok {
// Suppress logs containing context.Canceled errors
if errors.Is(loggedErr, context.Canceled) {
return false
}
}
}
}
return true
}
// With implements zapcore.Core.With
// It returns a new copy with the added context.
func (c *filteringCore) With(fields []zapcore.Field) zapcore.Core {
return &filteringCore{c.Core.With(fields)}
}
// Check implements zapcore.Core.Check.
// It adds this core to the CheckedEntry if the log level is enabled,
// ensuring that Write will be called for this entry.
func (c *filteringCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
if c.Enabled(ent.Level) {
return ce.AddCore(ent, c)
}
return ce
}
// Write implements zapcore.Core.Write.
// It filters log entries based on their fields before delegating to the wrapped core.
func (c *filteringCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
if !c.filter(fields) {
return nil
}
return c.Core.Write(ent, fields)
}
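
A usage sketch of the filtering behaviour (illustrative, not part of the diff; assumes it sits next to newZapLogger in package cmd): entries whose error field wraps context.Canceled are dropped in filteringCore.Write, everything else reaches the wrapped core.

package cmd

import (
	"context"
	"fmt"
	"testing"

	"go.uber.org/zap" //nolint:depguard
)

func TestFilteringCoreSketch(t *testing.T) {
	logger := newZapLogger()
	defer func() { _ = logger.Sync() }()

	// Suppressed: zap.Error produces a zapcore.ErrorType field, and the
	// wrapped error matches context.Canceled via errors.Is.
	logger.Error("query aborted", zap.Error(fmt.Errorf("fetch spans: %w", context.Canceled)))

	// Written: any other error passes the filter.
	logger.Error("query failed", zap.Error(fmt.Errorf("disk full")))
}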

conf/cache-config.yml (new file, 4 lines)

@@ -0,0 +1,4 @@
provider: "inmemory"
inmemory:
ttl: 60m
cleanupInterval: 10m


@@ -39,13 +39,6 @@ instrumentation:
host: "0.0.0.0"
port: 9090
##################### PProf #####################
pprof:
# Whether to enable the pprof server.
enabled: true
# The address on which the pprof server listens.
address: 0.0.0.0:6060
##################### Web #####################
web:
# Whether to enable the web frontend
@@ -315,9 +308,6 @@ user:
allow_self: true
# The duration within which a user can reset their password.
max_token_lifetime: 6h
invite:
# The duration within which a user can accept their invite.
max_token_lifetime: 48h
root:
# Whether to enable the root user. When enabled, a root user is provisioned
# on startup using the email and password below. The root user cannot be
@@ -331,22 +321,3 @@ user:
org:
name: default
id: 00000000-0000-0000-0000-000000000000
##################### IdentN #####################
identn:
tokenizer:
# toggle tokenizer identN
enabled: true
# headers to use for tokenizer identN resolver
headers:
- Authorization
- Sec-WebSocket-Protocol
apikey:
# toggle apikey identN
enabled: true
# headers to use for apikey identN resolver
headers:
- SIGNOZ-API-KEY
impersonation:
# toggle impersonation identN, when enabled, all requests will impersonate the root user
enabled: false

conf/prometheus.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
# my global config
global:
scrape_interval: 5s # Set the scrape interval to every 5 seconds. Default is every 1 minute.
evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
# scrape_timeout is set to the global default (10s).
# Alertmanager configuration
alerting:
alertmanagers:
- static_configs:
- targets:
- 127.0.0.1:9093
# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
rule_files:
# - "first_rules.yml"
# - "second_rules.yml"
- 'alerts.yml'
# A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself.
scrape_configs: []
remote_read:
- url: tcp://localhost:9000/signoz_metrics


@@ -190,7 +190,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.116.1
image: signoz/signoz:v0.114.1
ports:
- "8080:8080" # signoz port
# - "6060:6060" # pprof port


@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.116.1
image: signoz/signoz:v0.114.1
ports:
- "8080:8080" # signoz port
volumes:


@@ -0,0 +1,38 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
deploy:
restart_policy:
condition: on-failure
services:
hotrod:
<<: *common
image: jaegertracing/example-hotrod:1.61.0
command: [ "all" ]
environment:
- OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 # In case of external SigNoz or cloud, update the endpoint and access token
load-hotrod:
<<: *common
image: "signoz/locust:1.2.3"
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../../../common/locust-scripts:/locust
networks:
signoz-net:
name: signoz-net
external: true


@@ -0,0 +1,69 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
deploy:
mode: global
restart_policy:
condition: on-failure
services:
otel-agent:
<<: *common
image: otel/opentelemetry-collector-contrib:0.111.0
command:
- --config=/etc/otel-collector-config.yaml
volumes:
- ./otel-agent-config.yaml:/etc/otel-collector-config.yaml
- /:/hostfs:ro
environment:
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
# Before exposing the ports, make sure the ports are not used by other services
# ports:
# - "4317:4317"
# - "4318:4318"
otel-metrics:
<<: *common
image: otel/opentelemetry-collector-contrib:0.111.0
user: 0:0 # If you have security concerns, you can replace this with your `UID:GID` that has necessary permissions to docker.sock
command:
- --config=/etc/otel-collector-config.yaml
volumes:
- ./otel-metrics-config.yaml:/etc/otel-collector-config.yaml
- /var/run/docker.sock:/var/run/docker.sock
environment:
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
# Before exposing the ports, make sure the ports are not used by other services
# ports:
# - "4317:4317"
# - "4318:4318"
deploy:
mode: replicated
replicas: 1
placement:
constraints:
- node.role == manager
logspout:
<<: *common
image: "gliderlabs/logspout:v3.2.14"
command: syslog+tcp://otel-agent:2255
user: root
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
- otel-agent
networks:
signoz-net:
name: signoz-net
external: true


@@ -0,0 +1,102 @@
receivers:
hostmetrics:
collection_interval: 30s
root_path: /hostfs
scrapers:
cpu: {}
load: {}
memory: {}
disk: {}
filesystem: {}
network: {}
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-agent
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-agent
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^(signoz_(logspout|signoz|otel-collector|clickhouse|zookeeper))|(infra_(logspout|otel-agent|otel-metrics)).*"'
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors:
# - ec2
# - gcp
# - azure
- env
- system
timeout: 2s
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
otlp:
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
tls:
insecure: true
headers:
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
pipelines:
traces:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/hostmetrics:
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/prometheus:
receivers: [prometheus]
processors: [resourcedetection, batch]
exporters: [otlp]
logs:
receivers: [otlp, tcplog/docker]
processors: [resourcedetection, batch]
exporters: [otlp]


@@ -0,0 +1,103 @@
receivers:
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-metrics
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-metrics
# For Docker daemon metrics to be scraped, it must be configured to expose
# Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
# - job_name: docker-daemon
# dockerswarm_sd_configs:
# - host: unix:///var/run/docker.sock
# role: nodes
# relabel_configs:
# - source_labels: [__meta_dockerswarm_node_address]
# target_label: __address__
# replacement: $1:9323
- job_name: "dockerswarm"
dockerswarm_sd_configs:
- host: unix:///var/run/docker.sock
role: tasks
relabel_configs:
- action: keep
regex: running
source_labels:
- __meta_dockerswarm_task_desired_state
- action: keep
regex: true
source_labels:
- __meta_dockerswarm_service_label_signoz_io_scrape
- regex: ([^:]+)(?::\d+)?
replacement: $1
source_labels:
- __address__
target_label: swarm_container_ip
- separator: .
source_labels:
- __meta_dockerswarm_service_name
- __meta_dockerswarm_task_slot
- __meta_dockerswarm_task_id
target_label: swarm_container_name
- target_label: __address__
source_labels:
- swarm_container_ip
- __meta_dockerswarm_service_label_signoz_io_port
separator: ":"
- source_labels:
- __meta_dockerswarm_service_label_signoz_io_path
target_label: __metrics_path__
- source_labels:
- __meta_dockerswarm_service_label_com_docker_stack_namespace
target_label: namespace
- source_labels:
- __meta_dockerswarm_service_name
target_label: service_name
- source_labels:
- __meta_dockerswarm_task_id
target_label: service_instance_id
- source_labels:
- __meta_dockerswarm_node_hostname
target_label: host_name
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
detectors:
- env
- system
timeout: 2s
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
otlp:
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
tls:
insecure: true
headers:
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
pipelines:
metrics:
receivers: [prometheus]
processors: [resourcedetection, batch]
exporters: [otlp]


@@ -181,7 +181,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.116.1}
image: signoz/signoz:${VERSION:-v0.114.1}
container_name: signoz
ports:
- "8080:8080" # signoz port


@@ -109,7 +109,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.116.1}
image: signoz/signoz:${VERSION:-v0.114.1}
container_name: signoz
ports:
- "8080:8080" # signoz port


@@ -0,0 +1,39 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
restart: unless-stopped
services:
hotrod:
<<: *common
image: jaegertracing/example-hotrod:1.61.0
container_name: hotrod
command: [ "all" ]
environment:
- OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 # In case of external SigNoz or cloud, update the endpoint and access token
# - OTEL_OTLP_HEADERS=signoz-access-token=<your-access-token>
load-hotrod:
<<: *common
image: "signoz/locust:1.2.3"
container_name: load-hotrod
environment:
ATTACKED_HOST: http://hotrod:8080
LOCUST_MODE: standalone
NO_PROXY: standalone
TASK_DELAY_FROM: 5
TASK_DELAY_TO: 30
QUIET_MODE: "${QUIET_MODE:-false}"
LOCUST_OPTS: "--headless -u 10 -r 1"
volumes:
- ../../../common/locust-scripts:/locust
networks:
signoz-net:
name: signoz-net
external: true


@@ -0,0 +1,43 @@
version: "3"
x-common: &common
networks:
- signoz-net
extra_hosts:
- host.docker.internal:host-gateway
logging:
options:
max-size: 50m
max-file: "3"
restart: unless-stopped
services:
otel-agent:
<<: *common
image: otel/opentelemetry-collector-contrib:0.111.0
command:
- --config=/etc/otel-collector-config.yaml
volumes:
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
- /:/hostfs:ro
- /var/run/docker.sock:/var/run/docker.sock
environment:
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux # Replace signoz-host with the actual hostname
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
# Before exposing the ports, make sure the ports are not used by other services
# ports:
# - "4317:4317"
# - "4318:4318"
logspout:
<<: *common
image: "gliderlabs/logspout:v3.2.14"
volumes:
- /etc/hostname:/etc/host_hostname:ro
- /var/run/docker.sock:/var/run/docker.sock
command: syslog+tcp://otel-agent:2255
depends_on:
- otel-agent
networks:
signoz-net:
name: signoz-net
external: true


@@ -0,0 +1,139 @@
receivers:
hostmetrics:
collection_interval: 30s
root_path: /hostfs
scrapers:
cpu: {}
load: {}
memory: {}
disk: {}
filesystem: {}
network: {}
otlp:
protocols:
grpc:
endpoint: 0.0.0.0:4317
http:
endpoint: 0.0.0.0:4318
prometheus:
config:
global:
scrape_interval: 60s
scrape_configs:
- job_name: otel-collector
static_configs:
- targets:
- localhost:8888
labels:
job_name: otel-collector
# For Docker daemon metrics to be scraped, it must be configured to expose
# Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
# - job_name: docker-daemon
# static_configs:
# - targets:
# - host.docker.internal:9323
# labels:
# job_name: docker-daemon
- job_name: docker-container
docker_sd_configs:
- host: unix:///var/run/docker.sock
relabel_configs:
- action: keep
regex: true
source_labels:
- __meta_docker_container_label_signoz_io_scrape
- regex: true
source_labels:
- __meta_docker_container_label_signoz_io_path
target_label: __metrics_path__
- regex: (.+)
source_labels:
- __meta_docker_container_label_signoz_io_path
target_label: __metrics_path__
- separator: ":"
source_labels:
- __meta_docker_network_ip
- __meta_docker_container_label_signoz_io_port
target_label: __address__
- regex: '/(.*)'
replacement: '$1'
source_labels:
- __meta_docker_container_name
target_label: container_name
- regex: __meta_docker_container_label_signoz_io_(.+)
action: labelmap
replacement: $1
tcplog/docker:
listen_address: "0.0.0.0:2255"
operators:
- type: regex_parser
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
timestamp:
parse_from: attributes.timestamp
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
- type: move
from: attributes["body"]
to: body
- type: remove
field: attributes.timestamp
# please remove names from below if you want to collect logs from them
- type: filter
id: signoz_logs_filter
expr: 'attributes.container_name matches "^signoz|(signoz-(|otel-collector|clickhouse|zookeeper))|(infra-(logspout|otel-agent)-.*)"'
processors:
batch:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors:
# - ec2
# - gcp
# - azure
- env
- system
timeout: 2s
extensions:
health_check:
endpoint: 0.0.0.0:13133
pprof:
endpoint: 0.0.0.0:1777
exporters:
otlp:
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
tls:
insecure: true
headers:
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
# debug: {}
service:
telemetry:
logs:
encoding: json
metrics:
address: 0.0.0.0:8888
extensions:
- health_check
- pprof
pipelines:
traces:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics:
receivers: [otlp]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/hostmetrics:
receivers: [hostmetrics]
processors: [resourcedetection, batch]
exporters: [otlp]
metrics/prometheus:
receivers: [prometheus]
processors: [resourcedetection, batch]
exporters: [otlp]
logs:
receivers: [otlp, tcplog/docker]
processors: [resourcedetection, batch]
exporters: [otlp]

(File diff suppressed because it is too large.)


@@ -123,7 +123,6 @@ if err := router.Handle("/api/v1/things", handler.New(
Description: "This endpoint creates a thing",
Request: new(types.PostableThing),
RequestContentType: "application/json",
RequestQuery: new(types.QueryableThing),
Response: new(types.GettableThing),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusCreated,
@@ -156,8 +155,6 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAP
- **Request / RequestContentType**:
- `Request` is a Go type that describes the request body or form.
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestQuery**:
- `RequestQuery` is a Go type that describes query URL params.
- **RequestExamples**: An array of `handler.OpenAPIExample` that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
- `Response` is the Go type for the successful response payload.


@@ -273,7 +273,6 @@ Options can be simple (direct link) or nested (with another question):
- Place logo files in `public/Logos/`
- Use SVG format
- Reference as `"/Logos/your-logo.svg"`
- **Fetching Icons**: New icons can be easily fetched from [OpenBrand](https://openbrand.sh/). Use the pattern `https://openbrand.sh/?url=<TARGET_URL>`, where `<TARGET_URL>` is the URL-encoded link to the service's website. For example, to get Render's logo, use [https://openbrand.sh/?url=https%3A%2F%2Frender.com](https://openbrand.sh/?url=https%3A%2F%2Frender.com).
- **Optimize new SVGs**: Run any newly downloaded SVGs through an optimizer like [SVGOMG (svgo)](https://svgomg.net/) or use `npx svgo public/Logos/your-logo.svg` to minimise their size before committing.
### 4. Links


@@ -74,37 +74,37 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
instrumentationtypes.CodeFunctionName: "getResults",
})
// TODO(srikanthccv): parallelize this?
p.logger.InfoContext(ctx, "fetching results for current period", slog.Any("anomaly_current_period_query", params.CurrentPeriodQuery))
p.logger.InfoContext(ctx, "fetching results for current period", "anomaly_current_period_query", params.CurrentPeriodQuery)
currentPeriodResults, err := p.querier.QueryRange(ctx, orgID, &params.CurrentPeriodQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past period", slog.Any("anomaly_past_period_query", params.PastPeriodQuery))
p.logger.InfoContext(ctx, "fetching results for past period", "anomaly_past_period_query", params.PastPeriodQuery)
pastPeriodResults, err := p.querier.QueryRange(ctx, orgID, &params.PastPeriodQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for current season", slog.Any("anomaly_current_season_query", params.CurrentSeasonQuery))
p.logger.InfoContext(ctx, "fetching results for current season", "anomaly_current_season_query", params.CurrentSeasonQuery)
currentSeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.CurrentSeasonQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past season", slog.Any("anomaly_past_season_query", params.PastSeasonQuery))
p.logger.InfoContext(ctx, "fetching results for past season", "anomaly_past_season_query", params.PastSeasonQuery)
pastSeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.PastSeasonQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past 2 season", slog.Any("anomaly_past_2season_query", params.Past2SeasonQuery))
p.logger.InfoContext(ctx, "fetching results for past 2 season", "anomaly_past_2season_query", params.Past2SeasonQuery)
past2SeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.Past2SeasonQuery)
if err != nil {
return nil, err
}
p.logger.InfoContext(ctx, "fetching results for past 3 season", slog.Any("anomaly_past_3season_query", params.Past3SeasonQuery))
p.logger.InfoContext(ctx, "fetching results for past 3 season", "anomaly_past_3season_query", params.Past3SeasonQuery)
past3SeasonResults, err := p.querier.QueryRange(ctx, orgID, &params.Past3SeasonQuery)
if err != nil {
return nil, err
@@ -212,17 +212,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
if predictedValue < 0 {
// this should not happen (except when the data has extreme outliers)
// we will use the moving avg of the previous period series in this case
p.logger.WarnContext(ctx, "predicted value is less than 0 for series", slog.Float64("anomaly_predicted_value", predictedValue), slog.Any("anomaly_labels", series.Labels))
p.logger.WarnContext(ctx, "predicted value is less than 0 for series", "anomaly_predicted_value", predictedValue, "anomaly_labels", series.Labels)
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
}
p.logger.DebugContext(ctx, "predicted value for series",
slog.Float64("anomaly_moving_avg", movingAvg),
slog.Float64("anomaly_avg", avg),
slog.Float64("anomaly_mean", mean),
slog.Any("anomaly_labels", series.Labels),
slog.Float64("anomaly_predicted_value", predictedValue),
slog.Float64("anomaly_curr", curr.Value),
"anomaly_moving_avg", movingAvg,
"anomaly_avg", avg,
"anomaly_mean", mean,
"anomaly_labels", series.Labels,
"anomaly_predicted_value", predictedValue,
"anomaly_curr", curr.Value,
)
predictedSeries.Values = append(predictedSeries.Values, &qbtypes.TimeSeriesValue{
Timestamp: curr.Timestamp,
@@ -412,7 +412,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
stdDev := p.getStdDev(currentSeasonSeries)
p.logger.InfoContext(ctx, "calculated standard deviation for series", slog.Float64("anomaly_std_dev", stdDev), slog.Any("anomaly_labels", series.Labels))
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
prevSeriesAvg := p.getAvg(pastPeriodSeries)
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
@@ -420,12 +420,12 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
p.logger.InfoContext(ctx, "calculated mean for series",
slog.Float64("anomaly_prev_series_avg", prevSeriesAvg),
slog.Float64("anomaly_current_season_series_avg", currentSeasonSeriesAvg),
slog.Float64("anomaly_past_season_series_avg", pastSeasonSeriesAvg),
slog.Float64("anomaly_past_2season_series_avg", past2SeasonSeriesAvg),
slog.Float64("anomaly_past_3season_series_avg", past3SeasonSeriesAvg),
slog.Any("anomaly_labels", series.Labels),
"anomaly_prev_series_avg", prevSeriesAvg,
"anomaly_current_season_series_avg", currentSeasonSeriesAvg,
"anomaly_past_season_series_avg", pastSeasonSeriesAvg,
"anomaly_past_2season_series_avg", past2SeasonSeriesAvg,
"anomaly_past_3season_series_avg", past3SeasonSeriesAvg,
"anomaly_labels", series.Labels,
)
predictedSeries := p.getPredictedSeries(


@@ -3,7 +3,6 @@ package oidccallbackauthn
import (
"context"
"fmt"
"log/slog"
"net/url"
"github.com/SigNoz/signoz/pkg/authn"
@@ -151,7 +150,7 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
// Some IDPs return a single group as a string instead of an array
groups = append(groups, g)
default:
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", slog.String("type", fmt.Sprintf("%T", claimValue)))
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
}
}
}

View File

@@ -13,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -22,7 +23,7 @@ type provider struct {
pkgAuthzService authz.AuthZ
openfgaServer *openfgaserver.Server
licensing licensing.Licensing
store authtypes.RoleStore
store roletypes.Store
registry []authz.RegisterTypeable
}
@@ -57,10 +58,6 @@ func (provider *provider) Start(ctx context.Context) error {
return provider.openfgaServer.Start(ctx)
}
func (provider *provider) Healthy() <-chan struct{} {
return provider.openfgaServer.Healthy()
}
func (provider *provider) Stop(ctx context.Context) error {
return provider.openfgaServer.Stop(ctx)
}
@@ -85,23 +82,23 @@ func (provider *provider) Write(ctx context.Context, additions []*openfgav1.Tupl
return provider.openfgaServer.Write(ctx, additions, deletions)
}
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.Role, error) {
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
return provider.pkgAuthzService.Get(ctx, orgID, id)
}
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*authtypes.Role, error) {
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
}
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*authtypes.Role, error) {
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.List(ctx, orgID)
}
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*authtypes.Role, error) {
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
}
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*authtypes.Role, error) {
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.Role, error) {
return provider.pkgAuthzService.ListByOrgIDAndIDs(ctx, orgID, ids)
}
@@ -117,7 +114,7 @@ func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names [
return provider.pkgAuthzService.Revoke(ctx, orgID, names, subject)
}
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*authtypes.Role) error {
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
}
@@ -139,16 +136,16 @@ func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context,
return provider.Write(ctx, tuples, nil)
}
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) error {
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) (*authtypes.Role, error) {
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
@@ -162,10 +159,10 @@ func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, ro
}
if existingRole != nil {
return authtypes.NewRoleFromStorableRole(existingRole), nil
return roletypes.NewRoleFromStorableRole(existingRole), nil
}
err = provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
if err != nil {
return nil, err
}
@@ -220,13 +217,13 @@ func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id
return objects, nil
}
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) error {
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
_, err := provider.licensing.GetActive(ctx, orgID)
if err != nil {
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
return provider.store.Update(ctx, orgID, authtypes.NewStorableRoleFromRole(role))
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
@@ -235,12 +232,12 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
}
additionTuples, err := authtypes.GetAdditionTuples(name, orgID, relation, additions)
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
if err != nil {
return err
}
deletionTuples, err := authtypes.GetDeletionTuples(name, orgID, relation, deletions)
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
if err != nil {
return err
}
@@ -264,7 +261,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
return err
}
role := authtypes.NewRoleFromStorableRole(storableRole)
role := roletypes.NewRoleFromStorableRole(storableRole)
err = role.ErrIfManaged()
if err != nil {
return err
@@ -274,7 +271,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
}
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
return []authtypes.Typeable{authtypes.TypeableRole, authtypes.TypeableResourcesRoles}
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
@@ -286,7 +283,7 @@ func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID va
adminSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAdminRoleName),
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
},
orgID,
)
@@ -301,7 +298,7 @@ func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID va
anonymousSubject,
authtypes.RelationAssignee,
[]authtypes.Selector{
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAnonymousRoleName),
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
},
orgID,
)

View File

@@ -2,45 +2,39 @@ module base
type organisation
relations
define read: [user, serviceaccount, role#assignee]
define update: [user, serviceaccount, role#assignee]
define read: [user, role#assignee]
define update: [user, role#assignee]
type user
relations
define read: [user, serviceaccount, role#assignee]
define update: [user, serviceaccount, role#assignee]
define delete: [user, serviceaccount, role#assignee]
type serviceaccount
relations
define read: [user, serviceaccount, role#assignee]
define update: [user, serviceaccount, role#assignee]
define delete: [user, serviceaccount, role#assignee]
define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
type anonymous
type role
relations
define assignee: [user, serviceaccount, anonymous]
define assignee: [user, anonymous]
define read: [user, serviceaccount, role#assignee]
define update: [user, serviceaccount, role#assignee]
define delete: [user, serviceaccount, role#assignee]
define read: [user, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
type metaresources
relations
define create: [user, serviceaccount, role#assignee]
define list: [user, serviceaccount, role#assignee]
define create: [user, role#assignee]
define list: [user, role#assignee]
type metaresource
relations
define read: [user, serviceaccount, anonymous, role#assignee]
define update: [user, serviceaccount, role#assignee]
define delete: [user, serviceaccount, role#assignee]
define read: [user, anonymous, role#assignee]
define update: [user, role#assignee]
define delete: [user, role#assignee]
define block: [user, serviceaccount, role#assignee]
define block: [user, role#assignee]
type telemetryresource
relations
define read: [user, serviceaccount, role#assignee]
define read: [user, role#assignee]

View File

@@ -16,6 +16,7 @@ type Server struct {
}
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
return &Server{
pkgAuthzService: pkgAuthzService,
}, nil
@@ -25,10 +26,6 @@ func (server *Server) Start(ctx context.Context) error {
return server.pkgAuthzService.Start(ctx)
}
func (server *Server) Healthy() <-chan struct{} {
return server.pkgAuthzService.Healthy()
}
func (server *Server) Stop(ctx context.Context) error {
return server.pkgAuthzService.Stop(ctx)
}

View File

@@ -3,11 +3,8 @@ package httplicensing
import (
"context"
"encoding/json"
"log/slog"
"time"
"github.com/tidwall/gjson"
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/errors"
@@ -19,6 +16,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/SigNoz/signoz/pkg/zeus"
"github.com/tidwall/gjson"
)
type provider struct {
@@ -57,7 +55,7 @@ func (provider *provider) Start(ctx context.Context) error {
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
for {
@@ -67,7 +65,7 @@ func (provider *provider) Start(ctx context.Context) error {
case <-tick.C:
err := provider.Validate(ctx)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
}
}
}
@@ -135,7 +133,7 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
if errors.Ast(err, errors.TypeNotFound) {
return nil
}
provider.settings.Logger().ErrorContext(ctx, "license validation failed", slog.String("org_id", organizationID.StringValue()))
provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
return err
}
@@ -200,10 +198,7 @@ func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UU
response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
if err != nil {
if errors.Ast(err, errors.TypeAlreadyExists) {
return nil, errors.WithAdditionalf(err, "checkout has already been completed for this account. Please click 'Refresh Status' to sync your subscription")
}
return nil, err
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session")
}
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
@@ -222,7 +217,7 @@ func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID
response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
if err != nil {
return nil, err
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session")
}
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil

View File

@@ -19,6 +19,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -223,7 +224,7 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
return map[string][]*authtypes.Transaction{
authtypes.SigNozAnonymousRoleName: {
roletypes.SigNozAnonymousRoleName: {
{
ID: valuer.GenerateUUID(),
Relation: authtypes.RelationRead,

View File

@@ -6,6 +6,7 @@ import (
"encoding/json"
"io"
"net/http"
"runtime/debug"
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
@@ -53,6 +54,26 @@ func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
return
}
defer func() {
if r := recover(); r != nil {
stackTrace := string(debug.Stack())
queryJSON, _ := json.Marshal(queryRangeRequest)
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
"error", r,
"user", claims.UserID,
"payload", string(queryJSON),
"stacktrace", stackTrace,
)
render.Error(rw, errors.NewInternalf(
errors.CodeInternal,
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
))
}
}()
if err := queryRangeRequest.Validate(); err != nil {
render.Error(rw, err)
return

View File

@@ -2,7 +2,6 @@ package anomaly
import (
"context"
"log/slog"
"math"
"time"
@@ -14,6 +13,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
var (
@@ -67,7 +67,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
instrumentationtypes.CodeNamespace: "anomaly",
instrumentationtypes.CodeFunctionName: "getResults",
})
slog.InfoContext(ctx, "fetching results for current period", "current_period_query", params.CurrentPeriodQuery)
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
if err != nil {
return nil, err
@@ -78,7 +78,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
return nil, err
}
slog.InfoContext(ctx, "fetching results for past period", "past_period_query", params.PastPeriodQuery)
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
if err != nil {
return nil, err
@@ -89,7 +89,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
return nil, err
}
slog.InfoContext(ctx, "fetching results for current season", "current_season_query", params.CurrentSeasonQuery)
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
if err != nil {
return nil, err
@@ -100,7 +100,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
return nil, err
}
slog.InfoContext(ctx, "fetching results for past season", "past_season_query", params.PastSeasonQuery)
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
if err != nil {
return nil, err
@@ -111,7 +111,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
return nil, err
}
slog.InfoContext(ctx, "fetching results for past 2 season", "past_2_season_query", params.Past2SeasonQuery)
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
if err != nil {
return nil, err
@@ -122,7 +122,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
return nil, err
}
slog.InfoContext(ctx, "fetching results for past 3 season", "past_3_season_query", params.Past3SeasonQuery)
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
if err != nil {
return nil, err
@@ -235,17 +235,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
if predictedValue < 0 {
// this should not happen (except when the data has extreme outliers)
// we will use the moving avg of the previous period series in this case
slog.Warn("predicted value is less than 0", "predicted_value", predictedValue, "labels", series.Labels)
zap.L().Warn("predictedValue is less than 0", zap.Float64("predictedValue", predictedValue), zap.Any("labels", series.Labels))
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
}
slog.Debug("predicted series",
"moving_avg", movingAvg,
"avg", avg,
"mean", mean,
"labels", series.Labels,
"predicted_value", predictedValue,
"curr", curr.Value,
zap.L().Debug("predictedSeries",
zap.Float64("movingAvg", movingAvg),
zap.Float64("avg", avg),
zap.Float64("mean", mean),
zap.Any("labels", series.Labels),
zap.Float64("predictedValue", predictedValue),
zap.Float64("curr", curr.Value),
)
predictedSeries.Points = append(predictedSeries.Points, v3.Point{
Timestamp: curr.Timestamp,
@@ -418,7 +418,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
for _, series := range result.Series {
stdDev := p.getStdDev(series)
slog.InfoContext(ctx, "computed standard deviation", "std_dev", stdDev, "labels", series.Labels)
zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
@@ -431,7 +431,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
slog.InfoContext(ctx, "computed averages", "prev_series_avg", prevSeriesAvg, "current_season_series_avg", currentSeasonSeriesAvg, "past_season_series_avg", pastSeasonSeriesAvg, "past_2_season_series_avg", past2SeasonSeriesAvg, "past_3_season_series_avg", past3SeasonSeriesAvg, "labels", series.Labels)
zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
predictedSeries := p.getPredictedSeries(
series,

View File

@@ -10,8 +10,6 @@ import (
"strings"
"time"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -20,6 +18,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/gorilla/mux"
"go.uber.org/zap"
)
type CloudIntegrationConnectionParamsResponse struct {
@@ -72,7 +71,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
// Return the API Key (PAT) even if the rest of the params can not be deduced.
// Params not returned from here will be requested from the user via form inputs.
// This enables gracefully degraded but working experience even for non-cloud deployments.
slog.InfoContext(r.Context(), "ingestion params and signoz api url can not be deduced since no license was found")
zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
ah.Respond(w, result)
return
}
@@ -104,7 +103,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
result.IngestionKey = ingestionKey
} else {
slog.InfoContext(r.Context(), "ingestion key can't be deduced since no gateway url has been configured")
zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
}
ah.Respond(w, result)
@@ -127,7 +126,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
))
}
allPats, err := ah.Signoz.Modules.UserSetter.ListAPIKeys(ctx, orgIdUUID)
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't list PATs: %w", err,
@@ -139,8 +138,9 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
}
}
slog.InfoContext(ctx, "no PAT found for cloud integration, creating a new one",
"cloud_provider", cloudProvider,
zap.L().Info(
"no PAT found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
)
newPAT, err := types.NewStorableAPIKey(
@@ -155,7 +155,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
))
}
err = ah.Signoz.Modules.UserSetter.CreateAPIKey(ctx, newPAT)
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
if err != nil {
return "", basemodel.InternalError(fmt.Errorf(
"couldn't create cloud integration PAT: %w", err,
@@ -170,19 +170,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, valuer.MustNewUUID(orgId), types.UserStatusActive)
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId), types.UserStatusActive)
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.UserSetter.GetOrCreateUser(
ctx,
cloudIntegrationUser,
user.WithFactorPassword(password),
user.WithRoleNames([]string{authtypes.SigNozViewerRoleName}),
)
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
}
@@ -292,8 +287,9 @@ func getOrCreateCloudProviderIngestionKey(
}
}
slog.InfoContext(ctx, "no existing ingestion key found for cloud integration, creating a new one",
"cloud_provider", cloudProvider,
zap.L().Info(
"no existing ingestion key found for cloud integration, creating a new one",
zap.String("cloudProvider", cloudProvider),
)
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",

View File

@@ -4,15 +4,10 @@ import (
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"time"
signozerrors "github.com/SigNoz/signoz/pkg/errors"
"log/slog"
"github.com/SigNoz/signoz/ee/query-service/constants"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/http/render"
@@ -20,6 +15,7 @@ import (
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/licensetypes"
"github.com/SigNoz/signoz/pkg/valuer"
"go.uber.org/zap"
)
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
@@ -39,23 +35,23 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
}
if constants.FetchFeatures == "true" {
slog.DebugContext(ctx, "fetching license")
zap.L().Debug("fetching license")
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
if err != nil {
slog.ErrorContext(ctx, "failed to fetch license", signozerrors.Attr(err))
zap.L().Error("failed to fetch license", zap.Error(err))
} else if license == nil {
slog.DebugContext(ctx, "no active license found")
zap.L().Debug("no active license found")
} else {
licenseKey := license.Key
slog.DebugContext(ctx, "fetching zeus features")
zap.L().Debug("fetching zeus features")
zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
if err == nil {
slog.DebugContext(ctx, "fetched zeus features", "features", zeusFeatures)
zap.L().Debug("fetched zeus features", zap.Any("features", zeusFeatures))
// merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
featureSet = MergeFeatureSets(zeusFeatures, featureSet)
} else {
slog.ErrorContext(ctx, "failed to fetch zeus features", signozerrors.Attr(err))
zap.L().Error("failed to fetch zeus features", zap.Error(err))
}
}
}

View File

@@ -7,7 +7,6 @@ import (
"net/http"
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
@@ -15,7 +14,7 @@ import (
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/valuer"
"log/slog"
"go.uber.org/zap"
)
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
@@ -36,7 +35,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
if apiErrorObj != nil {
slog.ErrorContext(r.Context(), "error parsing metric query range params", errors.Attr(apiErrorObj.Err))
zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
RespondError(w, apiErrorObj, nil)
return
}
@@ -45,7 +44,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
// add temporality for each metric
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
if temporalityErr != nil {
slog.ErrorContext(r.Context(), "error while adding temporality for metrics", errors.Attr(temporalityErr))
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
return
}

View File

@@ -5,23 +5,19 @@ import (
"fmt"
"net"
"net/http"
_ "net/http/pprof" // http profiler
"slices"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
"github.com/SigNoz/signoz/pkg/cache/memorycache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
"go.opentelemetry.io/otel/propagation"
"github.com/gorilla/handlers"
"github.com/rs/cors"
"github.com/soheilhy/cmux"
"github.com/SigNoz/signoz/ee/query-service/app/api"
"github.com/SigNoz/signoz/ee/query-service/rules"
"github.com/SigNoz/signoz/ee/query-service/usage"
@@ -35,8 +31,8 @@ import (
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/web"
"log/slog"
"github.com/rs/cors"
"github.com/soheilhy/cmux"
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -51,6 +47,7 @@ import (
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils"
"go.uber.org/zap"
)
// Server runs HTTP, Mux and a grpc server
@@ -86,7 +83,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
}
reader := clickhouseReader.NewReader(
signoz.Instrumentation.Logger(),
signoz.SQLStore,
signoz.TelemetryStore,
signoz.Prometheus,
@@ -210,7 +206,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
r := baseapp.NewRouter()
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
r.Use(middleware.NewRecovery(s.signoz.Instrumentation.Logger()).Wrap)
r.Use(otelmux.Middleware(
"apiserver",
otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
@@ -219,8 +214,10 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
otelmux.WithFilter(func(r *http.Request) bool {
return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
}),
otelmux.WithPublicEndpoint(),
))
r.Use(middleware.NewIdentN(s.signoz.IdentNResolver, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap)
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
s.config.APIServer.Timeout.ExcludedRoutes,
s.config.APIServer.Timeout.Default,
@@ -281,7 +278,7 @@ func (s *Server) initListeners() error {
return err
}
slog.Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
return nil
}
@@ -301,22 +298,31 @@ func (s *Server) Start(ctx context.Context) error {
}
go func() {
slog.Info("Starting HTTP server", "port", httpPort, "addr", s.httpHostPort)
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
switch err := s.httpServer.Serve(s.httpConn); err {
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
// normal exit, nothing to do
default:
slog.Error("Could not start HTTP server", errors.Attr(err))
zap.L().Error("Could not start HTTP server", zap.Error(err))
}
s.unavailableChannel <- healthcheck.Unavailable
}()
go func() {
slog.Info("Starting OpAmp Websocket server", "addr", baseconst.OpAmpWsEndpoint)
zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
if err != nil {
zap.L().Error("Could not start pprof server", zap.Error(err))
}
}()
go func() {
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
if err != nil {
slog.Error("opamp ws server failed to start", errors.Attr(err))
zap.L().Error("opamp ws server failed to start", zap.Error(err))
s.unavailableChannel <- healthcheck.Unavailable
}
}()
@@ -352,9 +358,10 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
MetadataStore: metadataStore,
Prometheus: prometheus,
Context: context.Background(),
Logger: zap.L(),
Reader: ch,
Querier: querier,
Logger: providerSettings.Logger,
SLogger: providerSettings.Logger,
Cache: cache,
EvalDelay: baseconst.GetEvalDelay(),
PrepareTaskFunc: rules.PrepareTaskFunc,
@@ -373,7 +380,7 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
return nil, fmt.Errorf("rule manager error: %v", err)
}
slog.Info("rules manager is ready")
zap.L().Info("rules manager is ready")
return manager, nil
}

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
@@ -309,7 +308,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series, ", errors.Attr(filterErr), "rule_name", r.Name())
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
} else {
seriesToProcess = filteredSeries
}
@@ -392,7 +391,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
r.logger.ErrorContext(ctx, "Expanding alert template failed", errors.Attr(err), "data", tmplData, "rule_name", r.Name())
r.logger.ErrorContext(ctx, "Expanding alert template failed", "error", err, "data", tmplData, "rule_name", r.Name())
}
return result
}
@@ -468,7 +467,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
if err != nil {
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "labels", a.Labels)
r.logger.ErrorContext(ctx, "error marshaling labels", "error", err, "labels", a.Labels)
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given

View File

@@ -2,7 +2,6 @@ package rules
import (
"context"
"log/slog"
"testing"
"time"
@@ -117,7 +116,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
options := clickhouseReader.NewOptions("primaryNamespace")
reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
rule, err := NewAnomalyRule(
"test-anomaly-rule",
@@ -248,7 +247,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
options := clickhouseReader.NewOptions("primaryNamespace")
reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)

View File

@@ -6,16 +6,14 @@ import (
"time"
"log/slog"
"github.com/google/uuid"
"github.com/SigNoz/signoz/pkg/errors"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/google/uuid"
"go.uber.org/zap"
)
func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
@@ -36,7 +34,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
opts.Rule,
opts.Reader,
opts.Querier,
opts.Logger,
opts.SLogger,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
baserules.WithSQLStore(opts.SQLStore),
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
@@ -59,7 +57,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
ruleId,
opts.OrgID,
opts.Rule,
opts.Logger,
opts.SLogger,
opts.Reader,
opts.ManagerOpts.Prometheus,
baserules.WithSQLStore(opts.SQLStore),
@@ -84,7 +82,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
opts.Rule,
opts.Reader,
opts.Querier,
opts.Logger,
opts.SLogger,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
baserules.WithSQLStore(opts.SQLStore),
@@ -144,7 +142,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
parsedRule,
opts.Reader,
opts.Querier,
opts.Logger,
opts.SLogger,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
@@ -153,7 +151,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
slog.Error("failed to prepare a new threshold rule for test", "name", alertname, errors.Attr(err))
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
@@ -164,7 +162,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
alertname,
opts.OrgID,
parsedRule,
opts.Logger,
opts.SLogger,
opts.Reader,
opts.ManagerOpts.Prometheus,
baserules.WithSendAlways(),
@@ -175,7 +173,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
slog.Error("failed to prepare a new promql rule for test", "name", alertname, errors.Attr(err))
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
@@ -186,7 +184,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
parsedRule,
opts.Reader,
opts.Querier,
opts.Logger,
opts.SLogger,
opts.Cache,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
@@ -195,7 +193,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
)
if err != nil {
slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, errors.Attr(err))
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
return 0, basemodel.BadRequest(err)
}
} else {
@@ -207,7 +205,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
alertsFound, err := rule.Eval(ctx, ts)
if err != nil {
slog.Error("evaluating rule failed", "rule", rule.Name(), errors.Attr(err))
zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
}
rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)

View File

@@ -80,21 +80,6 @@ func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
alertDataRows := cmock.NewRows(cols, tc.Values)
mock := telemetryStore.Mock()
// Mock metadata queries for FetchTemporalityAndTypeMulti
// First query: fetchMetricsTemporalityAndType (from signoz_metrics time series table)
metadataCols := []cmock.ColumnType{
{Name: "metric_name", Type: "String"},
{Name: "temporality", Type: "String"},
{Name: "type", Type: "String"},
{Name: "is_monotonic", Type: "Bool"},
}
metadataRows := cmock.NewRows(metadataCols, [][]any{
{"probe_success", metrictypes.Unspecified, metrictypes.GaugeType, false},
})
mock.ExpectQuery("*distributed_time_series_v4*").WithArgs(nil, nil, nil).WillReturnRows(metadataRows)
// Second query: fetchMeterSourceMetricsTemporalityAndType (from signoz_meter table)
emptyMetadataRows := cmock.NewRows(metadataCols, [][]any{})
mock.ExpectQuery("*meter*").WithArgs(nil).WillReturnRows(emptyMetadataRows)
// Generate query arguments for the metric query
evalTime := time.Now().UTC()

View File

@@ -8,14 +8,13 @@ import (
"sync/atomic"
"time"
"log/slog"
"github.com/ClickHouse/clickhouse-go/v2"
"github.com/go-co-op/gocron"
"github.com/google/uuid"
"go.uber.org/zap"
"github.com/SigNoz/signoz/ee/query-service/model"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/licensing"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
@@ -77,19 +76,19 @@ func (lm *Manager) Start(ctx context.Context) error {
func (lm *Manager) UploadUsage(ctx context.Context) {
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
if err != nil {
slog.ErrorContext(ctx, "failed to get organizations", errors.Attr(err))
zap.L().Error("failed to get organizations", zap.Error(err))
return
}
for _, organization := range organizations {
// check if license is present or not
license, err := lm.licenseService.GetActive(ctx, organization.ID)
if err != nil {
slog.ErrorContext(ctx, "failed to get active license", errors.Attr(err))
zap.L().Error("failed to get active license", zap.Error(err))
return
}
if license == nil {
// we will not start the usage reporting if license is not present.
slog.InfoContext(ctx, "no license present, skipping usage reporting")
zap.L().Info("no license present, skipping usage reporting")
return
}
@@ -116,7 +115,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
dbusages := []model.UsageDB{}
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
slog.ErrorContext(ctx, "failed to get usage from clickhouse", errors.Attr(err))
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
return
}
for _, u := range dbusages {
@@ -126,24 +125,24 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
}
if len(usages) <= 0 {
slog.InfoContext(ctx, "no snapshots to upload, skipping")
zap.L().Info("no snapshots to upload, skipping.")
return
}
slog.InfoContext(ctx, "uploading usage data")
zap.L().Info("uploading usage data")
usagesPayload := []model.Usage{}
for _, usage := range usages {
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
if err != nil {
slog.ErrorContext(ctx, "error while decrypting usage data", errors.Attr(err))
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
return
}
usageData := model.Usage{}
err = json.Unmarshal(usageDataBytes, &usageData)
if err != nil {
slog.ErrorContext(ctx, "error while unmarshalling usage data", errors.Attr(err))
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
return
}
@@ -164,13 +163,13 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
body, errv2 := json.Marshal(payload)
if errv2 != nil {
slog.ErrorContext(ctx, "error while marshalling usage payload", errors.Attr(errv2))
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
return
}
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
if errv2 != nil {
slog.ErrorContext(ctx, "failed to upload usage", errors.Attr(errv2))
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
// not returning error here since it is captured in the failed count
return
}
@@ -180,7 +179,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
func (lm *Manager) Stop(ctx context.Context) {
lm.scheduler.Stop()
slog.InfoContext(ctx, "sending usage data before shutting down")
zap.L().Info("sending usage data before shutting down")
// send usage before shutting down
lm.UploadUsage(ctx)
atomic.StoreUint32(&locker, stateUnlocked)

View File

@@ -4,12 +4,10 @@ import (
"context"
"database/sql"
"github.com/uptrace/bun"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
)
type provider struct {
@@ -34,9 +32,9 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
fmter: fmter,
settings: settings,
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
SCreateAndDropConstraint: true,
SAlterTableAddAndDropColumnIfNotExistsAndExists: true,
SAlterTableAlterColumnSetAndDrop: true,
DropConstraint: true,
ColumnIfNotExistsExists: true,
AlterColumnSetNotNull: true,
}),
}, nil
}
@@ -74,9 +72,8 @@ WHERE
if err != nil {
return nil, nil, err
}
if len(columns) == 0 {
return nil, nil, provider.sqlstore.WrapNotFoundErrf(sql.ErrNoRows, errors.CodeNotFound, "table (%s) not found", tableName)
return nil, nil, sql.ErrNoRows
}
sqlschemaColumns := make([]*sqlschema.Column, 0)
@@ -114,7 +111,7 @@ WHERE
defer func() {
if err := constraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
@@ -175,7 +172,7 @@ WHERE
defer func() {
if err := foreignKeyConstraintsRows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
@@ -223,9 +220,7 @@ SELECT
ci.relname AS index_name,
i.indisunique AS unique,
i.indisprimary AS primary,
a.attname AS column_name,
array_position(i.indkey, a.attnum) AS column_position,
pg_get_expr(i.indpred, i.indrelid) AS predicate
a.attname AS column_name
FROM
pg_index i
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
@@ -236,24 +231,18 @@ WHERE
a.attnum = ANY(i.indkey)
AND con.oid IS NULL
AND ct.relkind = 'r'
AND ct.relname = ?
ORDER BY index_name, column_position`, string(name))
AND ct.relname = ?`, string(name))
if err != nil {
return nil, provider.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "no indices for table (%s) found", name)
return nil, err
}
defer func() {
if err := rows.Close(); err != nil {
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
}
}()
type indexEntry struct {
columns []sqlschema.ColumnName
predicate *string
}
uniqueIndicesMap := make(map[string]*indexEntry)
uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
for rows.Next() {
var (
tableName string
@@ -261,53 +250,27 @@ ORDER BY index_name, column_position`, string(name))
unique bool
primary bool
columnName string
// starts from 0 and is unused in this function, this is to ensure that the column names are in the correct order
columnPosition int
predicate *string
)
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName, &columnPosition, &predicate); err != nil {
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
return nil, err
}
if unique {
if _, ok := uniqueIndicesMap[indexName]; !ok {
uniqueIndicesMap[indexName] = &indexEntry{
columns: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
predicate: predicate,
uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
TableName: name,
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
}
} else {
uniqueIndicesMap[indexName].columns = append(uniqueIndicesMap[indexName].columns, sqlschema.ColumnName(columnName))
uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
}
}
}
indices := make([]sqlschema.Index, 0)
for indexName, entry := range uniqueIndicesMap {
if entry.predicate != nil {
index := &sqlschema.PartialUniqueIndex{
TableName: name,
ColumnNames: entry.columns,
Where: *entry.predicate,
}
if index.Name() == indexName {
indices = append(indices, index)
} else {
indices = append(indices, index.Named(indexName))
}
} else {
index := &sqlschema.UniqueIndex{
TableName: name,
ColumnNames: entry.columns,
}
if index.Name() == indexName {
indices = append(indices, index)
} else {
indices = append(indices, index.Named(indexName))
}
}
for _, index := range uniqueIndicesMap {
indices = append(indices, index)
}
return indices, nil

View File

@@ -101,7 +101,7 @@ func (provider *provider) WrapNotFoundErrf(err error, code errors.Code, format s
func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
var pgErr *pgconn.PgError
if errors.As(err, &pgErr) && (pgErr.Code == "23505" || pgErr.Code == "23503") {
if errors.As(err, &pgErr) && pgErr.Code == "23505" {
return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
}

View File

@@ -1,97 +0,0 @@
---
globs: **/*.store.ts
alwaysApply: false
---
# State Management: React Query, nuqs, Zustand
Use the following stack. Do **not** introduce or recommend Redux or React Context for shared/global state.
## Server state → React Query
- **Use for:** API responses, time-series data, caching, background refetch, retries, stale/refresh.
- **Do not use Redux/Context** to store or mirror data that comes from React Query (e.g. do not dispatch API results into Redux).
- Prefer generated React Query hooks from `frontend/src/api/generated` when available.
- Keep server state in React Query; expose it via hooks that return the query result (and optionally memoized derived values). Do not duplicate it in Redux or Context.
```tsx
// ✅ GOOD: single source of truth from React Query
export function useAppStateHook() {
const { data, isError } = useQuery(...)
const memoizedConfigs = useMemo(() => ({ ... }), [data?.configs])
return { configs: memoizedConfigs, isError, ... }
}
// ❌ BAD: copying React Query result into Redux
dispatch({ type: UPDATE_LATEST_VERSION, payload: queryResponse.data })
```
## URL state → nuqs
- **Use for:** shareable state, filters, time range, selected values, pagination, view state that belongs in the URL.
- **Do not use Redux/Context** for state that should be shareable or reflected in the URL.
- Use [nuqs](https://nuqs.dev/docs/basic-usage) for typed, type-safe URL search params. Avoid ad-hoc `useSearchParams` encoding/decoding.
- Keep URL payload small; respect browser URL length limits (e.g. Chrome ~2k chars). Do not put large datasets or sensitive data in query params.
```tsx
// ✅ GOOD: nuqs for filters / time range / selection
const [timeRange, setTimeRange] = useQueryState('timeRange', parseAsString.withDefault('1h'))
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
// ❌ BAD: Redux/Context for shareable or URL-synced state
const { timeRange } = useContext(SomeContext)
```
## Client state → Zustand
- **Use for:** global/client state, cross-component state, feature flags, complex or large client objects (e.g. dashboard state, query builder state).
- **Do not use Redux or React Context** for global or feature-level client state.
- Prefer small, domain-scoped stores (e.g. DashboardStore, QueryBuilderStore).
### Zustand best practices (align with eslint-plugin-zustand-rules)
- **One store per module.** Do not define multiple `create()` calls in the same file; use one store per module (or compose slices into one store).
- **Always use selectors.** Call the store hook with a selector so only the used slice triggers re-renders. Never use `useStore()` with no selector.
```tsx
// ✅ GOOD: selector — re-renders only when isDashboardLocked changes
const isLocked = useDashboardStore(state => state.isDashboardLocked)
// ❌ BAD: no selector — re-renders on any store change
const state = useDashboardStore()
```
- **Never mutate state directly.** Update only via `set` or `setState` (or `getState()` + `set` for reads). No `state.foo = x` or `state.bears += 1` inside actions.
```tsx
// ✅ GOOD: use set
increment: () => set(state => ({ bears: state.bears + 1 }))
// ❌ BAD: direct mutation
increment: () => { state.bears += 1 }
```
- **State properties before actions.** In the store object, list all state fields first, then action functions.
- **Split into slices when state is large.** If a store has many top-level properties (e.g. more than 5–10), split into slice factories and combine with one `create()`.
```tsx
// ✅ GOOD: slices for large state
const createBearSlice = set => ({ bears: 0, addBear: () => set(s => ({ bears: s.bears + 1 })) })
const createFishSlice = set => ({ fish: 0, addFish: () => set(s => ({ fish: s.fish + 1 })) })
const useStore = create(set => ({ ...createBearSlice(set), ...createFishSlice(set) }))
```
- **In projects using Zustand:** add `eslint-plugin-zustand-rules` and extend `plugin:zustand-rules/recommended` to enforce these rules automatically.
## Local state → React state only
- **Use useState/useReducer** for: component-local UI state, form inputs, toggles, hover state, data that never leaves the component.
- Do not use Zustand, Redux, or Context for state that is purely local to one component or a small subtree.
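A minimal sketch of the local-only case; the component and state names are illustrative:
```tsx
import { useState } from 'react';

// ✅ GOOD: toggle state lives and dies with the component
function DetailsPanel(): JSX.Element {
  const [isExpanded, setIsExpanded] = useState(false);
  return (
    <button type="button" onClick={(): void => setIsExpanded((prev) => !prev)}>
      {isExpanded ? 'Collapse' : 'Expand'}
    </button>
  );
}
```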
## Summary
| State type | Use | Avoid |
|-------------------|------------------|--------------------|
| Server / API | React Query | Redux, Context |
| URL / shareable | nuqs | Redux, Context |
| Global client | Zustand | Redux, Context |
| Local UI | useState/useReducer | Zustand, Redux, Context |

View File

@@ -1,150 +0,0 @@
---
name: migrate-state-management
description: Migrate Redux or React Context to the correct state option (React Query for server state, nuqs for URL/shareable state, Zustand for global client state). Use when refactoring away from Redux/Context, moving state to the right store, or when the user asks to migrate state management.
---
# Migrate State: Redux/Context → React Query, nuqs, Zustand
Do **not** introduce or recommend Redux or React Context. Migrate existing usage to the stack below.
## 1. Classify the state
Before changing code, classify what the state represents:
| If the state is… | Migrate to | Do not use |
|------------------|------------|------------|
| From API / server (versions, configs, fetched lists, time-series) | **React Query** | Redux, Context |
| Shareable via URL (filters, time range, page, selected ids) | **nuqs** | Redux, Context |
| Global/client UI (dashboard lock, query builder, feature flags, large client objects) | **Zustand** | Redux, Context |
| Local to one component (inputs, toggles, hover) | **useState / useReducer** | Zustand, Redux, Context |
If one slice mixes concerns (e.g. Redux has both API data and pagination), split: API → React Query, pagination → nuqs, rest → Zustand or local state.
## 2. Migrate to React Query (server state)
**When:** State comes from or mirrors an API response (e.g. `currentVersion`, `latestVersion`, `configs`, lists).
**Steps:**
1. Find where the data is fetched (existing `useQuery`/API call) and where it is dispatched or set in Context/Redux.
2. Remove the dispatch/set that writes API results into Redux/Context.
3. Expose a single hook that uses the query and returns the same shape consumers expect (use `useMemo` for derived objects like `configs` to avoid unnecessary re-renders).
4. Replace Redux/Context consumption with the new hook. Prefer generated React Query hooks from `frontend/src/api/generated` when available.
5. Configure cache/refetch (e.g. `refetchOnMount: false`, `staleTime`) so behavior matches previous “single source” expectations.
**Before (Redux mirroring React Query):**
```tsx
if (getUserLatestVersionResponse.isFetched && getUserLatestVersionResponse.isSuccess && getUserLatestVersionResponse.data?.payload) {
dispatch({ type: UPDATE_LATEST_VERSION, payload: { latestVersion: getUserLatestVersionResponse.data.payload.tag_name } })
}
```
**After (single source in React Query):**
```tsx
export function useAppStateHook() {
const { data, isError } = useQuery(...)
const memoizedConfigs = useMemo(() => ({ ... }), [data?.configs])
return {
latestVersion: data?.payload?.tag_name,
configs: memoizedConfigs,
isError,
}
}
```
Consumers use `useAppStateHook()` instead of `useSelector` or Context. Do not copy React Query result into Redux or Context.
## 3. Migrate to nuqs (URL / shareable state)
**When:** State should be in the URL: filters, time range, pagination, selected values, view state. Keep payload small (e.g. Chrome ~2k chars); no large datasets or sensitive data.
**Steps:**
1. Identify which Redux/Context fields are shareable or already reflected in the URL (e.g. `currentPage`, `timeRange`, `selectedFilter`).
2. Add nuqs (or use existing): `useQueryState('param', parseAsString.withDefault('…'))` (or `parseAsInteger`, etc.).
3. Replace reads/writes of those fields with nuqs hooks. Use typed parsers; avoid ad-hoc `useSearchParams` encoding/decoding.
4. Remove the same fields from Redux/Context and their reducers/providers.
**Before (Context/Redux):**
```tsx
const { timeRange } = useContext(SomeContext)
const [page, setPage] = useDispatch(...)
```
**After (nuqs):**
```tsx
const [timeRange, setTimeRange] = useQueryState('timeRange', parseAsString.withDefault('1h'))
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
```
## 4. Migrate to Zustand (global client state)
**When:** State is global or cross-component client state: feature flags, dashboard state, query builder state, complex/large client objects (e.g. up to ~1.5–2 MB). Not for server cache or local-only UI.
**Steps:**
1. Create one store per domain (e.g. `DashboardStore`, `QueryBuilderStore`). One `create()` per module; for large state use slice factories and combine.
2. Put state properties first, then actions. Use `set` (or `setState` / `getState()` + `set`) for updates; never mutate state directly.
3. Replace Context/Redux consumption with the store hook **and a selector** so only the used slice triggers re-renders.
4. Remove the old Context provider / Redux slice and related dispatches.
**Selector (required):**
```tsx
const isLocked = useDashboardStore(state => state.isDashboardLocked)
```
Never use `useStore()` with no selector. Never do `state.foo = x` inside actions; use `set(state => ({ ... }))`.
**Before (Context/Redux):**
```tsx
const { isDashboardLocked, setLocked } = useContext(DashboardContext)
```
**After (Zustand):**
```tsx
const isLocked = useDashboardStore(state => state.isDashboardLocked)
const setLocked = useDashboardStore(state => state.setLocked)
```
For large stores (many top-level fields), split into slices and combine:
```tsx
const createBearSlice = set => ({ bears: 0, addBear: () => set(s => ({ bears: s.bears + 1 })) })
const useStore = create(set => ({ ...createBearSlice(set), ...createFishSlice(set) }))
```
Add `eslint-plugin-zustand-rules` with `plugin:zustand-rules/recommended` to enforce selectors and no direct mutation.
## 5. Migrate to local state (useState / useReducer)
**When:** State is used only inside one component or a small subtree (form inputs, toggles, hover, panel selection). No URL sync, no cross-feature sharing.
**Steps:**
1. Move the state into the component that owns it (or the smallest common parent).
2. Use `useState` or `useReducer` (useReducer when multiple related fields change together).
3. Remove from Redux/Context and any provider/slice.
Do not use Zustand, Redux, or Context for purely local UI state.
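Following the before/after pattern of the earlier sections (`LayoutContext` and the field names here are hypothetical):
**Before (Context):**
```tsx
const { isFilterPanelOpen, setFilterPanelOpen } = useContext(LayoutContext)
```
**After (local state):**
```tsx
const [isFilterPanelOpen, setFilterPanelOpen] = useState(false)
```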
## 6. Migration checklist
- [ ] Classify each piece of state (server / URL / global client / local).
- [ ] Server state: move to React Query; expose via hook; remove Redux/Context mirroring.
- [ ] URL state: move to nuqs; remove from Redux/Context; keep URL payload small.
- [ ] Global client state: move to Zustand with selectors and immutable updates; one store per domain.
- [ ] Local state: move to useState/useReducer in the owning component.
- [ ] Remove old Redux slices / Context providers and all dispatches/consumers for migrated state.
- [ ] Do not duplicate the same data in multiple places (e.g. React Query + Redux).
## Additional resources
- Project rule: [.cursor/rules/state-management.mdc](../../rules/state-management.mdc)
- Detailed patterns and rationale: [reference.md](reference.md)

View File

@@ -1,50 +0,0 @@
# State migration reference
## Why migrate
- **Context:** Re-renders all consumers on any change; no granular subscriptions; becomes brittle at scale.
- **Redux:** Heavy boilerplate (actions, reducers, selectors, Provider); slower onboarding; often used to mirror React Query or URL state.
- **Goal:** Fewer mechanisms, domain isolation, granular subscriptions, single source of truth per state type.
## React Query migration (server state)
Typical anti-pattern: API is called via React Query, then result is dispatched to Redux. Flow becomes: Component → useQueries → API → dispatch → Reducer → Redux state → useSelector.
Correct flow: Component → useQuery (or custom hook wrapping it) → same component reads from hook. No Redux/Context in between.
- Prefer generated hooks from `frontend/src/api/generated`.
- For “app state” that is just API data (versions, configs), one hook that returns `{ ...data, configs: useMemo(...) }` is enough. No selectors needed for plain data; useMemo only where the value is used as a dependency (e.g. in useState).
- Set `staleTime` / `refetchOnMount` etc. so refetch behavior matches previous expectations.
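A minimal hook sketch of that flow; `fetchAppState`, the query key, and the payload fields are illustrative, and the options assume a `useQuery(key, fn, options)` signature:
```tsx
export function useAppStateHook() {
  const { data, isError } = useQuery(['appState'], fetchAppState, {
    // match previous “single source” refetch expectations
    refetchOnMount: false,
    staleTime: 5 * 60 * 1000,
  })
  // useMemo only because configs is consumed as a dependency downstream
  const configs = useMemo(() => data?.configs ?? {}, [data?.configs])
  return { latestVersion: data?.payload?.tag_name, configs, isError }
}
```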
## nuqs migration (URL state)
Redux/Context often hold pagination, filters, time range, selected values that are shareable. Those belong in the URL.
- Use [nuqs](https://nuqs.dev/docs/basic-usage) for typed search params. Avoid ad-hoc `useSearchParams` + manual encoding.
- Browser limits: Chrome ~2k chars practical; keep payload small; no large datasets or secrets in query params.
- If the app uses TanStack Router, search params can be handled there; otherwise nuqs is the standard.
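For reference, the typed-parser form reads and writes the param through one hook:
```tsx
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
```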
## Zustand migration (client state)
- One store per domain (e.g. DashboardStore, QueryBuilderStore). Multiple `create()` in one file is disallowed; use one store or composed slices.
- Always use a selector: `useStore(s => s.field)` so only that field drives re-renders.
- Never mutate: update only via `set(state => ({ ... }))` or `setState` / `getState()` + `set`.
- State properties first, then actions. For 5–10+ top-level fields, split into slice factories and combine with one `create()`.
- Large client objects: Zustand is for “large” in the ~1.5–2 MB range; above that, optimize at the API/store design level.
- Testing: no Provider; stores are plain functions; easy to reset and mock.
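Putting those rules together, a minimal domain store might look like this (store name and fields are illustrative):
```tsx
import { create } from 'zustand'

interface DashboardState {
  // state properties first
  isDashboardLocked: boolean
  // then actions
  setLocked: (locked: boolean) => void
}

export const useDashboardStore = create<DashboardState>()((set) => ({
  isDashboardLocked: false,
  setLocked: (locked): void => set({ isDashboardLocked: locked }),
}))

// ✅ consumers subscribe through a selector
const isLocked = useDashboardStore((s) => s.isDashboardLocked)
```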
## What not to use
- **Redux / Context** for new or migrated shared/global state.
- **Redux / Context** to store or mirror React Query results.
- **Redux / Context** for state that should live in the URL (use nuqs).
- **Zustand / Redux / Context** for component-local UI (use useState/useReducer).
## Summary table
| State type | Use | Avoid |
|-------------|--------------------|-----------------|
| Server/API | React Query | Redux, Context |
| URL/shareable | nuqs | Redux, Context |
| Global client | Zustand | Redux, Context |
| Local UI | useState/useReducer | Zustand, Redux, Context |

View File

@@ -193,16 +193,6 @@ module.exports = {
],
},
],
'no-restricted-syntax': [
'error',
{
selector:
// TODO: Make this generic on removal of redux
"CallExpression[callee.property.name='getState'][callee.object.name=/^use/]",
message:
'Avoid calling .getState() directly. Export a standalone action from the store instead.',
},
],
},
overrides: [
{
@@ -227,13 +217,5 @@ module.exports = {
'@typescript-eslint/no-unused-vars': 'warn',
},
},
{
// Store definition files are the only place .getState() is permitted —
// they are the canonical source for standalone action exports.
files: ['**/*Store.{ts,tsx}'],
rules: {
'no-restricted-syntax': 'off',
},
},
],
};

View File

@@ -1,29 +0,0 @@
import { PropsWithChildren } from 'react';
type CommonProps = PropsWithChildren<{
className?: string;
minSize?: number;
maxSize?: number;
defaultSize?: number;
direction?: 'horizontal' | 'vertical';
autoSaveId?: string;
withHandle?: boolean;
}>;
export function ResizablePanelGroup({
children,
className,
}: CommonProps): JSX.Element {
return <div className={className}>{children}</div>;
}
export function ResizablePanel({
children,
className,
}: CommonProps): JSX.Element {
return <div className={className}>{children}</div>;
}
export function ResizableHandle({ className }: CommonProps): JSX.Element {
return <div className={className} />;
}

View File

@@ -14,7 +14,6 @@ const config: Config.InitialOptions = {
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
'^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
'^@signozhq/resizable$': '<rootDir>/__mocks__/resizableMock.tsx',
'^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
'^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
@@ -24,8 +23,7 @@ const config: Config.InitialOptions = {
'<rootDir>/node_modules/@signozhq/icons/dist/index.esm.js',
'^react-syntax-highlighter/dist/esm/(.*)$':
'<rootDir>/node_modules/react-syntax-highlighter/dist/cjs/$1',
'^@signozhq/(?!ui$)([^/]+)$':
'<rootDir>/node_modules/@signozhq/$1/dist/$1.js',
'^@signozhq/([^/]+)$': '<rootDir>/node_modules/@signozhq/$1/dist/$1.js',
},
extensionsToTreatAsEsm: ['.ts'],
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],

View File

@@ -11,7 +11,6 @@
"prettify": "prettier --write .",
"fmt": "prettier --check .",
"lint": "eslint ./src",
"lint:generated": "eslint ./src/api/generated --fix",
"lint:fix": "eslint ./src --fix",
"jest": "jest",
"jest:coverage": "jest --coverage",
@@ -65,9 +64,8 @@
"@signozhq/sonner": "0.1.0",
"@signozhq/switch": "0.0.2",
"@signozhq/table": "0.3.7",
"@signozhq/toggle-group": "0.0.1",
"@signozhq/toggle-group": "^0.0.1",
"@signozhq/tooltip": "0.0.2",
"@signozhq/ui": "0.0.5",
"@tanstack/react-table": "8.20.6",
"@tanstack/react-virtual": "3.11.2",
"@uiw/codemirror-theme-copilot": "4.23.11",
@@ -137,7 +135,6 @@
"react-full-screen": "1.1.1",
"react-grid-layout": "^1.3.4",
"react-helmet-async": "1.3.0",
"react-hook-form": "7.71.2",
"react-i18next": "^11.16.1",
"react-lottie": "1.2.10",
"react-markdown": "8.0.7",
@@ -286,4 +283,4 @@
"tmp": "0.2.4",
"vite": "npm:rolldown-vite@7.3.1"
}
}
}

File diff suppressed because it is too large

Before: 214 KiB

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#fa520f" viewBox="0 0 24 24"><title>Mistral AI</title><path d="M17.143 3.429v3.428h-3.429v3.429h-3.428V6.857H6.857V3.43H3.43v13.714H0v3.428h10.286v-3.428H6.857v-3.429h3.429v3.429h3.429v-3.429h3.428v3.429h-3.428v3.428H24v-3.428h-3.43V3.429z"/></svg>

Before: 294 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 120 120"><defs><linearGradient id="a" x1="0%" x2="100%" y1="0%" y2="100%"><stop offset="0%" stop-color="#ff4d4d"/><stop offset="100%" stop-color="#991b1b"/></linearGradient></defs><path fill="url(#a)" d="M60 10c-30 0-45 25-45 45s15 40 30 45v10h10v-10s5 2 10 0v10h10v-10c15-5 30-25 30-45S90 10 60 10"/><path fill="url(#a)" d="M20 45C5 40 0 50 5 60s15 5 20-5c3-7 0-10-5-10"/><path fill="url(#a)" d="M100 45c15-5 20 5 15 15s-15 5-20-5c-3-7 0-10 5-10"/><path stroke="#ff4d4d" stroke-linecap="round" stroke-width="3" d="M45 15Q35 5 30 8M75 15Q85 5 90 8"/><circle cx="45" cy="35" r="6" fill="#050810"/><circle cx="75" cy="35" r="6" fill="#050810"/><circle cx="46" cy="34" r="2.5" fill="#00e5cc"/><circle cx="76" cy="34" r="2.5" fill="#00e5cc"/></svg>

Before: 809 B

View File

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><title>Render</title><path d="M18.263.007c-3.121-.147-5.744 2.109-6.192 5.082-.018.138-.045.272-.067.405-.696 3.703-3.936 6.507-7.827 6.507a7.9 7.9 0 0 1-3.825-.979.202.202 0 0 0-.302.178V24H12v-8.999c0-1.656 1.338-3 2.987-3h2.988c3.382 0 6.103-2.817 5.97-6.244-.12-3.084-2.61-5.603-5.682-5.75"/></svg>

Before: 362 B

View File

@@ -15,6 +15,5 @@
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles",
"role_details": "Role Details",
"members": "Members",
"service_accounts": "Service Accounts"
"members": "Members"
}

View File

@@ -50,8 +50,5 @@
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
"METER_EXPLORER": "SigNoz | Meter Explorer",
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
"METER": "SigNoz | Meter",
"ROLES_SETTINGS": "SigNoz | Roles",
"MEMBERS_SETTINGS": "SigNoz | Members",
"SERVICE_ACCOUNTS_SETTINGS": "SigNoz | Service Accounts"
"METER": "SigNoz | Meter"
}

View File

@@ -15,6 +15,5 @@
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles",
"role_details": "Role Details",
"members": "Members",
"service_accounts": "Service Accounts"
"members": "Members"
}

View File

@@ -75,6 +75,5 @@
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
"METER": "SigNoz | Meter",
"ROLES_SETTINGS": "SigNoz | Roles",
"MEMBERS_SETTINGS": "SigNoz | Members",
"SERVICE_ACCOUNTS_SETTINGS": "SigNoz | Service Accounts"
"MEMBERS_SETTINGS": "SigNoz | Members"
}

View File

@@ -25,7 +25,7 @@ echo "\n✅ Prettier formatting successful"
# Fix linting issues
echo "\n\n---\nRunning eslint...\n"
if ! yarn lint:generated; then
if ! yarn lint --fix --quiet src/api/generated; then
echo "ESLint check failed! Please fix linting errors before proceeding."
exit 1
fi

View File

@@ -1,6 +1,6 @@
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { matchPath, Redirect, useLocation } from 'react-router-dom';
import { matchPath, useLocation } from 'react-router-dom';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getAll from 'api/v1/user/get';
@@ -128,7 +128,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
isAdmin &&
(path === ROUTES.SETTINGS ||
path === ROUTES.ORG_SETTINGS ||
path === ROUTES.MEMBERS_SETTINGS ||
path === ROUTES.BILLING ||
path === ROUTES.MY_SETTINGS);
@@ -237,7 +236,13 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
useEffect(() => {
// if it is an old route navigate to the new route
if (isOldRoute) {
// this will be handled by the redirect component below
const redirectUrl = oldNewRoutesMapping[pathname];
const newLocation = {
...location,
pathname: redirectUrl,
};
history.replace(newLocation);
return;
}
@@ -291,19 +296,6 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
}
}, [isLoggedInState, pathname, user, isOldRoute, currentRoute, location]);
if (isOldRoute) {
const redirectUrl = oldNewRoutesMapping[pathname];
return (
<Redirect
to={{
pathname: redirectUrl,
search: location.search,
hash: location.hash,
}}
/>
);
}
// NOTE: disabling this rule as there is no need to have div
return <>{children}</>;
}

View File

@@ -29,6 +29,7 @@ import posthog from 'posthog-js';
import { useAppContext } from 'providers/App/App';
import { IUser } from 'providers/App/types';
import { CmdKProvider } from 'providers/cmdKProvider';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
@@ -320,19 +321,6 @@ function App(): JSX.Element {
// Session Replay
replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
beforeSend(event) {
const sessionReplayUrl = posthog.get_session_replay_url?.({
withTimestamp: true,
});
if (sessionReplayUrl) {
// eslint-disable-next-line no-param-reassign
event.contexts = {
...event.contexts,
posthog: { session_replay_url: sessionReplayUrl },
};
}
return event;
},
});
setIsSentryInitialized(true);
@@ -383,26 +371,28 @@ function App(): JSX.Element {
<PrivateRoute>
<ResourceProvider>
<QueryBuilderProvider>
<KeyboardHotkeysProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</KeyboardHotkeysProvider>
<DashboardProvider>
<KeyboardHotkeysProvider>
<AppLayout>
<PreferenceContextProvider>
<Suspense fallback={<Spinner size="large" tip="Loading..." />}>
<Switch>
{routes.map(({ path, component, exact }) => (
<Route
key={`${path}`}
exact={exact}
path={path}
component={component}
/>
))}
<Route exact path="/" component={Home} />
<Route path="*" component={NotFound} />
</Switch>
</Suspense>
</PreferenceContextProvider>
</AppLayout>
</KeyboardHotkeysProvider>
</DashboardProvider>
</QueryBuilderProvider>
</ResourceProvider>
</PrivateRoute>

File diff suppressed because it is too large

View File

@@ -1,250 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
InvalidateOptions,
QueryClient,
QueryFunction,
QueryKey,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import { useQuery } from 'react-query';
import type { ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
Healthz200,
Healthz503,
Livez200,
Readyz200,
Readyz503,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
/**
* @summary Health check
*/
export const healthz = (signal?: AbortSignal) => {
return GeneratedAPIInstance<Healthz200>({
url: `/api/v2/healthz`,
method: 'GET',
signal,
});
};
export const getHealthzQueryKey = () => {
return [`/api/v2/healthz`] as const;
};
export const getHealthzQueryOptions = <
TData = Awaited<ReturnType<typeof healthz>>,
TError = ErrorType<Healthz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof healthz>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getHealthzQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof healthz>>> = ({
signal,
}) => healthz(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof healthz>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type HealthzQueryResult = NonNullable<
Awaited<ReturnType<typeof healthz>>
>;
export type HealthzQueryError = ErrorType<Healthz503>;
/**
* @summary Health check
*/
export function useHealthz<
TData = Awaited<ReturnType<typeof healthz>>,
TError = ErrorType<Healthz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof healthz>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getHealthzQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Health check
*/
export const invalidateHealthz = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getHealthzQueryKey() },
options,
);
return queryClient;
};
/**
* @summary Liveness check
*/
export const livez = (signal?: AbortSignal) => {
return GeneratedAPIInstance<Livez200>({
url: `/api/v2/livez`,
method: 'GET',
signal,
});
};
export const getLivezQueryKey = () => {
return [`/api/v2/livez`] as const;
};
export const getLivezQueryOptions = <
TData = Awaited<ReturnType<typeof livez>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof livez>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getLivezQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof livez>>> = ({
signal,
}) => livez(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof livez>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type LivezQueryResult = NonNullable<Awaited<ReturnType<typeof livez>>>;
export type LivezQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Liveness check
*/
export function useLivez<
TData = Awaited<ReturnType<typeof livez>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof livez>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getLivezQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Liveness check
*/
export const invalidateLivez = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries({ queryKey: getLivezQueryKey() }, options);
return queryClient;
};
/**
* @summary Readiness check
*/
export const readyz = (signal?: AbortSignal) => {
return GeneratedAPIInstance<Readyz200>({
url: `/api/v2/readyz`,
method: 'GET',
signal,
});
};
export const getReadyzQueryKey = () => {
return [`/api/v2/readyz`] as const;
};
export const getReadyzQueryOptions = <
TData = Awaited<ReturnType<typeof readyz>>,
TError = ErrorType<Readyz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof readyz>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getReadyzQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof readyz>>> = ({
signal,
}) => readyz(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof readyz>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ReadyzQueryResult = NonNullable<Awaited<ReturnType<typeof readyz>>>;
export type ReadyzQueryError = ErrorType<Readyz503>;
/**
* @summary Readiness check
*/
export function useReadyz<
TData = Awaited<ReturnType<typeof readyz>>,
TError = ErrorType<Readyz503>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof readyz>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getReadyzQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Readiness check
*/
export const invalidateReadyz = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getReadyzQueryKey() },
options,
);
return queryClient;
};

View File

@@ -20,113 +20,11 @@ import { useMutation, useQuery } from 'react-query';
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
HandleExportRawDataPOSTParams,
ListPromotedAndIndexedPaths200,
PromotetypesPromotePathDTO,
Querybuildertypesv5QueryRangeRequestDTO,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
/**
 * This endpoint allows complex queries exporting raw data for traces and logs
* @summary Export raw data
*/
export const handleExportRawDataPOST = (
querybuildertypesv5QueryRangeRequestDTO: BodyType<Querybuildertypesv5QueryRangeRequestDTO>,
params?: HandleExportRawDataPOSTParams,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/export_raw_data`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: querybuildertypesv5QueryRangeRequestDTO,
params,
signal,
});
};
export const getHandleExportRawDataPOSTMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
> => {
const mutationKey = ['handleExportRawDataPOST'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
}
> = (props) => {
const { data, params } = props ?? {};
return handleExportRawDataPOST(data, params);
};
return { mutationFn, ...mutationOptions };
};
export type HandleExportRawDataPOSTMutationResult = NonNullable<
Awaited<ReturnType<typeof handleExportRawDataPOST>>
>;
export type HandleExportRawDataPOSTMutationBody = BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
export type HandleExportRawDataPOSTMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Export raw data
*/
export const useHandleExportRawDataPOST = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof handleExportRawDataPOST>>,
TError,
{
data: BodyType<Querybuildertypesv5QueryRangeRequestDTO>;
params?: HandleExportRawDataPOSTParams;
},
TContext
> => {
const mutationOptions = getHandleExportRawDataPOSTMutationOptions(options);
return useMutation(mutationOptions);
};
/**
 * This endpoint promotes and indexes paths
* @summary Promote and index paths

View File

@@ -21,8 +21,6 @@ import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
AuthtypesPatchableObjectsDTO,
AuthtypesPatchableRoleDTO,
AuthtypesPostableRoleDTO,
CreateRole201,
DeleteRolePathParameters,
GetObjects200,
@@ -33,6 +31,8 @@ import type {
PatchObjectsPathParameters,
PatchRolePathParameters,
RenderErrorResponseDTO,
RoletypesPatchableRoleDTO,
RoletypesPostableRoleDTO,
} from '../sigNoz.schemas';
/**
@@ -118,14 +118,14 @@ export const invalidateListRoles = async (
* @summary Create role
*/
export const createRole = (
authtypesPostableRoleDTO: BodyType<AuthtypesPostableRoleDTO>,
roletypesPostableRoleDTO: BodyType<RoletypesPostableRoleDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<CreateRole201>({
url: `/api/v1/roles`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: authtypesPostableRoleDTO,
data: roletypesPostableRoleDTO,
signal,
});
};
@@ -137,13 +137,13 @@ export const getCreateRoleMutationOptions = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: BodyType<AuthtypesPostableRoleDTO> },
{ data: BodyType<RoletypesPostableRoleDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: BodyType<AuthtypesPostableRoleDTO> },
{ data: BodyType<RoletypesPostableRoleDTO> },
TContext
> => {
const mutationKey = ['createRole'];
@@ -157,7 +157,7 @@ export const getCreateRoleMutationOptions = <
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof createRole>>,
{ data: BodyType<AuthtypesPostableRoleDTO> }
{ data: BodyType<RoletypesPostableRoleDTO> }
> = (props) => {
const { data } = props ?? {};
@@ -170,7 +170,7 @@ export const getCreateRoleMutationOptions = <
export type CreateRoleMutationResult = NonNullable<
Awaited<ReturnType<typeof createRole>>
>;
export type CreateRoleMutationBody = BodyType<AuthtypesPostableRoleDTO>;
export type CreateRoleMutationBody = BodyType<RoletypesPostableRoleDTO>;
export type CreateRoleMutationError = ErrorType<RenderErrorResponseDTO>;
/**
@@ -183,13 +183,13 @@ export const useCreateRole = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: BodyType<AuthtypesPostableRoleDTO> },
{ data: BodyType<RoletypesPostableRoleDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof createRole>>,
TError,
{ data: BodyType<AuthtypesPostableRoleDTO> },
{ data: BodyType<RoletypesPostableRoleDTO> },
TContext
> => {
const mutationOptions = getCreateRoleMutationOptions(options);
@@ -370,13 +370,13 @@ export const invalidateGetRole = async (
*/
export const patchRole = (
{ id }: PatchRolePathParameters,
authtypesPatchableRoleDTO: BodyType<AuthtypesPatchableRoleDTO>,
roletypesPatchableRoleDTO: BodyType<RoletypesPatchableRoleDTO>,
) => {
return GeneratedAPIInstance<string>({
url: `/api/v1/roles/${id}`,
method: 'PATCH',
headers: { 'Content-Type': 'application/json' },
data: authtypesPatchableRoleDTO,
data: roletypesPatchableRoleDTO,
});
};
@@ -389,7 +389,7 @@ export const getPatchRoleMutationOptions = <
TError,
{
pathParams: PatchRolePathParameters;
data: BodyType<AuthtypesPatchableRoleDTO>;
data: BodyType<RoletypesPatchableRoleDTO>;
},
TContext
>;
@@ -398,7 +398,7 @@ export const getPatchRoleMutationOptions = <
TError,
{
pathParams: PatchRolePathParameters;
data: BodyType<AuthtypesPatchableRoleDTO>;
data: BodyType<RoletypesPatchableRoleDTO>;
},
TContext
> => {
@@ -415,7 +415,7 @@ export const getPatchRoleMutationOptions = <
Awaited<ReturnType<typeof patchRole>>,
{
pathParams: PatchRolePathParameters;
data: BodyType<AuthtypesPatchableRoleDTO>;
data: BodyType<RoletypesPatchableRoleDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
@@ -429,7 +429,7 @@ export const getPatchRoleMutationOptions = <
export type PatchRoleMutationResult = NonNullable<
Awaited<ReturnType<typeof patchRole>>
>;
export type PatchRoleMutationBody = BodyType<AuthtypesPatchableRoleDTO>;
export type PatchRoleMutationBody = BodyType<RoletypesPatchableRoleDTO>;
export type PatchRoleMutationError = ErrorType<RenderErrorResponseDTO>;
/**
@@ -444,7 +444,7 @@ export const usePatchRole = <
TError,
{
pathParams: PatchRolePathParameters;
data: BodyType<AuthtypesPatchableRoleDTO>;
data: BodyType<RoletypesPatchableRoleDTO>;
},
TContext
>;
@@ -453,7 +453,7 @@ export const usePatchRole = <
TError,
{
pathParams: PatchRolePathParameters;
data: BodyType<AuthtypesPatchableRoleDTO>;
data: BodyType<RoletypesPatchableRoleDTO>;
},
TContext
> => {

File diff suppressed because it is too large

View File

@@ -20,27 +20,33 @@ import { useMutation, useQuery } from 'react-query';
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
AcceptInvite201,
ChangePasswordPathParameters,
CreateAPIKey201,
CreateInvite201,
DeleteInvitePathParameters,
DeleteUserPathParameters,
GetInvite200,
GetInvitePathParameters,
GetMyUser200,
GetResetPasswordToken200,
GetResetPasswordTokenPathParameters,
GetUser200,
GetUserPathParameters,
ListAPIKeys200,
ListInvite200,
ListUsers200,
RenderErrorResponseDTO,
RevokeAPIKeyPathParameters,
TypesChangePasswordRequestDTO,
TypesDeprecatedUserDTO,
TypesPostableAcceptInviteDTO,
TypesPostableAPIKeyDTO,
TypesPostableBulkInviteRequestDTO,
TypesPostableForgotPasswordDTO,
TypesPostableInviteDTO,
TypesPostableResetPasswordDTO,
TypesStorableAPIKeyDTO,
TypesUserDTO,
UpdateAPIKeyPathParameters,
UpdateUser200,
UpdateUserPathParameters,
@@ -249,6 +255,84 @@ export const invalidateGetResetPasswordToken = async (
return queryClient;
};
/**
* This endpoint lists all invites
* @summary List invites
*/
export const listInvite = (signal?: AbortSignal) => {
return GeneratedAPIInstance<ListInvite200>({
url: `/api/v1/invite`,
method: 'GET',
signal,
});
};
export const getListInviteQueryKey = () => {
return [`/api/v1/invite`] as const;
};
export const getListInviteQueryOptions = <
TData = Awaited<ReturnType<typeof listInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof listInvite>>, TError, TData>;
}) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getListInviteQueryKey();
const queryFn: QueryFunction<Awaited<ReturnType<typeof listInvite>>> = ({
signal,
}) => listInvite(signal);
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
Awaited<ReturnType<typeof listInvite>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type ListInviteQueryResult = NonNullable<
Awaited<ReturnType<typeof listInvite>>
>;
export type ListInviteQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary List invites
*/
export function useListInvite<
TData = Awaited<ReturnType<typeof listInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof listInvite>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getListInviteQueryOptions(options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary List invites
*/
export const invalidateListInvite = async (
queryClient: QueryClient,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getListInviteQueryKey() },
options,
);
return queryClient;
};
/**
* This endpoint creates an invite for a user
* @summary Create invite
@@ -332,6 +416,257 @@ export const useCreateInvite = <
return useMutation(mutationOptions);
};
/**
* This endpoint deletes an invite by id
* @summary Delete invite
*/
export const deleteInvite = ({ id }: DeleteInvitePathParameters) => {
return GeneratedAPIInstance<void>({
url: `/api/v1/invite/${id}`,
method: 'DELETE',
});
};
export const getDeleteInviteMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
> => {
const mutationKey = ['deleteInvite'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof deleteInvite>>,
{ pathParams: DeleteInvitePathParameters }
> = (props) => {
const { pathParams } = props ?? {};
return deleteInvite(pathParams);
};
return { mutationFn, ...mutationOptions };
};
export type DeleteInviteMutationResult = NonNullable<
Awaited<ReturnType<typeof deleteInvite>>
>;
export type DeleteInviteMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Delete invite
*/
export const useDeleteInvite = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof deleteInvite>>,
TError,
{ pathParams: DeleteInvitePathParameters },
TContext
> => {
const mutationOptions = getDeleteInviteMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint gets an invite by token
* @summary Get invite
*/
export const getInvite = (
{ token }: GetInvitePathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetInvite200>({
url: `/api/v1/invite/${token}`,
method: 'GET',
signal,
});
};
export const getGetInviteQueryKey = ({ token }: GetInvitePathParameters) => {
return [`/api/v1/invite/${token}`] as const;
};
export const getGetInviteQueryOptions = <
TData = Awaited<ReturnType<typeof getInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ token }: GetInvitePathParameters,
options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey = queryOptions?.queryKey ?? getGetInviteQueryKey({ token });
const queryFn: QueryFunction<Awaited<ReturnType<typeof getInvite>>> = ({
signal,
}) => getInvite({ token }, signal);
return {
queryKey,
queryFn,
enabled: !!token,
...queryOptions,
} as UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData> & {
queryKey: QueryKey;
};
};
export type GetInviteQueryResult = NonNullable<
Awaited<ReturnType<typeof getInvite>>
>;
export type GetInviteQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get invite
*/
export function useGetInvite<
TData = Awaited<ReturnType<typeof getInvite>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ token }: GetInvitePathParameters,
options?: {
query?: UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetInviteQueryOptions({ token }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get invite
*/
export const invalidateGetInvite = async (
queryClient: QueryClient,
{ token }: GetInvitePathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetInviteQueryKey({ token }) },
options,
);
return queryClient;
};
/**
* This endpoint accepts an invite by token
* @summary Accept invite
*/
export const acceptInvite = (
typesPostableAcceptInviteDTO: BodyType<TypesPostableAcceptInviteDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<AcceptInvite201>({
url: `/api/v1/invite/accept`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: typesPostableAcceptInviteDTO,
signal,
});
};
export const getAcceptInviteMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
> => {
const mutationKey = ['acceptInvite'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof acceptInvite>>,
{ data: BodyType<TypesPostableAcceptInviteDTO> }
> = (props) => {
const { data } = props ?? {};
return acceptInvite(data);
};
return { mutationFn, ...mutationOptions };
};
export type AcceptInviteMutationResult = NonNullable<
Awaited<ReturnType<typeof acceptInvite>>
>;
export type AcceptInviteMutationBody = BodyType<TypesPostableAcceptInviteDTO>;
export type AcceptInviteMutationError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Accept invite
*/
export const useAcceptInvite = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof acceptInvite>>,
TError,
{ data: BodyType<TypesPostableAcceptInviteDTO> },
TContext
> => {
const mutationOptions = getAcceptInviteMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This endpoint creates a bulk invite for a user
* @summary Create bulk invite
@@ -1093,13 +1428,13 @@ export const invalidateGetUser = async (
*/
export const updateUser = (
{ id }: UpdateUserPathParameters,
typesDeprecatedUserDTO: BodyType<TypesDeprecatedUserDTO>,
typesUserDTO: BodyType<TypesUserDTO>,
) => {
return GeneratedAPIInstance<UpdateUser200>({
url: `/api/v1/user/${id}`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: typesDeprecatedUserDTO,
data: typesUserDTO,
});
};
@@ -1110,19 +1445,13 @@ export const getUpdateUserMutationOptions = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateUser>>,
TError,
{
pathParams: UpdateUserPathParameters;
data: BodyType<TypesDeprecatedUserDTO>;
},
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof updateUser>>,
TError,
{
pathParams: UpdateUserPathParameters;
data: BodyType<TypesDeprecatedUserDTO>;
},
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
TContext
> => {
const mutationKey = ['updateUser'];
@@ -1136,10 +1465,7 @@ export const getUpdateUserMutationOptions = <
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof updateUser>>,
{
pathParams: UpdateUserPathParameters;
data: BodyType<TypesDeprecatedUserDTO>;
}
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> }
> = (props) => {
const { pathParams, data } = props ?? {};
@@ -1152,7 +1478,7 @@ export const getUpdateUserMutationOptions = <
export type UpdateUserMutationResult = NonNullable<
Awaited<ReturnType<typeof updateUser>>
>;
export type UpdateUserMutationBody = BodyType<TypesDeprecatedUserDTO>;
export type UpdateUserMutationBody = BodyType<TypesUserDTO>;
export type UpdateUserMutationError = ErrorType<RenderErrorResponseDTO>;
/**
@@ -1165,19 +1491,13 @@ export const useUpdateUser = <
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateUser>>,
TError,
{
pathParams: UpdateUserPathParameters;
data: BodyType<TypesDeprecatedUserDTO>;
},
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof updateUser>>,
TError,
{
pathParams: UpdateUserPathParameters;
data: BodyType<TypesDeprecatedUserDTO>;
},
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
TContext
> => {
const mutationOptions = getUpdateUserMutationOptions(options);

View File

@@ -81,8 +81,7 @@ export const interceptorRejected = async (
response.config.url !== '/sessions/email_password' &&
!(
response.config.url === '/sessions' && response.config.method === 'delete'
) &&
response.config.url !== '/authz/check'
)
) {
try {
const accessToken = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN);

View File

@@ -1,152 +0,0 @@
import axios, { AxiosHeaders, AxiosResponse } from 'axios';
import { interceptorRejected } from './index';
jest.mock('api/browser/localstorage/get', () => ({
__esModule: true,
default: jest.fn(() => 'mock-token'),
}));
jest.mock('api/v2/sessions/rotate/post', () => ({
__esModule: true,
default: jest.fn(() =>
Promise.resolve({
data: { accessToken: 'new-token', refreshToken: 'new-refresh' },
}),
),
}));
jest.mock('AppRoutes/utils', () => ({
__esModule: true,
default: jest.fn(),
}));
jest.mock('axios', () => {
const actualAxios = jest.requireActual('axios');
const mockAxios = jest.fn().mockResolvedValue({ data: 'success' });
return {
...actualAxios,
default: Object.assign(mockAxios, {
...actualAxios.default,
isAxiosError: jest.fn().mockReturnValue(true),
create: actualAxios.create,
}),
__esModule: true,
};
});
describe('interceptorRejected', () => {
beforeEach(() => {
jest.clearAllMocks();
((axios as unknown) as jest.Mock).mockResolvedValue({ data: 'success' });
((axios.isAxiosError as unknown) as jest.Mock).mockReturnValue(true);
});
it('should preserve array payload structure when retrying a 401 request', async () => {
const arrayPayload = [
{ relation: 'assignee', object: { resource: { name: 'role' } } },
{ relation: 'assignee', object: { resource: { name: 'editor' } } },
];
const error = ({
response: {
status: 401,
config: {
url: '/some-endpoint',
method: 'POST',
baseURL: 'http://localhost/',
headers: new AxiosHeaders(),
data: JSON.stringify(arrayPayload),
},
},
config: {
url: '/some-endpoint',
method: 'POST',
baseURL: 'http://localhost/',
headers: new AxiosHeaders(),
data: JSON.stringify(arrayPayload),
},
} as unknown) as AxiosResponse;
try {
await interceptorRejected(error);
} catch {
// Expected to reject after retry
}
const mockAxiosFn = (axios as unknown) as jest.Mock;
expect(mockAxiosFn.mock.calls.length).toBe(1);
const retryCallConfig = mockAxiosFn.mock.calls[0][0];
expect(Array.isArray(JSON.parse(retryCallConfig.data))).toBe(true);
expect(JSON.parse(retryCallConfig.data)).toEqual(arrayPayload);
});
it('should preserve object payload structure when retrying a 401 request', async () => {
const objectPayload = { key: 'value', nested: { data: 123 } };
const error = ({
response: {
status: 401,
config: {
url: '/some-endpoint',
method: 'POST',
baseURL: 'http://localhost/',
headers: new AxiosHeaders(),
data: JSON.stringify(objectPayload),
},
},
config: {
url: '/some-endpoint',
method: 'POST',
baseURL: 'http://localhost/',
headers: new AxiosHeaders(),
data: JSON.stringify(objectPayload),
},
} as unknown) as AxiosResponse;
try {
await interceptorRejected(error);
} catch {
// Expected to reject after retry
}
const mockAxiosFn = (axios as unknown) as jest.Mock;
expect(mockAxiosFn.mock.calls.length).toBe(1);
const retryCallConfig = mockAxiosFn.mock.calls[0][0];
expect(JSON.parse(retryCallConfig.data)).toEqual(objectPayload);
});
it('should handle undefined data gracefully when retrying', async () => {
const error = ({
response: {
status: 401,
config: {
url: '/some-endpoint',
method: 'GET',
baseURL: 'http://localhost/',
headers: new AxiosHeaders(),
data: undefined,
},
},
config: {
url: '/some-endpoint',
method: 'GET',
baseURL: 'http://localhost/',
headers: new AxiosHeaders(),
data: undefined,
},
} as unknown) as AxiosResponse;
try {
await interceptorRejected(error);
} catch {
// Expected to reject after retry
}
const mockAxiosFn = (axios as unknown) as jest.Mock;
expect(mockAxiosFn.mock.calls.length).toBe(1);
const retryCallConfig = mockAxiosFn.mock.calls[0][0];
expect(retryCallConfig.data).toBeUndefined();
});
});

View File

@@ -8,32 +8,42 @@ export const downloadExportData = async (
props: ExportRawDataProps,
): Promise<void> => {
try {
const response = await axios.post<Blob>(
`export_raw_data?format=${encodeURIComponent(props.format)}`,
props.body,
{
responseType: 'blob',
decompress: true,
headers: {
Accept: 'application/octet-stream',
'Content-Type': 'application/json',
},
timeout: 0,
},
);
const queryParams = new URLSearchParams();
queryParams.append('start', String(props.start));
queryParams.append('end', String(props.end));
queryParams.append('filter', props.filter);
props.columns.forEach((col) => {
queryParams.append('columns', col);
});
queryParams.append('order_by', props.orderBy);
queryParams.append('limit', String(props.limit));
queryParams.append('format', props.format);
const response = await axios.get<Blob>(`export_raw_data?${queryParams}`, {
responseType: 'blob', // Important: tell axios to handle response as blob
decompress: true, // Enable automatic decompression
headers: {
Accept: 'application/octet-stream', // Tell server we expect binary data
},
timeout: 0,
});
// Only proceed if the response status is 200
if (response.status !== 200) {
throw new Error(
`Failed to download data: server returned status ${response.status}`,
);
}
// Create blob URL from response data
const blob = new Blob([response.data], { type: 'application/octet-stream' });
const url = window.URL.createObjectURL(blob);
// Create and configure download link
const link = document.createElement('a');
link.href = url;
// Get filename from Content-Disposition header or generate timestamped default
const filename =
response.headers['content-disposition']
?.split('filename=')[1]
@@ -41,6 +51,7 @@ export const downloadExportData = async (
link.setAttribute('download', filename);
// Trigger download
document.body.appendChild(link);
link.click();
link.remove();

View File

@@ -0,0 +1,19 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, PendingInvite } from 'types/api/user/getPendingInvites';
const get = async (): Promise<SuccessResponseV2<PendingInvite[]>> => {
try {
const response = await axios.get<PayloadProps>(`/invite`);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default get;

View File

@@ -0,0 +1,22 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { PayloadProps, Props } from 'types/api/user/accept';
import { UserResponse } from 'types/api/user/getUser';
const accept = async (
props: Props,
): Promise<SuccessResponseV2<UserResponse>> => {
try {
const response = await axios.post<PayloadProps>(`/invite/accept`, props);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default accept;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/user/deleteInvite';
const del = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete(`/invite/${props.id}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default del;

View File

@@ -0,0 +1,28 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import {
InviteDetails,
PayloadProps,
Props,
} from 'types/api/user/getInviteDetails';
const getInviteDetails = async (
props: Props,
): Promise<SuccessResponseV2<InviteDetails>> => {
try {
const response = await axios.get<PayloadProps>(
`/invite/${props.inviteId}?ref=${window.location.href}`,
);
return {
httpStatusCode: response.status,
data: response.data.data,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default getInviteDetails;

View File

@@ -0,0 +1,20 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
import { Props } from 'types/api/user/deleteUser';
const deleteUser = async (props: Props): Promise<SuccessResponseV2<null>> => {
try {
const response = await axios.delete(`/user/${props.userId}`);
return {
httpStatusCode: response.status,
data: null,
};
} catch (error) {
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
}
};
export default deleteUser;

View File

@@ -1,14 +1,8 @@
function UnAuthorized({
width = 137,
height = 137,
}: {
height?: number;
width?: number;
}): JSX.Element {
function UnAuthorized(): JSX.Element {
return (
<svg
width={width}
height={height}
width="137"
height="137"
viewBox="0 0 137 137"
fill="none"
xmlns="http://www.w3.org/2000/svg"

View File

@@ -30,4 +30,3 @@ import '@signozhq/switch';
import '@signozhq/table';
import '@signozhq/toggle-group';
import '@signozhq/tooltip';
import '@signozhq/ui';

View File

@@ -1,97 +0,0 @@
.announcement-banner {
display: flex;
align-items: center;
justify-content: space-between;
gap: var(--spacing-4);
padding: var(--padding-2) var(--padding-4);
height: 40px;
font-family: var(--font-sans), sans-serif;
font-size: var(--label-base-500-font-size);
line-height: var(--label-base-500-line-height);
font-weight: var(--label-base-500-font-weight);
letter-spacing: -0.065px;
&--warning {
background-color: var(--callout-warning-background);
color: var(--callout-warning-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-warning-border);
}
}
&--info {
background-color: var(--callout-primary-background);
color: var(--callout-primary-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-primary-border);
}
}
&--error {
background-color: var(--callout-error-background);
color: var(--callout-error-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-error-border);
}
}
&--success {
background-color: var(--callout-success-background);
color: var(--callout-success-description);
.announcement-banner__action,
.announcement-banner__dismiss {
background: var(--callout-success-border);
}
}
&__body {
display: flex;
align-items: center;
gap: var(--spacing-4);
flex: 1;
min-width: 0;
}
&__icon {
display: flex;
align-items: center;
flex-shrink: 0;
}
&__message {
flex: 1;
min-width: 0;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
line-height: var(--line-height-normal);
strong {
font-weight: var(--font-weight-semibold);
}
}
&__action {
height: 24px;
font-size: var(--label-small-500-font-size);
color: currentColor;
&:hover {
opacity: 0.8;
}
}
&__dismiss {
width: 24px;
height: 24px;
padding: 0;
color: currentColor;
&:hover {
opacity: 0.8;
}
}
}

View File

@@ -1,89 +0,0 @@
import { render, screen, userEvent } from 'tests/test-utils';
import {
AnnouncementBanner,
AnnouncementBannerProps,
PersistedAnnouncementBanner,
} from './index';
const STORAGE_KEY = 'test-banner-dismissed';
function renderBanner(props: Partial<AnnouncementBannerProps> = {}): void {
render(<AnnouncementBanner message="Test message" {...props} />);
}
afterEach(() => {
localStorage.removeItem(STORAGE_KEY);
});
describe('AnnouncementBanner', () => {
it('renders message and default warning variant', () => {
renderBanner({ message: <strong>Heads up</strong> });
const alert = screen.getByRole('alert');
expect(alert).toHaveClass('announcement-banner--warning');
expect(alert).toHaveTextContent('Heads up');
});
it.each(['warning', 'info', 'success', 'error'] as const)(
'renders %s variant correctly',
(type) => {
renderBanner({ type, message: 'Test message' });
const alert = screen.getByRole('alert');
expect(alert).toHaveClass(`announcement-banner--${type}`);
},
);
it('calls action onClick when action button is clicked', async () => {
const onClick = jest.fn() as jest.MockedFunction<() => void>;
renderBanner({ action: { label: 'Go to Settings', onClick } });
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /go to settings/i }));
expect(onClick).toHaveBeenCalledTimes(1);
});
it('hides dismiss button when onClose is not provided and hides icon when icon is null', () => {
renderBanner({ onClose: undefined, icon: null });
expect(
screen.queryByRole('button', { name: /dismiss/i }),
).not.toBeInTheDocument();
expect(
screen.queryByRole('alert')?.querySelector('.announcement-banner__icon'),
).not.toBeInTheDocument();
});
});
describe('PersistedAnnouncementBanner', () => {
it('dismisses on click, calls onDismiss, and persists to localStorage', async () => {
const onDismiss = jest.fn() as jest.MockedFunction<() => void>;
render(
<PersistedAnnouncementBanner
message="Test message"
storageKey={STORAGE_KEY}
onDismiss={onDismiss}
/>,
);
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /dismiss/i }));
expect(screen.queryByRole('alert')).not.toBeInTheDocument();
expect(onDismiss).toHaveBeenCalledTimes(1);
expect(localStorage.getItem(STORAGE_KEY)).toBe('true');
});
it('does not render when storageKey is already set in localStorage', () => {
localStorage.setItem(STORAGE_KEY, 'true');
render(
<PersistedAnnouncementBanner
message="Test message"
storageKey={STORAGE_KEY}
/>,
);
expect(screen.queryByRole('alert')).not.toBeInTheDocument();
});
});

View File

@@ -1,84 +0,0 @@
import { ReactNode } from 'react';
import { Button } from '@signozhq/button';
import {
CircleAlert,
CircleCheckBig,
Info,
TriangleAlert,
X,
} from '@signozhq/icons';
import cx from 'classnames';
import './AnnouncementBanner.styles.scss';
export type AnnouncementBannerType = 'warning' | 'info' | 'error' | 'success';
export interface AnnouncementBannerAction {
label: string;
onClick: () => void;
}
export interface AnnouncementBannerProps {
message: ReactNode;
type?: AnnouncementBannerType;
icon?: ReactNode | null;
action?: AnnouncementBannerAction;
onClose?: () => void;
className?: string;
}
const DEFAULT_ICONS: Record<AnnouncementBannerType, ReactNode> = {
warning: <TriangleAlert size={14} />,
info: <Info size={14} />,
error: <CircleAlert size={14} />,
success: <CircleCheckBig size={14} />,
};
export default function AnnouncementBanner({
message,
type = 'warning',
icon,
action,
onClose,
className,
}: AnnouncementBannerProps): JSX.Element {
const resolvedIcon = icon === null ? null : icon ?? DEFAULT_ICONS[type];
return (
<div
role="alert"
className={cx(
'announcement-banner',
`announcement-banner--${type}`,
className,
)}
>
<div className="announcement-banner__body">
{resolvedIcon && (
<span className="announcement-banner__icon">{resolvedIcon}</span>
)}
<span className="announcement-banner__message">{message}</span>
{action && (
<Button
type="button"
className="announcement-banner__action"
onClick={action.onClick}
>
{action.label}
</Button>
)}
</div>
{onClose && (
<Button
type="button"
aria-label="Dismiss"
className="announcement-banner__dismiss"
onClick={onClose}
>
<X size={14} />
</Button>
)}
</div>
);
}

View File

@@ -1,34 +0,0 @@
import { useState } from 'react';
import AnnouncementBanner, {
AnnouncementBannerProps,
} from './AnnouncementBanner';
interface PersistedAnnouncementBannerProps extends AnnouncementBannerProps {
storageKey: string;
onDismiss?: () => void;
}
function isDismissed(storageKey: string): boolean {
return localStorage.getItem(storageKey) === 'true';
}
export default function PersistedAnnouncementBanner({
storageKey,
onDismiss,
...props
}: PersistedAnnouncementBannerProps): JSX.Element | null {
const [visible, setVisible] = useState(() => !isDismissed(storageKey));
if (!visible) {
return null;
}
const handleClose = (): void => {
localStorage.setItem(storageKey, 'true');
setVisible(false);
onDismiss?.();
};
return <AnnouncementBanner {...props} onClose={handleClose} />;
}

View File

@@ -1,12 +0,0 @@
import AnnouncementBanner from './AnnouncementBanner';
import PersistedAnnouncementBanner from './PersistedAnnouncementBanner';
export type {
AnnouncementBannerAction,
AnnouncementBannerProps,
AnnouncementBannerType,
} from './AnnouncementBanner';
export { AnnouncementBanner, PersistedAnnouncementBanner };
export default AnnouncementBanner;

View File

@@ -7,7 +7,7 @@ import ROUTES from 'constants/routes';
import useUpdatedQuery from 'container/GridCardLayout/useResolveQuery';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useNotifications } from 'hooks/useNotifications';
import { useDashboardStore } from 'providers/Dashboard/store/useDashboardStore';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { AppState } from 'store/reducers';
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource, MetricAggregateOperator } from 'types/common/queryBuilder';
@@ -79,7 +79,7 @@ export function useNavigateToExplorer(): (
);
const { getUpdatedQuery } = useUpdatedQuery();
const { selectedDashboard } = useDashboardStore();
const { selectedDashboard } = useDashboard();
const { notifications } = useNotifications();
return useCallback(

View File

@@ -1,106 +0,0 @@
.create-sa-modal {
max-width: 530px;
background: var(--popover);
border: 1px solid var(--secondary);
border-radius: 4px;
box-shadow: 0 4px 9px 0 rgba(0, 0, 0, 0.04);
[data-slot='dialog-header'] {
padding: var(--padding-4);
border-bottom: 1px solid var(--secondary);
flex-shrink: 0;
background: transparent;
margin: 0;
}
[data-slot='dialog-title'] {
font-size: var(--label-base-400-font-size);
font-weight: var(--label-base-400-font-weight);
line-height: var(--label-base-400-line-height);
letter-spacing: -0.065px;
color: var(--bg-base-white);
margin: 0;
}
[data-slot='dialog-description'] {
padding: 0;
.create-sa-modal__content {
padding: var(--padding-4);
}
}
}
.create-sa-form {
display: flex;
flex-direction: column;
gap: var(--spacing-2);
&__item {
display: flex;
flex-direction: column;
gap: var(--spacing-1);
margin-bottom: var(--spacing-4);
> label {
font-size: var(--paragraph-base-400-font-size);
font-weight: var(--paragraph-base-400-font-weight);
color: var(--foreground);
letter-spacing: -0.07px;
}
}
&__input {
height: 32px;
color: var(--l1-foreground);
background-color: var(--l2-background);
border-color: var(--border);
font-size: var(--paragraph-base-400-font-size);
border-radius: 2px;
width: 100%;
&::placeholder {
color: var(--l3-foreground);
}
&:focus {
border-color: var(--primary);
box-shadow: none;
}
}
&__error {
font-size: var(--paragraph-small-400-font-size);
color: var(--destructive);
line-height: var(--paragraph-small-400-line-height);
margin: 0;
}
&__helper {
font-size: var(--paragraph-small-400-font-size);
color: var(--l3-foreground);
margin: calc(var(--spacing-2) * -1) 0 var(--spacing-4) 0;
line-height: var(--paragraph-small-400-line-height);
}
}
.create-sa-modal__footer {
display: flex;
flex-direction: row;
align-items: center;
justify-content: flex-end;
padding: 0 var(--padding-4);
height: 56px;
min-height: 56px;
border-top: 1px solid var(--secondary);
gap: var(--spacing-4);
flex-shrink: 0;
}
.lightMode {
.create-sa-modal {
[data-slot='dialog-title'] {
color: var(--bg-base-black);
}
}
}

View File

@@ -1,230 +0,0 @@
import { Controller, useForm } from 'react-hook-form';
import { useQueryClient } from 'react-query';
import { Button } from '@signozhq/button';
import { DialogFooter, DialogWrapper } from '@signozhq/dialog';
import { X } from '@signozhq/icons';
import { Input } from '@signozhq/input';
import { toast } from '@signozhq/sonner';
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
import {
invalidateListServiceAccounts,
useCreateServiceAccount,
} from 'api/generated/services/serviceaccount';
import type { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { AxiosError } from 'axios';
import RolesSelect, { useRoles } from 'components/RolesSelect';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import { parseAsBoolean, useQueryState } from 'nuqs';
import { EMAIL_REGEX } from 'utils/app';
import './CreateServiceAccountModal.styles.scss';
interface FormValues {
name: string;
email: string;
roles: string[];
}
function CreateServiceAccountModal(): JSX.Element {
const queryClient = useQueryClient();
const [isOpen, setIsOpen] = useQueryState(
SA_QUERY_PARAMS.CREATE_SA,
parseAsBoolean.withDefault(false),
);
const {
control,
handleSubmit,
reset,
formState: { isValid, errors },
} = useForm<FormValues>({
mode: 'onChange',
defaultValues: {
name: '',
email: '',
roles: [],
},
});
const {
mutate: createServiceAccount,
isLoading: isSubmitting,
} = useCreateServiceAccount({
mutation: {
onSuccess: async () => {
toast.success('Service account created successfully', {
richColors: true,
});
reset();
await setIsOpen(null);
await invalidateListServiceAccounts(queryClient);
},
onError: (err) => {
const errMessage =
convertToApiError(
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
)?.getErrorMessage() || 'An error occurred';
toast.error(`Failed to create service account: ${errMessage}`, {
richColors: true,
});
},
},
});
const {
roles,
isLoading: rolesLoading,
isError: rolesError,
error: rolesErrorObj,
refetch: refetchRoles,
} = useRoles();
function handleClose(): void {
reset();
setIsOpen(null);
}
function handleCreate(values: FormValues): void {
createServiceAccount({
data: {
name: values.name.trim(),
email: values.email.trim(),
roles: values.roles,
},
});
}
return (
<DialogWrapper
title="New Service Account"
open={isOpen}
onOpenChange={(open): void => {
if (!open) {
handleClose();
}
}}
showCloseButton
width="narrow"
className="create-sa-modal"
disableOutsideClick={false}
>
<div className="create-sa-modal__content">
<form
id="create-sa-form"
className="create-sa-form"
onSubmit={handleSubmit(handleCreate)}
>
<div className="create-sa-form__item">
<label htmlFor="sa-name">Name</label>
<Controller
name="name"
control={control}
rules={{ required: 'Name is required' }}
render={({ field }): JSX.Element => (
<Input
id="sa-name"
placeholder="Enter a name"
className="create-sa-form__input"
value={field.value}
onChange={field.onChange}
onBlur={field.onBlur}
/>
)}
/>
{errors.name && (
<p className="create-sa-form__error">{errors.name.message}</p>
)}
</div>
<div className="create-sa-form__item">
<label htmlFor="sa-email">Email Address</label>
<Controller
name="email"
control={control}
rules={{
required: 'Email Address is required',
pattern: {
value: EMAIL_REGEX,
message: 'Please enter a valid email address',
},
}}
render={({ field }): JSX.Element => (
<Input
id="sa-email"
type="email"
placeholder="email@example.com"
className="create-sa-form__input"
value={field.value}
onChange={field.onChange}
onBlur={field.onBlur}
/>
)}
/>
{errors.email && (
<p className="create-sa-form__error">{errors.email.message}</p>
)}
</div>
<p className="create-sa-form__helper">
Used only for notifications about this service account. It is not used for
authentication.
</p>
<div className="create-sa-form__item">
<label htmlFor="sa-roles">Roles</label>
<Controller
name="roles"
control={control}
rules={{
validate: (value): string | true =>
value.length > 0 || 'At least one role is required',
}}
render={({ field }): JSX.Element => (
<RolesSelect
id="sa-roles"
mode="multiple"
roles={roles}
loading={rolesLoading}
isError={rolesError}
error={rolesErrorObj}
onRefetch={refetchRoles}
placeholder="Select roles"
value={field.value}
onChange={field.onChange}
/>
)}
/>
{errors.roles && (
<p className="create-sa-form__error">{errors.roles.message}</p>
)}
</div>
</form>
</div>
<DialogFooter className="create-sa-modal__footer">
<Button
type="button"
variant="solid"
color="secondary"
size="sm"
onClick={handleClose}
>
<X size={12} />
Cancel
</Button>
<Button
type="submit"
form="create-sa-form"
variant="solid"
color="primary"
size="sm"
loading={isSubmitting}
disabled={!isValid}
>
Create Service Account
</Button>
</DialogFooter>
</DialogWrapper>
);
}
export default CreateServiceAccountModal;
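The modal above keeps its open/closed state in the URL (the `create-sa` query param) rather than in local React state, so any trigger elsewhere in the app only needs to flip that param. Below is a minimal sketch of a hypothetical trigger button, assuming the same @signozhq/button and nuqs APIs the modal itself uses; the real trigger lives in ServiceAccountsSettings and is not part of this diff:

import { Button } from '@signozhq/button';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import { parseAsBoolean, useQueryState } from 'nuqs';

// Hypothetical trigger: opens CreateServiceAccountModal by setting the same
// `create-sa` query param the modal reads via useQueryState.
function NewServiceAccountButton(): JSX.Element {
	const [, setIsOpen] = useQueryState(
		SA_QUERY_PARAMS.CREATE_SA,
		parseAsBoolean.withDefault(false),
	);

	return (
		<Button
			variant="solid"
			color="primary"
			size="sm"
			onClick={(): void => {
				// Setting the param to true opens the modal; the modal resets
				// it to null on close or after a successful create.
				void setIsOpen(true);
			}}
		>
			New Service Account
		</Button>
	);
}

Because the state is URL-backed, an open modal survives a refresh and can be deep-linked, which is the main reason to prefer useQueryState over useState here.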

View File

@@ -1,179 +0,0 @@
import { toast } from '@signozhq/sonner';
import { listRolesSuccessResponse } from 'mocks-server/__mockdata__/roles';
import { rest, server } from 'mocks-server/server';
import { NuqsTestingAdapter } from 'nuqs/adapters/testing';
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import CreateServiceAccountModal from '../CreateServiceAccountModal';
jest.mock('@signozhq/sonner', () => ({
toast: { success: jest.fn(), error: jest.fn() },
}));
const mockToast = jest.mocked(toast);
const ROLES_ENDPOINT = '*/api/v1/roles';
const SERVICE_ACCOUNTS_ENDPOINT = '*/api/v1/service_accounts';
function renderModal(): ReturnType<typeof render> {
return render(
<NuqsTestingAdapter searchParams={{ 'create-sa': 'true' }} hasMemory>
<CreateServiceAccountModal />
</NuqsTestingAdapter>,
);
}
describe('CreateServiceAccountModal', () => {
beforeEach(() => {
jest.clearAllMocks();
server.use(
rest.get(ROLES_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
),
rest.post(SERVICE_ACCOUNTS_ENDPOINT, (_, res, ctx) =>
res(ctx.status(201), ctx.json({ status: 'success', data: {} })),
),
);
});
afterEach(() => {
server.resetHandlers();
});
it('submit button is disabled when form is empty', () => {
renderModal();
expect(
screen.getByRole('button', { name: /Create Service Account/i }),
).toBeDisabled();
});
it('submit button remains disabled when email is invalid', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderModal();
await user.type(screen.getByPlaceholderText('Enter a name'), 'My Bot');
await user.type(
screen.getByPlaceholderText('email@example.com'),
'not-an-email',
);
await user.click(screen.getByText('Select roles'));
await user.click(await screen.findByTitle('signoz-admin'));
await waitFor(() =>
expect(
screen.getByRole('button', { name: /Create Service Account/i }),
).toBeDisabled(),
);
});
it('successful submit shows toast.success and closes modal', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderModal();
await user.type(screen.getByPlaceholderText('Enter a name'), 'Deploy Bot');
await user.type(
screen.getByPlaceholderText('email@example.com'),
'deploy@acme.io',
);
await user.click(screen.getByText('Select roles'));
await user.click(await screen.findByTitle('signoz-admin'));
const submitBtn = screen.getByRole('button', {
name: /Create Service Account/i,
});
await waitFor(() => expect(submitBtn).not.toBeDisabled());
await user.click(submitBtn);
await waitFor(() => {
expect(mockToast.success).toHaveBeenCalledWith(
'Service account created successfully',
expect.anything(),
);
});
await waitFor(() => {
expect(
screen.queryByRole('dialog', { name: /New Service Account/i }),
).not.toBeInTheDocument();
});
});
it('shows toast.error on API error and keeps modal open', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.post(SERVICE_ACCOUNTS_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(500),
ctx.json({ status: 'error', error: 'Internal Server Error' }),
),
),
);
renderModal();
await user.type(screen.getByPlaceholderText('Enter a name'), 'Dupe Bot');
await user.type(
screen.getByPlaceholderText('email@example.com'),
'dupe@acme.io',
);
await user.click(screen.getByText('Select roles'));
await user.click(await screen.findByTitle('signoz-admin'));
const submitBtn = screen.getByRole('button', {
name: /Create Service Account/i,
});
await waitFor(() => expect(submitBtn).not.toBeDisabled());
await user.click(submitBtn);
await waitFor(() => {
expect(mockToast.error).toHaveBeenCalledWith(
expect.stringMatching(/Failed to create service account/i),
expect.anything(),
);
});
expect(
screen.getByRole('dialog', { name: /New Service Account/i }),
).toBeInTheDocument();
});
it('Cancel button closes modal without submitting', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderModal();
await screen.findByRole('dialog', { name: /New Service Account/i });
await user.click(screen.getByRole('button', { name: /Cancel/i }));
expect(
screen.queryByRole('dialog', { name: /New Service Account/i }),
).not.toBeInTheDocument();
});
it('shows "Name is required" after clearing the name field', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderModal();
const nameInput = screen.getByPlaceholderText('Enter a name');
await user.type(nameInput, 'Bot');
await user.clear(nameInput);
await screen.findByText('Name is required');
});
it('shows "Please enter a valid email address" for a malformed email', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
renderModal();
await user.type(
screen.getByPlaceholderText('email@example.com'),
'not-an-email',
);
await screen.findByText('Please enter a valid email address');
});
});
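These tests lean on MSW's override pattern: beforeEach installs happy-path handlers, and a single test can shadow an endpoint with server.use because the most recently registered handler for a route wins until afterEach calls server.resetHandlers(). As a sketch, here is one more case that could be added inside the describe block above; the 409 response body is an assumption, not the real API contract:

// Hypothetical extra case, same override pattern as the 500 test above.
it('surfaces a duplicate-email error from the API', async () => {
	const user = userEvent.setup({ pointerEventsCheck: 0 });
	server.use(
		rest.post(SERVICE_ACCOUNTS_ENDPOINT, (_, res, ctx) =>
			res(
				ctx.status(409),
				ctx.json({ status: 'error', error: 'email already exists' }),
			),
		),
	);
	renderModal();
	await user.type(screen.getByPlaceholderText('Enter a name'), 'Dupe Bot');
	await user.type(
		screen.getByPlaceholderText('email@example.com'),
		'dupe@acme.io',
	);
	await user.click(screen.getByText('Select roles'));
	await user.click(await screen.findByTitle('signoz-admin'));
	const submitBtn = screen.getByRole('button', {
		name: /Create Service Account/i,
	});
	await waitFor(() => expect(submitBtn).not.toBeDisabled());
	await user.click(submitBtn);
	await waitFor(() =>
		expect(mockToast.error).toHaveBeenCalledWith(
			expect.stringMatching(/Failed to create service account/i),
			expect.anything(),
		),
	);
});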

View File

@@ -1,31 +1,6 @@
.custom-time-picker {
display: flex;
flex-direction: row;
align-items: center;
gap: 4px;
.zoom-out-btn {
display: flex;
align-items: center;
justify-content: center;
flex-shrink: 0;
color: var(--foreground);
border: 1px solid var(--border);
border-radius: 2px;
box-shadow: none;
padding: 10px;
height: 33px;
&:hover:not(:disabled) {
color: var(--bg-vanilla-100);
background: var(--primary);
}
&:disabled {
opacity: 0.5;
cursor: not-allowed;
}
}
flex-direction: column;
.timeSelection-input {
&:hover {

View File

@@ -16,15 +16,6 @@ jest.mock('react-router-dom', () => {
};
});
jest.mock('react-redux', () => ({
...jest.requireActual('react-redux'),
useDispatch: jest.fn(() => jest.fn()),
useSelector: jest.fn(() => ({
minTime: 0,
maxTime: Date.now(),
})),
}));
jest.mock('providers/Timezone', () => {
const actual = jest.requireActual('providers/Timezone');

View File

@@ -7,11 +7,9 @@ import {
useState,
} from 'react';
import { useLocation } from 'react-router-dom';
import { Button } from '@signozhq/button';
import { Input, InputRef, Popover, Tooltip } from 'antd';
import cx from 'classnames';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { QueryParams } from 'constants/query';
import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
import {
FixedDurationSuggestionOptions,
@@ -19,11 +17,9 @@ import {
RelativeDurationSuggestionOptions,
} from 'container/TopNav/DateTimeSelectionV2/constants';
import dayjs from 'dayjs';
import { useZoomOut } from 'hooks/useZoomOut';
import { isValidShortHandDateTimeFormat } from 'lib/getMinMax';
import { isZoomOutDisabled } from 'lib/zoomOutUtils';
import { defaultTo, isFunction, noop } from 'lodash-es';
import { ChevronDown, ChevronUp, ZoomOut } from 'lucide-react';
import { ChevronDown, ChevronUp } from 'lucide-react';
import { useTimezone } from 'providers/Timezone';
import { getTimeDifference, validateEpochRange } from 'utils/epochUtils';
import { popupContainer } from 'utils/selectPopupContainer';
@@ -70,8 +66,6 @@ interface CustomTimePickerProps {
showRecentlyUsed?: boolean;
minTime: number;
maxTime: number;
/** When true, zoom-out button is hidden (e.g. in drawer/modal time selection) */
isModalTimeSelection?: boolean;
}
function CustomTimePicker({
@@ -94,7 +88,6 @@ function CustomTimePicker({
showRecentlyUsed = true,
minTime,
maxTime,
isModalTimeSelection = false,
}: CustomTimePickerProps): JSX.Element {
const [
selectedTimePlaceholderValue,
@@ -123,14 +116,6 @@ function CustomTimePicker({
const [isOpenedFromFooter, setIsOpenedFromFooter] = useState(false);
const durationMs = (maxTime - minTime) / 1e6;
const zoomOutDisabled = showLiveLogs || isZoomOutDisabled(durationMs);
const handleZoomOut = useZoomOut({
isDisabled: zoomOutDisabled,
urlParamsToDelete: [QueryParams.activeLogId],
});
// Builds the selected time range label in relative format:
// 1m, 2h, 3d, 4w -> Last 1 minute, Last 2 hours, Last 3 days, Last 4 weeks
const getSelectedTimeRangeLabelInRelativeFormat = (
@@ -297,11 +282,7 @@ function CustomTimePicker({
resetErrorStatus();
};
const handleInputPressEnter = (
event?: React.KeyboardEvent<HTMLInputElement>,
): void => {
event?.preventDefault();
event?.stopPropagation();
const handleInputPressEnter = (): void => {
// check if the entered time is in the format of 1m, 2h, 3d, 4w
const isTimeDurationShortHandFormat = /^(\d+)([mhdw])$/.test(inputValue);
@@ -650,23 +631,6 @@ function CustomTimePicker({
/>
</Popover>
</Tooltip>
{!showLiveLogs && !isModalTimeSelection && (
<Tooltip
title={
zoomOutDisabled ? 'Zoom out time range is limited to 1 month' : 'Zoom out'
}
>
<span>
<Button
className="zoom-out-btn"
onClick={handleZoomOut}
disabled={zoomOutDisabled}
data-testid="zoom-out-btn"
prefixIcon={<ZoomOut size={14} />}
/>
</span>
</Tooltip>
)}
</div>
);
}
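One detail of the removed wiring is easy to miss: minTime and maxTime arrive as epoch nanoseconds, so the deleted code divided by 1e6 to get milliseconds before checking the zoom-out limit. A sketch of that gating logic follows, with the one-month threshold inferred from the tooltip text and the deleted test ("disable zoom button when time range is >= 1 month"); the exact cutoff inside lib/zoomOutUtils may differ:

// Assumption: "1 month" approximated as 30 days; the real constant in
// lib/zoomOutUtils is not shown in this diff.
const MS_PER_MONTH = 30 * 24 * 60 * 60 * 1000;

function isZoomOutDisabledSketch(minTimeNs: number, maxTimeNs: number): boolean {
	// Epoch nanoseconds -> milliseconds, matching `(maxTime - minTime) / 1e6`
	// in the removed component code.
	const durationMs = (maxTimeNs - minTimeNs) / 1e6;
	return durationMs >= MS_PER_MONTH;
}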

View File

@@ -1,169 +0,0 @@
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { QueryParams } from 'constants/query';
import { GlobalReducer } from 'types/reducer/globalTime';
import CustomTimePicker from '../CustomTimePicker';
const MS_PER_MIN = 60 * 1000;
const NOW_MS = 1705312800000;
const mockDispatch = jest.fn();
const mockSafeNavigate = jest.fn();
const mockUrlQueryDelete = jest.fn();
const mockUrlQuerySet = jest.fn();
interface MockAppState {
globalTime: Pick<GlobalReducer, 'minTime' | 'maxTime'>;
}
jest.mock('react-redux', () => ({
useDispatch: (): jest.Mock => mockDispatch,
useSelector: (selector: (state: MockAppState) => unknown): unknown => {
const mockState: MockAppState = {
globalTime: {
minTime: (NOW_MS - 15 * MS_PER_MIN) * 1e6,
maxTime: NOW_MS * 1e6,
},
};
return selector(mockState);
},
}));
jest.mock('hooks/useSafeNavigate', () => ({
useSafeNavigate: (): { safeNavigate: jest.Mock } => ({
safeNavigate: mockSafeNavigate,
}),
}));
interface MockUrlQuery {
delete: typeof mockUrlQueryDelete;
set: typeof mockUrlQuerySet;
get: () => null;
toString: () => string;
}
jest.mock('hooks/useUrlQuery', () => ({
__esModule: true,
default: (): MockUrlQuery => ({
delete: mockUrlQueryDelete,
set: mockUrlQuerySet,
get: (): null => null,
toString: (): string => 'relativeTime=45m',
}),
}));
jest.mock('providers/Timezone', () => ({
useTimezone: (): { timezone: { value: string; offset: string } } => ({
timezone: { value: 'UTC', offset: 'UTC' },
}),
}));
jest.mock('react-router-dom', () => ({
useLocation: (): { pathname: string } => ({ pathname: '/logs-explorer' }),
}));
const MS_PER_DAY = 24 * 60 * 60 * 1000;
const now = Date.now();
const defaultProps = {
onSelect: jest.fn(),
onError: jest.fn(),
selectedValue: '15m',
selectedTime: '15m',
onValidCustomDateChange: jest.fn(),
open: false,
setOpen: jest.fn(),
items: [
{ value: '15m', label: 'Last 15 minutes' },
{ value: '1h', label: 'Last 1 hour' },
],
minTime: (now - 15 * 60 * 1000) * 1e6,
maxTime: now * 1e6,
};
describe('CustomTimePicker - zoom out button', () => {
beforeEach(() => {
jest.clearAllMocks();
jest.spyOn(Date, 'now').mockReturnValue(NOW_MS);
});
afterEach(() => {
jest.restoreAllMocks();
});
it('should render zoom out button when showLiveLogs is false', () => {
render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
expect(screen.getByTestId('zoom-out-btn')).toBeInTheDocument();
});
it('should not render zoom out button when showLiveLogs is true', () => {
render(<CustomTimePicker {...defaultProps} showLiveLogs={true} />);
expect(screen.queryByTestId('zoom-out-btn')).not.toBeInTheDocument();
});
it('should not render zoom out button when isModalTimeSelection is true', () => {
render(
<CustomTimePicker
{...defaultProps}
showLiveLogs={false}
isModalTimeSelection={true}
/>,
);
expect(screen.queryByTestId('zoom-out-btn')).not.toBeInTheDocument();
});
it('should call handleZoomOut when zoom out button is clicked', async () => {
render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
const zoomOutBtn = screen.getByTestId('zoom-out-btn');
await userEvent.click(zoomOutBtn);
expect(mockDispatch).toHaveBeenCalled();
expect(mockUrlQuerySet).toHaveBeenCalledWith(QueryParams.relativeTime, '45m');
expect(mockSafeNavigate).toHaveBeenCalledWith(
expect.stringMatching(/\/logs-explorer\?relativeTime=45m/),
);
});
it('should use real ladder logic: 15m range zooms to 45m preset and updates URL', async () => {
render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
const zoomOutBtn = screen.getByTestId('zoom-out-btn');
await userEvent.click(zoomOutBtn);
expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.startTime);
expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.endTime);
expect(mockUrlQuerySet).toHaveBeenCalledWith(QueryParams.relativeTime, '45m');
expect(mockSafeNavigate).toHaveBeenCalledWith(
expect.stringMatching(/\/logs-explorer\?relativeTime=45m/),
);
expect(mockDispatch).toHaveBeenCalled();
});
it('should delete activeLogId when zoom out is clicked', async () => {
render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
const zoomOutBtn = screen.getByTestId('zoom-out-btn');
await userEvent.click(zoomOutBtn);
expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.activeLogId);
});
it('should disable zoom button when time range is >= 1 month', () => {
const now = Date.now();
render(
<CustomTimePicker
{...defaultProps}
minTime={(now - 31 * MS_PER_DAY) * 1e6}
maxTime={now * 1e6}
showLiveLogs={false}
/>,
);
const zoomOutBtn = screen.getByTestId('zoom-out-btn');
expect(zoomOutBtn).toBeDisabled();
});
});
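The "ladder logic" these deleted tests pin down is only visible from the outside: a 15-minute range zooms out to the 45m preset, i.e. the range widens and snaps to a preset relative time that lands in the URL as relativeTime=45m. Below is a hypothetical reconstruction under those constraints; the preset list and the 3x factor are assumptions, since the real ladder in useZoomOut / lib/zoomOutUtils is not part of this diff:

// Hypothetical ladder: widen the current range 3x (15m -> 45m) and snap up
// to the nearest preset. Only the 15m -> 45m step is guaranteed by the tests.
const PRESET_MINUTES = [5, 15, 30, 45, 60, 180, 360, 720, 1440];

function zoomOutPreset(currentMinutes: number): string {
	const widened = currentMinutes * 3;
	const next =
		PRESET_MINUTES.find((preset) => preset >= widened) ??
		PRESET_MINUTES[PRESET_MINUTES.length - 1];
	return `${next}m`;
}

// zoomOutPreset(15) === '45m', matching the asserted relativeTime URL param.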

View File

@@ -1,331 +0,0 @@
// eslint-disable-next-line no-restricted-imports
import { Provider } from 'react-redux';
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import { message } from 'antd';
import configureStore from 'redux-mock-store';
import store from 'store';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { EQueryType } from 'types/common/dashboard';
import { DataSource, StringOperators } from 'types/common/queryBuilder';
import '@testing-library/jest-dom';
import { DownloadFormats, DownloadRowCounts } from './constants';
import DownloadOptionsMenu from './DownloadOptionsMenu';
const mockDownloadExportData = jest.fn().mockResolvedValue(undefined);
jest.mock('api/v1/download/downloadExportData', () => ({
downloadExportData: (...args: any[]): any => mockDownloadExportData(...args),
default: (...args: any[]): any => mockDownloadExportData(...args),
}));
jest.mock('antd', () => {
const actual = jest.requireActual('antd');
return {
...actual,
message: {
success: jest.fn(),
error: jest.fn(),
},
};
});
const mockUseQueryBuilder = jest.fn();
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: (): any => mockUseQueryBuilder(),
}));
const mockStore = configureStore([]);
const createMockReduxStore = (): any =>
mockStore({
...store.getState(),
});
const createMockStagedQuery = (dataSource: DataSource): Query => ({
id: 'test-query-id',
queryType: EQueryType.QUERY_BUILDER,
builder: {
queryData: [
{
queryName: 'A',
dataSource,
aggregateOperator: StringOperators.NOOP,
aggregateAttribute: {
id: '',
dataType: '' as any,
key: '',
type: '',
},
aggregations: [{ expression: 'count()' }],
functions: [],
filter: { expression: 'status = 200' },
filters: { items: [], op: 'AND' },
groupBy: [],
expression: 'A',
disabled: false,
having: { expression: '' } as any,
limit: null,
stepInterval: null,
orderBy: [{ columnName: 'timestamp', order: 'desc' }],
legend: '',
selectColumns: [],
},
],
queryFormulas: [],
queryTraceOperator: [],
},
promql: [],
clickhouse_sql: [],
});
const renderWithStore = (dataSource: DataSource): void => {
const mockReduxStore = createMockReduxStore();
render(
<Provider store={mockReduxStore}>
<DownloadOptionsMenu dataSource={dataSource} />
</Provider>,
);
};
describe.each([
[DataSource.LOGS, 'logs'],
[DataSource.TRACES, 'traces'],
])('DownloadOptionsMenu for %s', (dataSource, signal) => {
const testId = `periscope-btn-download-${dataSource}`;
beforeEach(() => {
mockDownloadExportData.mockReset().mockResolvedValue(undefined);
(message.success as jest.Mock).mockReset();
(message.error as jest.Mock).mockReset();
mockUseQueryBuilder.mockReturnValue({
stagedQuery: createMockStagedQuery(dataSource),
});
});
it('renders download button', () => {
renderWithStore(dataSource);
const button = screen.getByTestId(testId);
expect(button).toBeInTheDocument();
expect(button).toHaveClass('periscope-btn', 'ghost');
});
it('shows popover with export options when download button is clicked', () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
expect(screen.getByRole('dialog')).toBeInTheDocument();
expect(screen.getByText('FORMAT')).toBeInTheDocument();
expect(screen.getByText('Number of Rows')).toBeInTheDocument();
expect(screen.getByText('Columns')).toBeInTheDocument();
});
it('allows changing export format', () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
const csvRadio = screen.getByRole('radio', { name: 'csv' });
const jsonlRadio = screen.getByRole('radio', { name: 'jsonl' });
expect(csvRadio).toBeChecked();
fireEvent.click(jsonlRadio);
expect(jsonlRadio).toBeChecked();
expect(csvRadio).not.toBeChecked();
});
it('allows changing row limit', () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
const tenKRadio = screen.getByRole('radio', { name: '10k' });
const fiftyKRadio = screen.getByRole('radio', { name: '50k' });
expect(tenKRadio).toBeChecked();
fireEvent.click(fiftyKRadio);
expect(fiftyKRadio).toBeChecked();
expect(tenKRadio).not.toBeChecked();
});
it('allows changing columns scope', () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
const allColumnsRadio = screen.getByRole('radio', { name: 'All' });
const selectedColumnsRadio = screen.getByRole('radio', { name: 'Selected' });
expect(allColumnsRadio).toBeChecked();
fireEvent.click(selectedColumnsRadio);
expect(selectedColumnsRadio).toBeChecked();
expect(allColumnsRadio).not.toBeChecked();
});
it('calls downloadExportData with correct format and POST body', async () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
const callArgs = mockDownloadExportData.mock.calls[0][0];
expect(callArgs.format).toBe(DownloadFormats.CSV);
expect(callArgs.body).toBeDefined();
expect(callArgs.body.requestType).toBe('raw');
expect(callArgs.body.compositeQuery.queries).toHaveLength(1);
const query = callArgs.body.compositeQuery.queries[0];
expect(query.type).toBe('builder_query');
expect(query.spec.signal).toBe(signal);
expect(query.spec.limit).toBe(DownloadRowCounts.TEN_K);
});
});
it('clears groupBy and having in the export payload', async () => {
const mockQuery = createMockStagedQuery(dataSource);
mockQuery.builder.queryData[0].groupBy = [
{ key: 'service', dataType: 'string' as any, type: '' },
];
mockQuery.builder.queryData[0].having = {
expression: 'count() > 10',
} as any;
mockUseQueryBuilder.mockReturnValue({ stagedQuery: mockQuery });
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
const callArgs = mockDownloadExportData.mock.calls[0][0];
const query = callArgs.body.compositeQuery.queries[0];
expect(query.spec.groupBy).toBeUndefined();
expect(query.spec.having).toEqual({ expression: '' });
});
});
it('keeps selectColumns when column scope is Selected', async () => {
const mockQuery = createMockStagedQuery(dataSource);
mockQuery.builder.queryData[0].selectColumns = [
{ name: 'http.status', fieldDataType: 'int64', fieldContext: 'attribute' },
] as any;
mockUseQueryBuilder.mockReturnValue({ stagedQuery: mockQuery });
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
const callArgs = mockDownloadExportData.mock.calls[0][0];
const query = callArgs.body.compositeQuery.queries[0];
expect(query.spec.selectFields).toEqual([
expect.objectContaining({
name: 'http.status',
fieldDataType: 'int64',
}),
]);
});
});
it('sends no selectFields when column scope is All', async () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByRole('radio', { name: 'All' }));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
const callArgs = mockDownloadExportData.mock.calls[0][0];
const query = callArgs.body.compositeQuery.queries[0];
expect(query.spec.selectFields).toBeUndefined();
});
});
it('handles successful export with success message', async () => {
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(message.success).toHaveBeenCalledWith(
'Export completed successfully',
);
});
});
it('handles export failure with error message', async () => {
mockDownloadExportData.mockRejectedValueOnce(new Error('Server error'));
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(message.error).toHaveBeenCalledWith(
`Failed to export ${dataSource}. Please try again.`,
);
});
});
it('handles UI state correctly during export process', async () => {
let resolveDownload: () => void;
mockDownloadExportData.mockImplementationOnce(
() =>
new Promise<void>((resolve) => {
resolveDownload = resolve;
}),
);
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
expect(screen.getByRole('dialog')).toBeInTheDocument();
fireEvent.click(screen.getByText('Export'));
expect(screen.getByTestId(testId)).toBeDisabled();
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
resolveDownload!();
await waitFor(() => {
expect(screen.getByTestId(testId)).not.toBeDisabled();
});
});
});
describe('DownloadOptionsMenu for traces with queryTraceOperator', () => {
const dataSource = DataSource.TRACES;
const testId = `periscope-btn-download-${dataSource}`;
beforeEach(() => {
mockDownloadExportData.mockReset().mockResolvedValue(undefined);
(message.success as jest.Mock).mockReset();
});
it('applies limit and clears groupBy on queryTraceOperator entries', async () => {
const query = createMockStagedQuery(dataSource);
query.builder.queryTraceOperator = [
{
...query.builder.queryData[0],
queryName: 'TraceOp1',
expression: 'TraceOp1',
groupBy: [{ key: 'service', dataType: 'string' as any, type: '' }],
},
];
mockUseQueryBuilder.mockReturnValue({ stagedQuery: query });
renderWithStore(dataSource);
fireEvent.click(screen.getByTestId(testId));
fireEvent.click(screen.getByRole('radio', { name: '50k' }));
fireEvent.click(screen.getByText('Export'));
await waitFor(() => {
expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
const callArgs = mockDownloadExportData.mock.calls[0][0];
const queries = callArgs.body.compositeQuery.queries;
const traceOpQuery = queries.find((q: any) => q.spec.name === 'TraceOp1');
if (traceOpQuery) {
expect(traceOpQuery.spec.limit).toBe(DownloadRowCounts.FIFTY_K);
expect(traceOpQuery.spec.groupBy).toBeUndefined();
}
});
});
});
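Taken together, the assertions above describe a consistent payload transform for raw export: every builder query (and any trace-operator query) gets the chosen row limit, groupBy is dropped, having is emptied, and selectColumns are forwarded as selectFields only when the user picks the Selected column scope. The sketch below is inferred purely from those expectations, not copied from DownloadOptionsMenu itself:

// Inferred payload shaping; field names follow the test assertions
// (spec.signal, spec.limit, spec.having, spec.selectFields).
interface ExportQuerySpec {
	name: string;
	signal: string;
	limit: number;
	having: { expression: string };
	groupBy?: unknown[];
	selectFields?: unknown[];
}

function toExportSpec(
	queryData: { queryName: string; selectColumns: unknown[] },
	signal: string,
	limit: number,
	columnScope: 'All' | 'Selected',
): ExportQuerySpec {
	return {
		name: queryData.queryName,
		signal, // 'logs' or 'traces', from the data source
		limit, // chosen row count, e.g. DownloadRowCounts.TEN_K
		having: { expression: '' }, // cleared for raw export
		groupBy: undefined, // grouping is dropped for raw export
		// columns travel only under the "Selected" scope; "All" sends none
		selectFields:
			columnScope === 'Selected' ? queryData.selectColumns : undefined,
	};
}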

Some files were not shown because too many files have changed in this diff.