mirror of
https://github.com/SigNoz/signoz.git
synced 2026-04-30 15:40:27 +01:00
Compare commits
112 Commits
debug_time
...
refactor/c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
94f432a3db | ||
|
|
79b4c2e4b0 | ||
|
|
866e541e29 | ||
|
|
b1efb66197 | ||
|
|
0276496669 | ||
|
|
b0eec8132b | ||
|
|
efbeca23cf | ||
|
|
6f3a2e6a83 | ||
|
|
807211b8d8 | ||
|
|
b811991f9d | ||
|
|
2b1da9aac2 | ||
|
|
a321ef8de8 | ||
|
|
d9f8a4ae5a | ||
|
|
95ed125bd9 | ||
|
|
1a85ccb373 | ||
|
|
2d8a00bf18 | ||
|
|
f1b26b310f | ||
|
|
78b0836974 | ||
|
|
7df1f25bcd | ||
|
|
a5f8c199a5 | ||
|
|
dfe2a6a9e5 | ||
|
|
16a915be94 | ||
|
|
cc6f2170a5 | ||
|
|
4ffab5f580 | ||
|
|
644228735b | ||
|
|
29ec71b98f | ||
|
|
ca9cbd92e4 | ||
|
|
0faef8705d | ||
|
|
2ca9085b52 | ||
|
|
b7d0c8b5a2 | ||
|
|
ce5499d5a7 | ||
|
|
4554a09a42 | ||
|
|
794a7f4ca6 | ||
|
|
fd3b1c5374 | ||
|
|
e52c5683dd | ||
|
|
90e3cb6775 | ||
|
|
155f287462 | ||
|
|
c8fcc48022 | ||
|
|
44b6885639 | ||
|
|
0e5a128325 | ||
|
|
d6caa4f2c7 | ||
|
|
fd19ff8e5e | ||
|
|
7b9e93162f | ||
|
|
f86371566d | ||
|
|
9115803084 | ||
|
|
0c14d8f966 | ||
|
|
f106f57097 | ||
|
|
7afb461af8 | ||
|
|
a21fbb4ee0 | ||
|
|
0369842f3d | ||
|
|
59cd96562a | ||
|
|
cc4475cab7 | ||
|
|
5bafdeb373 | ||
|
|
24b72084ac | ||
|
|
2db83b453d | ||
|
|
ac8c648420 | ||
|
|
bede6be4b8 | ||
|
|
dd3d60e6df | ||
|
|
538ab686d2 | ||
|
|
936a325cb9 | ||
|
|
c6cdcd0143 | ||
|
|
2f012715b4 | ||
|
|
cd9211d718 | ||
|
|
aa05a7bf14 | ||
|
|
99327960b0 | ||
|
|
12b02a1002 | ||
|
|
4ce220ba92 | ||
|
|
0211ddf0cb | ||
|
|
0601c28782 | ||
|
|
580610dbfa | ||
|
|
e5eb62e45b | ||
|
|
7371dcacf0 | ||
|
|
3cdf3e06f3 | ||
|
|
2d2aa02a81 | ||
|
|
dd9723ad13 | ||
|
|
3651469416 | ||
|
|
febce75734 | ||
|
|
f8c38df2bf | ||
|
|
e1616f3487 | ||
|
|
4b94287ac7 | ||
|
|
1575c7c54c | ||
|
|
cab4a56694 | ||
|
|
78041fe457 | ||
|
|
09b6382820 | ||
|
|
9689b847f0 | ||
|
|
15e5938e95 | ||
|
|
8def3f835b | ||
|
|
11ed15f4c5 | ||
|
|
f47877cca9 | ||
|
|
bb2b9215ba | ||
|
|
3111904223 | ||
|
|
003e2c30d8 | ||
|
|
00fe516d10 | ||
|
|
c5ef455283 | ||
|
|
2316b5be83 | ||
|
|
937ebc1582 | ||
|
|
dcc8173c79 | ||
|
|
0305f4f7db | ||
|
|
4b4ef5ce58 | ||
|
|
5b8d5fbfd3 | ||
|
|
c60019a6dc | ||
|
|
acde2a37fa | ||
|
|
945241a52a | ||
|
|
e967f80c86 | ||
|
|
a09dc325de | ||
|
|
379b4f7fc4 | ||
|
|
5e536ae077 | ||
|
|
234585e642 | ||
|
|
2cc14f1ad4 | ||
|
|
dc4ed4d239 | ||
|
|
7281c36873 | ||
|
|
40288776e8 |
19
.github/CODEOWNERS
vendored
19
.github/CODEOWNERS
vendored
@@ -1,8 +1,6 @@
|
|||||||
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
|
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
|
||||||
|
|
||||||
# Owners are automatically requested for review for PRs that changes code
|
# Owners are automatically requested for review for PRs that changes code that they own.
|
||||||
|
|
||||||
# that they own.
|
|
||||||
|
|
||||||
/frontend/ @SigNoz/frontend-maintainers
|
/frontend/ @SigNoz/frontend-maintainers
|
||||||
|
|
||||||
@@ -11,8 +9,10 @@
|
|||||||
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
|
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
|
||||||
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
|
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
|
||||||
|
|
||||||
/deploy/ @SigNoz/devops
|
# CI
|
||||||
.github @SigNoz/devops
|
/deploy/ @therealpandey
|
||||||
|
.github @therealpandey
|
||||||
|
go.mod @therealpandey
|
||||||
|
|
||||||
# Scaffold Owners
|
# Scaffold Owners
|
||||||
|
|
||||||
@@ -105,6 +105,10 @@
|
|||||||
/pkg/modules/authdomain/ @vikrantgupta25
|
/pkg/modules/authdomain/ @vikrantgupta25
|
||||||
/pkg/modules/role/ @vikrantgupta25
|
/pkg/modules/role/ @vikrantgupta25
|
||||||
|
|
||||||
|
# IdentN Owners
|
||||||
|
/pkg/identn/ @vikrantgupta25
|
||||||
|
/pkg/http/middleware/identn.go @vikrantgupta25
|
||||||
|
|
||||||
# Integration tests
|
# Integration tests
|
||||||
|
|
||||||
/tests/integration/ @vikrantgupta25
|
/tests/integration/ @vikrantgupta25
|
||||||
@@ -127,12 +131,15 @@
|
|||||||
/frontend/src/pages/DashboardsListPage/ @SigNoz/pulse-frontend
|
/frontend/src/pages/DashboardsListPage/ @SigNoz/pulse-frontend
|
||||||
/frontend/src/container/ListOfDashboard/ @SigNoz/pulse-frontend
|
/frontend/src/container/ListOfDashboard/ @SigNoz/pulse-frontend
|
||||||
|
|
||||||
|
# Dashboard Widget Page
|
||||||
|
/frontend/src/pages/DashboardWidget/ @SigNoz/pulse-frontend
|
||||||
|
/frontend/src/container/NewWidget/ @SigNoz/pulse-frontend
|
||||||
|
|
||||||
## Dashboard Page
|
## Dashboard Page
|
||||||
|
|
||||||
/frontend/src/pages/DashboardPage/ @SigNoz/pulse-frontend
|
/frontend/src/pages/DashboardPage/ @SigNoz/pulse-frontend
|
||||||
/frontend/src/container/DashboardContainer/ @SigNoz/pulse-frontend
|
/frontend/src/container/DashboardContainer/ @SigNoz/pulse-frontend
|
||||||
/frontend/src/container/GridCardLayout/ @SigNoz/pulse-frontend
|
/frontend/src/container/GridCardLayout/ @SigNoz/pulse-frontend
|
||||||
/frontend/src/container/NewWidget/ @SigNoz/pulse-frontend
|
|
||||||
|
|
||||||
## Public Dashboard Page
|
## Public Dashboard Page
|
||||||
|
|
||||||
|
|||||||
10
.github/workflows/goci.yaml
vendored
10
.github/workflows/goci.yaml
vendored
@@ -102,13 +102,3 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
go run cmd/enterprise/*.go generate openapi
|
go run cmd/enterprise/*.go generate openapi
|
||||||
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
|
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
|
||||||
- name: node-install
|
|
||||||
uses: actions/setup-node@v5
|
|
||||||
with:
|
|
||||||
node-version: "22"
|
|
||||||
- name: install-frontend
|
|
||||||
run: cd frontend && yarn install
|
|
||||||
- name: generate-api-clients
|
|
||||||
run: |
|
|
||||||
cd frontend && yarn generate:api
|
|
||||||
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)
|
|
||||||
|
|||||||
2
.github/workflows/integrationci.yaml
vendored
2
.github/workflows/integrationci.yaml
vendored
@@ -29,6 +29,7 @@ jobs:
|
|||||||
- name: fmt
|
- name: fmt
|
||||||
run: |
|
run: |
|
||||||
make py-fmt
|
make py-fmt
|
||||||
|
git diff --exit-code -- tests/integration/
|
||||||
- name: lint
|
- name: lint
|
||||||
run: |
|
run: |
|
||||||
make py-lint
|
make py-lint
|
||||||
@@ -49,6 +50,7 @@ jobs:
|
|||||||
- ttl
|
- ttl
|
||||||
- alerts
|
- alerts
|
||||||
- ingestionkeys
|
- ingestionkeys
|
||||||
|
- rootuser
|
||||||
sqlstore-provider:
|
sqlstore-provider:
|
||||||
- postgres
|
- postgres
|
||||||
- sqlite
|
- sqlite
|
||||||
|
|||||||
51
.github/workflows/jsci.yaml
vendored
51
.github/workflows/jsci.yaml
vendored
@@ -52,16 +52,16 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
PRIMUS_REF: main
|
PRIMUS_REF: main
|
||||||
JS_SRC: frontend
|
JS_SRC: frontend
|
||||||
md-languages:
|
languages:
|
||||||
if: |
|
if: |
|
||||||
github.event_name == 'merge_group' ||
|
github.event_name == 'merge_group' ||
|
||||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
|
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: checkout
|
- name: self-checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
- name: validate md languages
|
- name: run
|
||||||
run: bash frontend/scripts/validate-md-languages.sh
|
run: bash frontend/scripts/validate-md-languages.sh
|
||||||
authz:
|
authz:
|
||||||
if: |
|
if: |
|
||||||
@@ -70,44 +70,55 @@ jobs:
|
|||||||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
|
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout code
|
- name: self-checkout
|
||||||
uses: actions/checkout@v5
|
uses: actions/checkout@v5
|
||||||
|
- name: node-install
|
||||||
- name: Set up Node.js
|
|
||||||
uses: actions/setup-node@v5
|
uses: actions/setup-node@v5
|
||||||
with:
|
with:
|
||||||
node-version: "22"
|
node-version: "22"
|
||||||
|
- name: deps-install
|
||||||
- name: Install frontend dependencies
|
|
||||||
working-directory: ./frontend
|
working-directory: ./frontend
|
||||||
run: |
|
run: |
|
||||||
yarn install
|
yarn install
|
||||||
|
- name: uv-install
|
||||||
- name: Install uv
|
|
||||||
uses: astral-sh/setup-uv@v5
|
uses: astral-sh/setup-uv@v5
|
||||||
|
- name: uv-deps
|
||||||
- name: Install Python dependencies
|
|
||||||
working-directory: ./tests/integration
|
working-directory: ./tests/integration
|
||||||
run: |
|
run: |
|
||||||
uv sync
|
uv sync
|
||||||
|
- name: setup-test
|
||||||
- name: Start test environment
|
|
||||||
run: |
|
run: |
|
||||||
make py-test-setup
|
make py-test-setup
|
||||||
|
- name: generate
|
||||||
- name: Generate permissions.type.ts
|
|
||||||
working-directory: ./frontend
|
working-directory: ./frontend
|
||||||
run: |
|
run: |
|
||||||
yarn generate:permissions-type
|
yarn generate:permissions-type
|
||||||
|
- name: teardown-test
|
||||||
- name: Teardown test environment
|
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
make py-test-teardown
|
make py-test-teardown
|
||||||
|
- name: validate
|
||||||
- name: Check for changes
|
|
||||||
run: |
|
run: |
|
||||||
if ! git diff --exit-code frontend/src/hooks/useAuthZ/permissions.type.ts; then
|
if ! git diff --exit-code frontend/src/hooks/useAuthZ/permissions.type.ts; then
|
||||||
echo "::error::frontend/src/hooks/useAuthZ/permissions.type.ts is out of date. Please run the generator locally and commit the changes: npm run generate:permissions-type (from the frontend directory)"
|
echo "::error::frontend/src/hooks/useAuthZ/permissions.type.ts is out of date. Please run the generator locally and commit the changes: npm run generate:permissions-type (from the frontend directory)"
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
openapi:
|
||||||
|
if: |
|
||||||
|
github.event_name == 'merge_group' ||
|
||||||
|
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||||
|
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: self-checkout
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
- name: node-install
|
||||||
|
uses: actions/setup-node@v5
|
||||||
|
with:
|
||||||
|
node-version: "22"
|
||||||
|
- name: install-frontend
|
||||||
|
run: cd frontend && yarn install
|
||||||
|
- name: generate-api-clients
|
||||||
|
run: |
|
||||||
|
cd frontend && yarn generate:api
|
||||||
|
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)
|
||||||
|
|||||||
60
.github/workflows/mergequeueci.yaml
vendored
Normal file
60
.github/workflows/mergequeueci.yaml
vendored
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
name: mergequeueci
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types:
|
||||||
|
- dequeued
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
notify:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event.pull_request.merged == false
|
||||||
|
steps:
|
||||||
|
- name: alert
|
||||||
|
uses: slackapi/slack-github-action@v2.1.1
|
||||||
|
with:
|
||||||
|
webhook: ${{ secrets.SLACK_MERGE_QUEUE_WEBHOOK }}
|
||||||
|
webhook-type: incoming-webhook
|
||||||
|
payload: |
|
||||||
|
{
|
||||||
|
"text": ":x: PR removed from merge queue",
|
||||||
|
"blocks": [
|
||||||
|
{
|
||||||
|
"type": "header",
|
||||||
|
"text": {
|
||||||
|
"type": "plain_text",
|
||||||
|
"text": ":x: PR Removed from Merge Queue"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "section",
|
||||||
|
"text": {
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": "*<${{ github.event.pull_request.html_url }}|PR #${{ github.event.pull_request.number }}: ${{ github.event.pull_request.title }}>*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "divider"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "section",
|
||||||
|
"fields": [
|
||||||
|
{
|
||||||
|
"type": "mrkdwn",
|
||||||
|
"text": "*Author*\n@${{ github.event.pull_request.user.login }}"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
- name: comment
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||||
|
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||||
|
PR_URL: ${{ github.event.pull_request.html_url }}
|
||||||
|
run: |
|
||||||
|
gh api repos/${{ github.repository }}/issues/$PR_NUMBER/comments \
|
||||||
|
-f body="> :x: **PR removed from merge queue**
|
||||||
|
>
|
||||||
|
> @$PR_AUTHOR your PR was removed from the merge queue. Fix the issue and re-queue when ready."
|
||||||
@@ -35,7 +35,7 @@ linters:
|
|||||||
- identical
|
- identical
|
||||||
sloglint:
|
sloglint:
|
||||||
no-mixed-args: true
|
no-mixed-args: true
|
||||||
kv-only: true
|
attr-only: true
|
||||||
no-global: all
|
no-global: all
|
||||||
context: all
|
context: all
|
||||||
static-msg: true
|
static-msg: true
|
||||||
|
|||||||
@@ -4,12 +4,15 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/cmd"
|
"github.com/SigNoz/signoz/cmd"
|
||||||
"github.com/SigNoz/signoz/pkg/analytics"
|
"github.com/SigNoz/signoz/pkg/analytics"
|
||||||
"github.com/SigNoz/signoz/pkg/authn"
|
"github.com/SigNoz/signoz/pkg/authn"
|
||||||
"github.com/SigNoz/signoz/pkg/authz"
|
"github.com/SigNoz/signoz/pkg/authz"
|
||||||
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
|
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
|
||||||
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
|
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/gateway"
|
"github.com/SigNoz/signoz/pkg/gateway"
|
||||||
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
|
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
|
||||||
@@ -28,18 +31,17 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/version"
|
"github.com/SigNoz/signoz/pkg/version"
|
||||||
"github.com/SigNoz/signoz/pkg/zeus"
|
"github.com/SigNoz/signoz/pkg/zeus"
|
||||||
"github.com/SigNoz/signoz/pkg/zeus/noopzeus"
|
"github.com/SigNoz/signoz/pkg/zeus/noopzeus"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
||||||
var flags signoz.DeprecatedFlags
|
var configFiles []string
|
||||||
|
|
||||||
serverCmd := &cobra.Command{
|
serverCmd := &cobra.Command{
|
||||||
Use: "server",
|
Use: "server",
|
||||||
Short: "Run the SigNoz server",
|
Short: "Run the SigNoz server",
|
||||||
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
||||||
RunE: func(currCmd *cobra.Command, args []string) error {
|
RunE: func(currCmd *cobra.Command, args []string) error {
|
||||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
|
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, configFiles)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -48,7 +50,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
flags.RegisterFlags(serverCmd)
|
serverCmd.Flags().StringArrayVar(&configFiles, "config", nil, "path to a YAML configuration file (can be specified multiple times, later files override earlier ones)")
|
||||||
parentCmd.AddCommand(serverCmd)
|
parentCmd.AddCommand(serverCmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -90,37 +92,37 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
server, err := app.NewServer(config, signoz)
|
server, err := app.NewServer(config, signoz)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to create server", "error", err)
|
logger.ErrorContext(ctx, "failed to create server", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := server.Start(ctx); err != nil {
|
if err := server.Start(ctx); err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to start server", "error", err)
|
logger.ErrorContext(ctx, "failed to start server", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
signoz.Start(ctx)
|
signoz.Start(ctx)
|
||||||
|
|
||||||
if err := signoz.Wait(ctx); err != nil {
|
if err := signoz.Wait(ctx); err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to start signoz", "error", err)
|
logger.ErrorContext(ctx, "failed to start signoz", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
err = server.Stop(ctx)
|
err = server.Stop(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to stop server", "error", err)
|
logger.ErrorContext(ctx, "failed to stop server", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
err = signoz.Stop(ctx)
|
err = signoz.Stop(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
|
logger.ErrorContext(ctx, "failed to stop signoz", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -10,18 +10,23 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/signoz"
|
"github.com/SigNoz/signoz/pkg/signoz"
|
||||||
)
|
)
|
||||||
|
|
||||||
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) {
|
func NewSigNozConfig(ctx context.Context, logger *slog.Logger, configFiles []string) (signoz.Config, error) {
|
||||||
|
uris := make([]string, 0, len(configFiles)+1)
|
||||||
|
for _, f := range configFiles {
|
||||||
|
uris = append(uris, "file:"+f)
|
||||||
|
}
|
||||||
|
uris = append(uris, "env:")
|
||||||
|
|
||||||
config, err := signoz.NewConfig(
|
config, err := signoz.NewConfig(
|
||||||
ctx,
|
ctx,
|
||||||
logger,
|
logger,
|
||||||
config.ResolverConfig{
|
config.ResolverConfig{
|
||||||
Uris: []string{"env:"},
|
Uris: uris,
|
||||||
ProviderFactories: []config.ProviderFactory{
|
ProviderFactories: []config.ProviderFactory{
|
||||||
envprovider.NewFactory(),
|
envprovider.NewFactory(),
|
||||||
fileprovider.NewFactory(),
|
fileprovider.NewFactory(),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
flags,
|
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return signoz.Config{}, err
|
return signoz.Config{}, err
|
||||||
|
|||||||
86
cmd/config_test.go
Normal file
86
cmd/config_test.go
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
package cmd
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log/slog"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestNewSigNozConfig_NoConfigFiles(t *testing.T) {
|
||||||
|
logger := slog.New(slog.DiscardHandler)
|
||||||
|
config, err := NewSigNozConfig(context.Background(), logger, nil)
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.NotZero(t, config)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewSigNozConfig_SingleConfigFile(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
configPath := filepath.Join(dir, "config.yaml")
|
||||||
|
err := os.WriteFile(configPath, []byte(`
|
||||||
|
cache:
|
||||||
|
provider: "redis"
|
||||||
|
`), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
logger := slog.New(slog.DiscardHandler)
|
||||||
|
config, err := NewSigNozConfig(context.Background(), logger, []string{configPath})
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, "redis", config.Cache.Provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewSigNozConfig_MultipleConfigFiles_LaterOverridesEarlier(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
|
||||||
|
basePath := filepath.Join(dir, "base.yaml")
|
||||||
|
err := os.WriteFile(basePath, []byte(`
|
||||||
|
cache:
|
||||||
|
provider: "memory"
|
||||||
|
sqlstore:
|
||||||
|
provider: "sqlite"
|
||||||
|
`), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
overridePath := filepath.Join(dir, "override.yaml")
|
||||||
|
err = os.WriteFile(overridePath, []byte(`
|
||||||
|
cache:
|
||||||
|
provider: "redis"
|
||||||
|
`), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
logger := slog.New(slog.DiscardHandler)
|
||||||
|
config, err := NewSigNozConfig(context.Background(), logger, []string{basePath, overridePath})
|
||||||
|
require.NoError(t, err)
|
||||||
|
// Later file overrides earlier
|
||||||
|
assert.Equal(t, "redis", config.Cache.Provider)
|
||||||
|
// Value from base file that wasn't overridden persists
|
||||||
|
assert.Equal(t, "sqlite", config.SQLStore.Provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewSigNozConfig_EnvOverridesConfigFile(t *testing.T) {
|
||||||
|
dir := t.TempDir()
|
||||||
|
configPath := filepath.Join(dir, "config.yaml")
|
||||||
|
err := os.WriteFile(configPath, []byte(`
|
||||||
|
cache:
|
||||||
|
provider: "fromfile"
|
||||||
|
`), 0644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
t.Setenv("SIGNOZ_CACHE_PROVIDER", "fromenv")
|
||||||
|
|
||||||
|
logger := slog.New(slog.DiscardHandler)
|
||||||
|
config, err := NewSigNozConfig(context.Background(), logger, []string{configPath})
|
||||||
|
require.NoError(t, err)
|
||||||
|
// Env should override file
|
||||||
|
assert.Equal(t, "fromenv", config.Cache.Provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestNewSigNozConfig_NonexistentFile(t *testing.T) {
|
||||||
|
logger := slog.New(slog.DiscardHandler)
|
||||||
|
_, err := NewSigNozConfig(context.Background(), logger, []string{"/nonexistent/config.yaml"})
|
||||||
|
assert.Error(t, err)
|
||||||
|
}
|
||||||
@@ -5,16 +5,18 @@ import (
|
|||||||
"log/slog"
|
"log/slog"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/cmd"
|
"github.com/SigNoz/signoz/cmd"
|
||||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
||||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||||
eequerier "github.com/SigNoz/signoz/ee/querier"
|
|
||||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||||
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
|
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
|
||||||
|
eequerier "github.com/SigNoz/signoz/ee/querier"
|
||||||
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
|
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
|
||||||
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
|
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
|
||||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||||
@@ -23,6 +25,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/analytics"
|
"github.com/SigNoz/signoz/pkg/analytics"
|
||||||
"github.com/SigNoz/signoz/pkg/authn"
|
"github.com/SigNoz/signoz/pkg/authn"
|
||||||
"github.com/SigNoz/signoz/pkg/authz"
|
"github.com/SigNoz/signoz/pkg/authz"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/gateway"
|
"github.com/SigNoz/signoz/pkg/gateway"
|
||||||
"github.com/SigNoz/signoz/pkg/licensing"
|
"github.com/SigNoz/signoz/pkg/licensing"
|
||||||
@@ -38,18 +41,17 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/version"
|
"github.com/SigNoz/signoz/pkg/version"
|
||||||
"github.com/SigNoz/signoz/pkg/zeus"
|
"github.com/SigNoz/signoz/pkg/zeus"
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
||||||
var flags signoz.DeprecatedFlags
|
var configFiles []string
|
||||||
|
|
||||||
serverCmd := &cobra.Command{
|
serverCmd := &cobra.Command{
|
||||||
Use: "server",
|
Use: "server",
|
||||||
Short: "Run the SigNoz server",
|
Short: "Run the SigNoz server",
|
||||||
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
||||||
RunE: func(currCmd *cobra.Command, args []string) error {
|
RunE: func(currCmd *cobra.Command, args []string) error {
|
||||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
|
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, configFiles)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -58,7 +60,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
flags.RegisterFlags(serverCmd)
|
serverCmd.Flags().StringArrayVar(&configFiles, "config", nil, "path to a YAML configuration file (can be specified multiple times, later files override earlier ones)")
|
||||||
parentCmd.AddCommand(serverCmd)
|
parentCmd.AddCommand(serverCmd)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -69,7 +71,7 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
|||||||
// add enterprise sqlstore factories to the community sqlstore factories
|
// add enterprise sqlstore factories to the community sqlstore factories
|
||||||
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
|
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||||
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
|
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
|
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -132,37 +134,37 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
|||||||
)
|
)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
server, err := enterpriseapp.NewServer(config, signoz)
|
server, err := enterpriseapp.NewServer(config, signoz)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to create server", "error", err)
|
logger.ErrorContext(ctx, "failed to create server", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := server.Start(ctx); err != nil {
|
if err := server.Start(ctx); err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to start server", "error", err)
|
logger.ErrorContext(ctx, "failed to start server", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
signoz.Start(ctx)
|
signoz.Start(ctx)
|
||||||
|
|
||||||
if err := signoz.Wait(ctx); err != nil {
|
if err := signoz.Wait(ctx); err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to start signoz", "error", err)
|
logger.ErrorContext(ctx, "failed to start signoz", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
err = server.Stop(ctx)
|
err = server.Stop(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to stop server", "error", err)
|
logger.ErrorContext(ctx, "failed to stop server", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
err = signoz.Stop(ctx)
|
err = signoz.Stop(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(ctx, "failed to stop signoz", "error", err)
|
logger.ErrorContext(ctx, "failed to stop signoz", errors.Attr(err))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
13
cmd/root.go
13
cmd/root.go
@@ -4,9 +4,10 @@ import (
|
|||||||
"log/slog"
|
"log/slog"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/version"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"go.uber.org/zap" //nolint:depguard
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/version"
|
||||||
)
|
)
|
||||||
|
|
||||||
var RootCmd = &cobra.Command{
|
var RootCmd = &cobra.Command{
|
||||||
@@ -19,15 +20,9 @@ var RootCmd = &cobra.Command{
|
|||||||
}
|
}
|
||||||
|
|
||||||
func Execute(logger *slog.Logger) {
|
func Execute(logger *slog.Logger) {
|
||||||
zapLogger := newZapLogger()
|
|
||||||
zap.ReplaceGlobals(zapLogger)
|
|
||||||
defer func() {
|
|
||||||
_ = zapLogger.Sync()
|
|
||||||
}()
|
|
||||||
|
|
||||||
err := RootCmd.Execute()
|
err := RootCmd.Execute()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.ErrorContext(RootCmd.Context(), "error running command", "error", err)
|
logger.ErrorContext(RootCmd.Context(), "error running command", errors.Attr(err))
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
110
cmd/zap.go
110
cmd/zap.go
@@ -1,110 +0,0 @@
|
|||||||
package cmd
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
|
||||||
"go.uber.org/zap" //nolint:depguard
|
|
||||||
"go.uber.org/zap/zapcore" //nolint:depguard
|
|
||||||
)
|
|
||||||
|
|
||||||
// Deprecated: Use `NewLogger` from `pkg/instrumentation` instead.
|
|
||||||
func newZapLogger() *zap.Logger {
|
|
||||||
config := zap.NewProductionConfig()
|
|
||||||
config.EncoderConfig.TimeKey = "timestamp"
|
|
||||||
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
|
|
||||||
|
|
||||||
// Extract sampling config before building the logger.
|
|
||||||
// We need to disable sampling in the config and apply it manually later
|
|
||||||
// to ensure correct core ordering. See filteringCore documentation for details.
|
|
||||||
samplerConfig := config.Sampling
|
|
||||||
config.Sampling = nil
|
|
||||||
|
|
||||||
logger, _ := config.Build()
|
|
||||||
|
|
||||||
// Wrap with custom core wrapping to filter certain log entries.
|
|
||||||
// The order of wrapping is important:
|
|
||||||
// 1. First wrap with filteringCore
|
|
||||||
// 2. Then wrap with sampler
|
|
||||||
//
|
|
||||||
// This creates the call chain: sampler -> filteringCore -> ioCore
|
|
||||||
//
|
|
||||||
// During logging:
|
|
||||||
// - sampler.Check decides whether to sample the log entry
|
|
||||||
// - If sampled, filteringCore.Check is called
|
|
||||||
// - filteringCore adds itself to CheckedEntry.cores
|
|
||||||
// - All cores in CheckedEntry.cores have their Write method called
|
|
||||||
// - filteringCore.Write can now filter the entry before passing to ioCore
|
|
||||||
//
|
|
||||||
// If we didn't disable the sampler above, filteringCore would have wrapped
|
|
||||||
// sampler. By calling sampler.Check we would have allowed it to call
|
|
||||||
// ioCore.Check that adds itself to CheckedEntry.cores. Then ioCore.Write
|
|
||||||
// would have bypassed our checks, making filtering impossible.
|
|
||||||
return logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
|
|
||||||
core = &filteringCore{core}
|
|
||||||
if samplerConfig != nil {
|
|
||||||
core = zapcore.NewSamplerWithOptions(
|
|
||||||
core,
|
|
||||||
time.Second,
|
|
||||||
samplerConfig.Initial,
|
|
||||||
samplerConfig.Thereafter,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
return core
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
// filteringCore wraps a zapcore.Core to filter out log entries based on a
|
|
||||||
// custom logic.
|
|
||||||
//
|
|
||||||
// Note: This core must be positioned before the sampler in the core chain
|
|
||||||
// to ensure Write is called. See newZapLogger for ordering details.
|
|
||||||
type filteringCore struct {
|
|
||||||
zapcore.Core
|
|
||||||
}
|
|
||||||
|
|
||||||
// filter determines whether a log entry should be written based on its fields.
|
|
||||||
// Returns false if the entry should be suppressed, true otherwise.
|
|
||||||
//
|
|
||||||
// Current filters:
|
|
||||||
// - context.Canceled: These are expected errors from cancelled operations,
|
|
||||||
// and create noise in logs.
|
|
||||||
func (c *filteringCore) filter(fields []zapcore.Field) bool {
|
|
||||||
for _, field := range fields {
|
|
||||||
if field.Type == zapcore.ErrorType {
|
|
||||||
if loggedErr, ok := field.Interface.(error); ok {
|
|
||||||
// Suppress logs containing context.Canceled errors
|
|
||||||
if errors.Is(loggedErr, context.Canceled) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// With implements zapcore.Core.With
|
|
||||||
// It returns a new copy with the added context.
|
|
||||||
func (c *filteringCore) With(fields []zapcore.Field) zapcore.Core {
|
|
||||||
return &filteringCore{c.Core.With(fields)}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check implements zapcore.Core.Check.
|
|
||||||
// It adds this core to the CheckedEntry if the log level is enabled,
|
|
||||||
// ensuring that Write will be called for this entry.
|
|
||||||
func (c *filteringCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
|
|
||||||
if c.Enabled(ent.Level) {
|
|
||||||
return ce.AddCore(ent, c)
|
|
||||||
}
|
|
||||||
return ce
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write implements zapcore.Core.Write.
|
|
||||||
// It filters log entries based on their fields before delegating to the wrapped core.
|
|
||||||
func (c *filteringCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
|
|
||||||
if !c.filter(fields) {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return c.Core.Write(ent, fields)
|
|
||||||
}
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
provider: "inmemory"
|
|
||||||
inmemory:
|
|
||||||
ttl: 60m
|
|
||||||
cleanupInterval: 10m
|
|
||||||
@@ -39,6 +39,13 @@ instrumentation:
|
|||||||
host: "0.0.0.0"
|
host: "0.0.0.0"
|
||||||
port: 9090
|
port: 9090
|
||||||
|
|
||||||
|
##################### PProf #####################
|
||||||
|
pprof:
|
||||||
|
# Whether to enable the pprof server.
|
||||||
|
enabled: true
|
||||||
|
# The address on which the pprof server listens.
|
||||||
|
address: 0.0.0.0:6060
|
||||||
|
|
||||||
##################### Web #####################
|
##################### Web #####################
|
||||||
web:
|
web:
|
||||||
# Whether to enable the web frontend
|
# Whether to enable the web frontend
|
||||||
@@ -308,6 +315,9 @@ user:
|
|||||||
allow_self: true
|
allow_self: true
|
||||||
# The duration within which a user can reset their password.
|
# The duration within which a user can reset their password.
|
||||||
max_token_lifetime: 6h
|
max_token_lifetime: 6h
|
||||||
|
invite:
|
||||||
|
# The duration within which a user can accept their invite.
|
||||||
|
max_token_lifetime: 48h
|
||||||
root:
|
root:
|
||||||
# Whether to enable the root user. When enabled, a root user is provisioned
|
# Whether to enable the root user. When enabled, a root user is provisioned
|
||||||
# on startup using the email and password below. The root user cannot be
|
# on startup using the email and password below. The root user cannot be
|
||||||
@@ -321,3 +331,22 @@ user:
|
|||||||
org:
|
org:
|
||||||
name: default
|
name: default
|
||||||
id: 00000000-0000-0000-0000-000000000000
|
id: 00000000-0000-0000-0000-000000000000
|
||||||
|
|
||||||
|
##################### IdentN #####################
|
||||||
|
identn:
|
||||||
|
tokenizer:
|
||||||
|
# toggle tokenizer identN
|
||||||
|
enabled: true
|
||||||
|
# headers to use for tokenizer identN resolver
|
||||||
|
headers:
|
||||||
|
- Authorization
|
||||||
|
- Sec-WebSocket-Protocol
|
||||||
|
apikey:
|
||||||
|
# toggle apikey identN
|
||||||
|
enabled: true
|
||||||
|
# headers to use for apikey identN resolver
|
||||||
|
headers:
|
||||||
|
- SIGNOZ-API-KEY
|
||||||
|
impersonation:
|
||||||
|
# toggle impersonation identN, when enabled, all requests will impersonate the root user
|
||||||
|
enabled: false
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
# my global config
|
|
||||||
global:
|
|
||||||
scrape_interval: 5s # Set the scrape interval to every 15 seconds. Default is every 1 minute.
|
|
||||||
evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
|
|
||||||
# scrape_timeout is set to the global default (10s).
|
|
||||||
|
|
||||||
# Alertmanager configuration
|
|
||||||
alerting:
|
|
||||||
alertmanagers:
|
|
||||||
- static_configs:
|
|
||||||
- targets:
|
|
||||||
- 127.0.0.1:9093
|
|
||||||
|
|
||||||
# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
|
|
||||||
rule_files:
|
|
||||||
# - "first_rules.yml"
|
|
||||||
# - "second_rules.yml"
|
|
||||||
- 'alerts.yml'
|
|
||||||
|
|
||||||
# A scrape configuration containing exactly one endpoint to scrape:
|
|
||||||
# Here it's Prometheus itself.
|
|
||||||
scrape_configs: []
|
|
||||||
|
|
||||||
remote_read:
|
|
||||||
- url: tcp://localhost:9000/signoz_metrics
|
|
||||||
@@ -190,7 +190,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:v0.115.0
|
image: signoz/signoz:v0.116.1
|
||||||
ports:
|
ports:
|
||||||
- "8080:8080" # signoz port
|
- "8080:8080" # signoz port
|
||||||
# - "6060:6060" # pprof port
|
# - "6060:6060" # pprof port
|
||||||
|
|||||||
@@ -117,7 +117,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:v0.115.0
|
image: signoz/signoz:v0.116.1
|
||||||
ports:
|
ports:
|
||||||
- "8080:8080" # signoz port
|
- "8080:8080" # signoz port
|
||||||
volumes:
|
volumes:
|
||||||
|
|||||||
@@ -1,38 +0,0 @@
|
|||||||
version: "3"
|
|
||||||
x-common: &common
|
|
||||||
networks:
|
|
||||||
- signoz-net
|
|
||||||
extra_hosts:
|
|
||||||
- host.docker.internal:host-gateway
|
|
||||||
logging:
|
|
||||||
options:
|
|
||||||
max-size: 50m
|
|
||||||
max-file: "3"
|
|
||||||
deploy:
|
|
||||||
restart_policy:
|
|
||||||
condition: on-failure
|
|
||||||
services:
|
|
||||||
hotrod:
|
|
||||||
<<: *common
|
|
||||||
image: jaegertracing/example-hotrod:1.61.0
|
|
||||||
command: [ "all" ]
|
|
||||||
environment:
|
|
||||||
- OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 #
|
|
||||||
load-hotrod:
|
|
||||||
<<: *common
|
|
||||||
image: "signoz/locust:1.2.3"
|
|
||||||
environment:
|
|
||||||
ATTACKED_HOST: http://hotrod:8080
|
|
||||||
LOCUST_MODE: standalone
|
|
||||||
NO_PROXY: standalone
|
|
||||||
TASK_DELAY_FROM: 5
|
|
||||||
TASK_DELAY_TO: 30
|
|
||||||
QUIET_MODE: "${QUIET_MODE:-false}"
|
|
||||||
LOCUST_OPTS: "--headless -u 10 -r 1"
|
|
||||||
volumes:
|
|
||||||
- ../../../common/locust-scripts:/locust
|
|
||||||
|
|
||||||
networks:
|
|
||||||
signoz-net:
|
|
||||||
name: signoz-net
|
|
||||||
external: true
|
|
||||||
@@ -1,69 +0,0 @@
|
|||||||
version: "3"
|
|
||||||
x-common: &common
|
|
||||||
networks:
|
|
||||||
- signoz-net
|
|
||||||
extra_hosts:
|
|
||||||
- host.docker.internal:host-gateway
|
|
||||||
logging:
|
|
||||||
options:
|
|
||||||
max-size: 50m
|
|
||||||
max-file: "3"
|
|
||||||
deploy:
|
|
||||||
mode: global
|
|
||||||
restart_policy:
|
|
||||||
condition: on-failure
|
|
||||||
services:
|
|
||||||
otel-agent:
|
|
||||||
<<: *common
|
|
||||||
image: otel/opentelemetry-collector-contrib:0.111.0
|
|
||||||
command:
|
|
||||||
- --config=/etc/otel-collector-config.yaml
|
|
||||||
volumes:
|
|
||||||
- ./otel-agent-config.yaml:/etc/otel-collector-config.yaml
|
|
||||||
- /:/hostfs:ro
|
|
||||||
environment:
|
|
||||||
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
|
|
||||||
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
|
|
||||||
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
|
|
||||||
# Before exposing the ports, make sure the ports are not used by other services
|
|
||||||
# ports:
|
|
||||||
# - "4317:4317"
|
|
||||||
# - "4318:4318"
|
|
||||||
otel-metrics:
|
|
||||||
<<: *common
|
|
||||||
image: otel/opentelemetry-collector-contrib:0.111.0
|
|
||||||
user: 0:0 # If you have security concerns, you can replace this with your `UID:GID` that has necessary permissions to docker.sock
|
|
||||||
command:
|
|
||||||
- --config=/etc/otel-collector-config.yaml
|
|
||||||
volumes:
|
|
||||||
- ./otel-metrics-config.yaml:/etc/otel-collector-config.yaml
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
environment:
|
|
||||||
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
|
|
||||||
- OTEL_RESOURCE_ATTRIBUTES=host.name={{.Node.Hostname}},os.type={{.Node.Platform.OS}}
|
|
||||||
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
|
|
||||||
# Before exposing the ports, make sure the ports are not used by other services
|
|
||||||
# ports:
|
|
||||||
# - "4317:4317"
|
|
||||||
# - "4318:4318"
|
|
||||||
deploy:
|
|
||||||
mode: replicated
|
|
||||||
replicas: 1
|
|
||||||
placement:
|
|
||||||
constraints:
|
|
||||||
- node.role == manager
|
|
||||||
logspout:
|
|
||||||
<<: *common
|
|
||||||
image: "gliderlabs/logspout:v3.2.14"
|
|
||||||
command: syslog+tcp://otel-agent:2255
|
|
||||||
user: root
|
|
||||||
volumes:
|
|
||||||
- /etc/hostname:/etc/host_hostname:ro
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
depends_on:
|
|
||||||
- otel-agent
|
|
||||||
|
|
||||||
networks:
|
|
||||||
signoz-net:
|
|
||||||
name: signoz-net
|
|
||||||
external: true
|
|
||||||
@@ -1,102 +0,0 @@
|
|||||||
receivers:
|
|
||||||
hostmetrics:
|
|
||||||
collection_interval: 30s
|
|
||||||
root_path: /hostfs
|
|
||||||
scrapers:
|
|
||||||
cpu: {}
|
|
||||||
load: {}
|
|
||||||
memory: {}
|
|
||||||
disk: {}
|
|
||||||
filesystem: {}
|
|
||||||
network: {}
|
|
||||||
otlp:
|
|
||||||
protocols:
|
|
||||||
grpc:
|
|
||||||
endpoint: 0.0.0.0:4317
|
|
||||||
http:
|
|
||||||
endpoint: 0.0.0.0:4318
|
|
||||||
prometheus:
|
|
||||||
config:
|
|
||||||
global:
|
|
||||||
scrape_interval: 60s
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: otel-agent
|
|
||||||
static_configs:
|
|
||||||
- targets:
|
|
||||||
- localhost:8888
|
|
||||||
labels:
|
|
||||||
job_name: otel-agent
|
|
||||||
tcplog/docker:
|
|
||||||
listen_address: "0.0.0.0:2255"
|
|
||||||
operators:
|
|
||||||
- type: regex_parser
|
|
||||||
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
|
|
||||||
timestamp:
|
|
||||||
parse_from: attributes.timestamp
|
|
||||||
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
|
|
||||||
- type: move
|
|
||||||
from: attributes["body"]
|
|
||||||
to: body
|
|
||||||
- type: remove
|
|
||||||
field: attributes.timestamp
|
|
||||||
# please remove names from below if you want to collect logs from them
|
|
||||||
- type: filter
|
|
||||||
id: signoz_logs_filter
|
|
||||||
expr: 'attributes.container_name matches "^(signoz_(logspout|signoz|otel-collector|clickhouse|zookeeper))|(infra_(logspout|otel-agent|otel-metrics)).*"'
|
|
||||||
processors:
|
|
||||||
batch:
|
|
||||||
send_batch_size: 10000
|
|
||||||
send_batch_max_size: 11000
|
|
||||||
timeout: 10s
|
|
||||||
resourcedetection:
|
|
||||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
|
||||||
detectors:
|
|
||||||
# - ec2
|
|
||||||
# - gcp
|
|
||||||
# - azure
|
|
||||||
- env
|
|
||||||
- system
|
|
||||||
timeout: 2s
|
|
||||||
extensions:
|
|
||||||
health_check:
|
|
||||||
endpoint: 0.0.0.0:13133
|
|
||||||
pprof:
|
|
||||||
endpoint: 0.0.0.0:1777
|
|
||||||
exporters:
|
|
||||||
otlp:
|
|
||||||
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
|
|
||||||
tls:
|
|
||||||
insecure: true
|
|
||||||
headers:
|
|
||||||
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
|
|
||||||
# debug: {}
|
|
||||||
service:
|
|
||||||
telemetry:
|
|
||||||
logs:
|
|
||||||
encoding: json
|
|
||||||
metrics:
|
|
||||||
address: 0.0.0.0:8888
|
|
||||||
extensions:
|
|
||||||
- health_check
|
|
||||||
- pprof
|
|
||||||
pipelines:
|
|
||||||
traces:
|
|
||||||
receivers: [otlp]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
metrics:
|
|
||||||
receivers: [otlp]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
metrics/hostmetrics:
|
|
||||||
receivers: [hostmetrics]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
metrics/prometheus:
|
|
||||||
receivers: [prometheus]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
logs:
|
|
||||||
receivers: [otlp, tcplog/docker]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
@@ -1,103 +0,0 @@
|
|||||||
receivers:
|
|
||||||
prometheus:
|
|
||||||
config:
|
|
||||||
global:
|
|
||||||
scrape_interval: 60s
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: otel-metrics
|
|
||||||
static_configs:
|
|
||||||
- targets:
|
|
||||||
- localhost:8888
|
|
||||||
labels:
|
|
||||||
job_name: otel-metrics
|
|
||||||
# For Docker daemon metrics to be scraped, it must be configured to expose
|
|
||||||
# Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
|
|
||||||
# - job_name: docker-daemon
|
|
||||||
# dockerswarm_sd_configs:
|
|
||||||
# - host: unix:///var/run/docker.sock
|
|
||||||
# role: nodes
|
|
||||||
# relabel_configs:
|
|
||||||
# - source_labels: [__meta_dockerswarm_node_address]
|
|
||||||
# target_label: __address__
|
|
||||||
# replacement: $1:9323
|
|
||||||
- job_name: "dockerswarm"
|
|
||||||
dockerswarm_sd_configs:
|
|
||||||
- host: unix:///var/run/docker.sock
|
|
||||||
role: tasks
|
|
||||||
relabel_configs:
|
|
||||||
- action: keep
|
|
||||||
regex: running
|
|
||||||
source_labels:
|
|
||||||
- __meta_dockerswarm_task_desired_state
|
|
||||||
- action: keep
|
|
||||||
regex: true
|
|
||||||
source_labels:
|
|
||||||
- __meta_dockerswarm_service_label_signoz_io_scrape
|
|
||||||
- regex: ([^:]+)(?::\d+)?
|
|
||||||
replacement: $1
|
|
||||||
source_labels:
|
|
||||||
- __address__
|
|
||||||
target_label: swarm_container_ip
|
|
||||||
- separator: .
|
|
||||||
source_labels:
|
|
||||||
- __meta_dockerswarm_service_name
|
|
||||||
- __meta_dockerswarm_task_slot
|
|
||||||
- __meta_dockerswarm_task_id
|
|
||||||
target_label: swarm_container_name
|
|
||||||
- target_label: __address__
|
|
||||||
source_labels:
|
|
||||||
- swarm_container_ip
|
|
||||||
- __meta_dockerswarm_service_label_signoz_io_port
|
|
||||||
separator: ":"
|
|
||||||
- source_labels:
|
|
||||||
- __meta_dockerswarm_service_label_signoz_io_path
|
|
||||||
target_label: __metrics_path__
|
|
||||||
- source_labels:
|
|
||||||
- __meta_dockerswarm_service_label_com_docker_stack_namespace
|
|
||||||
target_label: namespace
|
|
||||||
- source_labels:
|
|
||||||
- __meta_dockerswarm_service_name
|
|
||||||
target_label: service_name
|
|
||||||
- source_labels:
|
|
||||||
- __meta_dockerswarm_task_id
|
|
||||||
target_label: service_instance_id
|
|
||||||
- source_labels:
|
|
||||||
- __meta_dockerswarm_node_hostname
|
|
||||||
target_label: host_name
|
|
||||||
processors:
|
|
||||||
batch:
|
|
||||||
send_batch_size: 10000
|
|
||||||
send_batch_max_size: 11000
|
|
||||||
timeout: 10s
|
|
||||||
resourcedetection:
|
|
||||||
detectors:
|
|
||||||
- env
|
|
||||||
- system
|
|
||||||
timeout: 2s
|
|
||||||
extensions:
|
|
||||||
health_check:
|
|
||||||
endpoint: 0.0.0.0:13133
|
|
||||||
pprof:
|
|
||||||
endpoint: 0.0.0.0:1777
|
|
||||||
exporters:
|
|
||||||
otlp:
|
|
||||||
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
|
|
||||||
tls:
|
|
||||||
insecure: true
|
|
||||||
headers:
|
|
||||||
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
|
|
||||||
# debug: {}
|
|
||||||
service:
|
|
||||||
telemetry:
|
|
||||||
logs:
|
|
||||||
encoding: json
|
|
||||||
metrics:
|
|
||||||
address: 0.0.0.0:8888
|
|
||||||
extensions:
|
|
||||||
- health_check
|
|
||||||
- pprof
|
|
||||||
pipelines:
|
|
||||||
metrics:
|
|
||||||
receivers: [prometheus]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
@@ -181,7 +181,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:${VERSION:-v0.115.0}
|
image: signoz/signoz:${VERSION:-v0.116.1}
|
||||||
container_name: signoz
|
container_name: signoz
|
||||||
ports:
|
ports:
|
||||||
- "8080:8080" # signoz port
|
- "8080:8080" # signoz port
|
||||||
|
|||||||
@@ -109,7 +109,7 @@ services:
|
|||||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||||
signoz:
|
signoz:
|
||||||
!!merge <<: *db-depend
|
!!merge <<: *db-depend
|
||||||
image: signoz/signoz:${VERSION:-v0.115.0}
|
image: signoz/signoz:${VERSION:-v0.116.1}
|
||||||
container_name: signoz
|
container_name: signoz
|
||||||
ports:
|
ports:
|
||||||
- "8080:8080" # signoz port
|
- "8080:8080" # signoz port
|
||||||
|
|||||||
@@ -1,39 +0,0 @@
|
|||||||
version: "3"
|
|
||||||
x-common: &common
|
|
||||||
networks:
|
|
||||||
- signoz-net
|
|
||||||
extra_hosts:
|
|
||||||
- host.docker.internal:host-gateway
|
|
||||||
logging:
|
|
||||||
options:
|
|
||||||
max-size: 50m
|
|
||||||
max-file: "3"
|
|
||||||
restart: unless-stopped
|
|
||||||
services:
|
|
||||||
hotrod:
|
|
||||||
<<: *common
|
|
||||||
image: jaegertracing/example-hotrod:1.61.0
|
|
||||||
container_name: hotrod
|
|
||||||
command: [ "all" ]
|
|
||||||
environment:
|
|
||||||
- OTEL_EXPORTER_OTLP_ENDPOINT=http://host.docker.internal:4318 # In case of external SigNoz or cloud, update the endpoint and access token
|
|
||||||
# - OTEL_OTLP_HEADERS=signoz-access-token=<your-access-token>
|
|
||||||
load-hotrod:
|
|
||||||
<<: *common
|
|
||||||
image: "signoz/locust:1.2.3"
|
|
||||||
container_name: load-hotrod
|
|
||||||
environment:
|
|
||||||
ATTACKED_HOST: http://hotrod:8080
|
|
||||||
LOCUST_MODE: standalone
|
|
||||||
NO_PROXY: standalone
|
|
||||||
TASK_DELAY_FROM: 5
|
|
||||||
TASK_DELAY_TO: 30
|
|
||||||
QUIET_MODE: "${QUIET_MODE:-false}"
|
|
||||||
LOCUST_OPTS: "--headless -u 10 -r 1"
|
|
||||||
volumes:
|
|
||||||
- ../../../common/locust-scripts:/locust
|
|
||||||
|
|
||||||
networks:
|
|
||||||
signoz-net:
|
|
||||||
name: signoz-net
|
|
||||||
external: true
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
version: "3"
|
|
||||||
x-common: &common
|
|
||||||
networks:
|
|
||||||
- signoz-net
|
|
||||||
extra_hosts:
|
|
||||||
- host.docker.internal:host-gateway
|
|
||||||
logging:
|
|
||||||
options:
|
|
||||||
max-size: 50m
|
|
||||||
max-file: "3"
|
|
||||||
restart: unless-stopped
|
|
||||||
services:
|
|
||||||
otel-agent:
|
|
||||||
<<: *common
|
|
||||||
image: otel/opentelemetry-collector-contrib:0.111.0
|
|
||||||
command:
|
|
||||||
- --config=/etc/otel-collector-config.yaml
|
|
||||||
volumes:
|
|
||||||
- ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
|
|
||||||
- /:/hostfs:ro
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
environment:
|
|
||||||
- SIGNOZ_COLLECTOR_ENDPOINT=http://host.docker.internal:4317 # In case of external SigNoz or cloud, update the endpoint and access token
|
|
||||||
- OTEL_RESOURCE_ATTRIBUTES=host.name=signoz-host,os.type=linux # Replace signoz-host with the actual hostname
|
|
||||||
# - SIGNOZ_ACCESS_TOKEN="<your-access-token>"
|
|
||||||
# Before exposing the ports, make sure the ports are not used by other services
|
|
||||||
# ports:
|
|
||||||
# - "4317:4317"
|
|
||||||
# - "4318:4318"
|
|
||||||
logspout:
|
|
||||||
<<: *common
|
|
||||||
image: "gliderlabs/logspout:v3.2.14"
|
|
||||||
volumes:
|
|
||||||
- /etc/hostname:/etc/host_hostname:ro
|
|
||||||
- /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
command: syslog+tcp://otel-agent:2255
|
|
||||||
depends_on:
|
|
||||||
- otel-agent
|
|
||||||
|
|
||||||
networks:
|
|
||||||
signoz-net:
|
|
||||||
name: signoz-net
|
|
||||||
external: true
|
|
||||||
@@ -1,139 +0,0 @@
|
|||||||
receivers:
|
|
||||||
hostmetrics:
|
|
||||||
collection_interval: 30s
|
|
||||||
root_path: /hostfs
|
|
||||||
scrapers:
|
|
||||||
cpu: {}
|
|
||||||
load: {}
|
|
||||||
memory: {}
|
|
||||||
disk: {}
|
|
||||||
filesystem: {}
|
|
||||||
network: {}
|
|
||||||
otlp:
|
|
||||||
protocols:
|
|
||||||
grpc:
|
|
||||||
endpoint: 0.0.0.0:4317
|
|
||||||
http:
|
|
||||||
endpoint: 0.0.0.0:4318
|
|
||||||
prometheus:
|
|
||||||
config:
|
|
||||||
global:
|
|
||||||
scrape_interval: 60s
|
|
||||||
scrape_configs:
|
|
||||||
- job_name: otel-collector
|
|
||||||
static_configs:
|
|
||||||
- targets:
|
|
||||||
- localhost:8888
|
|
||||||
labels:
|
|
||||||
job_name: otel-collector
|
|
||||||
# For Docker daemon metrics to be scraped, it must be configured to expose
|
|
||||||
# Prometheus metrics, as documented here: https://docs.docker.com/config/daemon/prometheus/
|
|
||||||
# - job_name: docker-daemon
|
|
||||||
# static_configs:
|
|
||||||
# - targets:
|
|
||||||
# - host.docker.internal:9323
|
|
||||||
# labels:
|
|
||||||
# job_name: docker-daemon
|
|
||||||
- job_name: docker-container
|
|
||||||
docker_sd_configs:
|
|
||||||
- host: unix:///var/run/docker.sock
|
|
||||||
relabel_configs:
|
|
||||||
- action: keep
|
|
||||||
regex: true
|
|
||||||
source_labels:
|
|
||||||
- __meta_docker_container_label_signoz_io_scrape
|
|
||||||
- regex: true
|
|
||||||
source_labels:
|
|
||||||
- __meta_docker_container_label_signoz_io_path
|
|
||||||
target_label: __metrics_path__
|
|
||||||
- regex: (.+)
|
|
||||||
source_labels:
|
|
||||||
- __meta_docker_container_label_signoz_io_path
|
|
||||||
target_label: __metrics_path__
|
|
||||||
- separator: ":"
|
|
||||||
source_labels:
|
|
||||||
- __meta_docker_network_ip
|
|
||||||
- __meta_docker_container_label_signoz_io_port
|
|
||||||
target_label: __address__
|
|
||||||
- regex: '/(.*)'
|
|
||||||
replacement: '$1'
|
|
||||||
source_labels:
|
|
||||||
- __meta_docker_container_name
|
|
||||||
target_label: container_name
|
|
||||||
- regex: __meta_docker_container_label_signoz_io_(.+)
|
|
||||||
action: labelmap
|
|
||||||
replacement: $1
|
|
||||||
tcplog/docker:
|
|
||||||
listen_address: "0.0.0.0:2255"
|
|
||||||
operators:
|
|
||||||
- type: regex_parser
|
|
||||||
regex: '^<([0-9]+)>[0-9]+ (?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\.[0-9]+)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?) (?P<container_id>\S+) (?P<container_name>\S+) [0-9]+ - -( (?P<body>.*))?'
|
|
||||||
timestamp:
|
|
||||||
parse_from: attributes.timestamp
|
|
||||||
layout: '%Y-%m-%dT%H:%M:%S.%LZ'
|
|
||||||
- type: move
|
|
||||||
from: attributes["body"]
|
|
||||||
to: body
|
|
||||||
- type: remove
|
|
||||||
field: attributes.timestamp
|
|
||||||
# please remove names from below if you want to collect logs from them
|
|
||||||
- type: filter
|
|
||||||
id: signoz_logs_filter
|
|
||||||
expr: 'attributes.container_name matches "^signoz|(signoz-(|otel-collector|clickhouse|zookeeper))|(infra-(logspout|otel-agent)-.*)"'
|
|
||||||
processors:
|
|
||||||
batch:
|
|
||||||
send_batch_size: 10000
|
|
||||||
send_batch_max_size: 11000
|
|
||||||
timeout: 10s
|
|
||||||
resourcedetection:
|
|
||||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
|
||||||
detectors:
|
|
||||||
# - ec2
|
|
||||||
# - gcp
|
|
||||||
# - azure
|
|
||||||
- env
|
|
||||||
- system
|
|
||||||
timeout: 2s
|
|
||||||
extensions:
|
|
||||||
health_check:
|
|
||||||
endpoint: 0.0.0.0:13133
|
|
||||||
pprof:
|
|
||||||
endpoint: 0.0.0.0:1777
|
|
||||||
exporters:
|
|
||||||
otlp:
|
|
||||||
endpoint: ${env:SIGNOZ_COLLECTOR_ENDPOINT}
|
|
||||||
tls:
|
|
||||||
insecure: true
|
|
||||||
headers:
|
|
||||||
signoz-access-token: ${env:SIGNOZ_ACCESS_TOKEN}
|
|
||||||
# debug: {}
|
|
||||||
service:
|
|
||||||
telemetry:
|
|
||||||
logs:
|
|
||||||
encoding: json
|
|
||||||
metrics:
|
|
||||||
address: 0.0.0.0:8888
|
|
||||||
extensions:
|
|
||||||
- health_check
|
|
||||||
- pprof
|
|
||||||
pipelines:
|
|
||||||
traces:
|
|
||||||
receivers: [otlp]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
metrics:
|
|
||||||
receivers: [otlp]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
metrics/hostmetrics:
|
|
||||||
receivers: [hostmetrics]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
metrics/prometheus:
|
|
||||||
receivers: [prometheus]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
logs:
|
|
||||||
receivers: [otlp, tcplog/docker]
|
|
||||||
processors: [resourcedetection, batch]
|
|
||||||
exporters: [otlp]
|
|
||||||
@@ -220,6 +220,13 @@ components:
|
|||||||
- additions
|
- additions
|
||||||
- deletions
|
- deletions
|
||||||
type: object
|
type: object
|
||||||
|
AuthtypesPatchableRole:
|
||||||
|
properties:
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- description
|
||||||
|
type: object
|
||||||
AuthtypesPostableAuthDomain:
|
AuthtypesPostableAuthDomain:
|
||||||
properties:
|
properties:
|
||||||
config:
|
config:
|
||||||
@@ -236,6 +243,15 @@ components:
|
|||||||
password:
|
password:
|
||||||
type: string
|
type: string
|
||||||
type: object
|
type: object
|
||||||
|
AuthtypesPostableRole:
|
||||||
|
properties:
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- name
|
||||||
|
type: object
|
||||||
AuthtypesPostableRotateToken:
|
AuthtypesPostableRotateToken:
|
||||||
properties:
|
properties:
|
||||||
refreshToken:
|
refreshToken:
|
||||||
@@ -251,6 +267,31 @@ components:
|
|||||||
- name
|
- name
|
||||||
- type
|
- type
|
||||||
type: object
|
type: object
|
||||||
|
AuthtypesRole:
|
||||||
|
properties:
|
||||||
|
createdAt:
|
||||||
|
format: date-time
|
||||||
|
type: string
|
||||||
|
description:
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
name:
|
||||||
|
type: string
|
||||||
|
orgId:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
updatedAt:
|
||||||
|
format: date-time
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- id
|
||||||
|
- name
|
||||||
|
- description
|
||||||
|
- type
|
||||||
|
- orgId
|
||||||
|
type: object
|
||||||
AuthtypesRoleMapping:
|
AuthtypesRoleMapping:
|
||||||
properties:
|
properties:
|
||||||
defaultRole:
|
defaultRole:
|
||||||
@@ -557,6 +598,39 @@ components:
|
|||||||
required:
|
required:
|
||||||
- config
|
- config
|
||||||
type: object
|
type: object
|
||||||
|
GlobaltypesAPIKeyConfig:
|
||||||
|
properties:
|
||||||
|
enabled:
|
||||||
|
type: boolean
|
||||||
|
type: object
|
||||||
|
GlobaltypesConfig:
|
||||||
|
properties:
|
||||||
|
external_url:
|
||||||
|
type: string
|
||||||
|
identN:
|
||||||
|
$ref: '#/components/schemas/GlobaltypesIdentNConfig'
|
||||||
|
ingestion_url:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
GlobaltypesIdentNConfig:
|
||||||
|
properties:
|
||||||
|
apikey:
|
||||||
|
$ref: '#/components/schemas/GlobaltypesAPIKeyConfig'
|
||||||
|
impersonation:
|
||||||
|
$ref: '#/components/schemas/GlobaltypesImpersonationConfig'
|
||||||
|
tokenizer:
|
||||||
|
$ref: '#/components/schemas/GlobaltypesTokenizerConfig'
|
||||||
|
type: object
|
||||||
|
GlobaltypesImpersonationConfig:
|
||||||
|
properties:
|
||||||
|
enabled:
|
||||||
|
type: boolean
|
||||||
|
type: object
|
||||||
|
GlobaltypesTokenizerConfig:
|
||||||
|
properties:
|
||||||
|
enabled:
|
||||||
|
type: boolean
|
||||||
|
type: object
|
||||||
MetricsexplorertypesListMetric:
|
MetricsexplorertypesListMetric:
|
||||||
properties:
|
properties:
|
||||||
description:
|
description:
|
||||||
@@ -1722,47 +1796,6 @@ components:
|
|||||||
- status
|
- status
|
||||||
- error
|
- error
|
||||||
type: object
|
type: object
|
||||||
RoletypesPatchableRole:
|
|
||||||
properties:
|
|
||||||
description:
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- description
|
|
||||||
type: object
|
|
||||||
RoletypesPostableRole:
|
|
||||||
properties:
|
|
||||||
description:
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- name
|
|
||||||
type: object
|
|
||||||
RoletypesRole:
|
|
||||||
properties:
|
|
||||||
createdAt:
|
|
||||||
format: date-time
|
|
||||||
type: string
|
|
||||||
description:
|
|
||||||
type: string
|
|
||||||
id:
|
|
||||||
type: string
|
|
||||||
name:
|
|
||||||
type: string
|
|
||||||
orgId:
|
|
||||||
type: string
|
|
||||||
type:
|
|
||||||
type: string
|
|
||||||
updatedAt:
|
|
||||||
format: date-time
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- id
|
|
||||||
- name
|
|
||||||
- description
|
|
||||||
- type
|
|
||||||
- orgId
|
|
||||||
type: object
|
|
||||||
ServiceaccounttypesFactorAPIKey:
|
ServiceaccounttypesFactorAPIKey:
|
||||||
properties:
|
properties:
|
||||||
createdAt:
|
createdAt:
|
||||||
@@ -1993,6 +2026,31 @@ components:
|
|||||||
userId:
|
userId:
|
||||||
type: string
|
type: string
|
||||||
type: object
|
type: object
|
||||||
|
TypesDeprecatedUser:
|
||||||
|
properties:
|
||||||
|
createdAt:
|
||||||
|
format: date-time
|
||||||
|
type: string
|
||||||
|
displayName:
|
||||||
|
type: string
|
||||||
|
email:
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
isRoot:
|
||||||
|
type: boolean
|
||||||
|
orgId:
|
||||||
|
type: string
|
||||||
|
role:
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
updatedAt:
|
||||||
|
format: date-time
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- id
|
||||||
|
type: object
|
||||||
TypesGettableAPIKey:
|
TypesGettableAPIKey:
|
||||||
properties:
|
properties:
|
||||||
createdAt:
|
createdAt:
|
||||||
@@ -2030,13 +2088,6 @@ components:
|
|||||||
required:
|
required:
|
||||||
- id
|
- id
|
||||||
type: object
|
type: object
|
||||||
TypesGettableGlobalConfig:
|
|
||||||
properties:
|
|
||||||
external_url:
|
|
||||||
type: string
|
|
||||||
ingestion_url:
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
TypesIdentifiable:
|
TypesIdentifiable:
|
||||||
properties:
|
properties:
|
||||||
id:
|
id:
|
||||||
@@ -2101,17 +2152,6 @@ components:
|
|||||||
role:
|
role:
|
||||||
type: string
|
type: string
|
||||||
type: object
|
type: object
|
||||||
TypesPostableAcceptInvite:
|
|
||||||
properties:
|
|
||||||
displayName:
|
|
||||||
type: string
|
|
||||||
password:
|
|
||||||
type: string
|
|
||||||
sourceUrl:
|
|
||||||
type: string
|
|
||||||
token:
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
TypesPostableBulkInviteRequest:
|
TypesPostableBulkInviteRequest:
|
||||||
properties:
|
properties:
|
||||||
invites:
|
invites:
|
||||||
@@ -2207,8 +2247,6 @@ components:
|
|||||||
type: boolean
|
type: boolean
|
||||||
orgId:
|
orgId:
|
||||||
type: string
|
type: string
|
||||||
role:
|
|
||||||
type: string
|
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
updatedAt:
|
updatedAt:
|
||||||
@@ -3255,7 +3293,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/TypesGettableGlobalConfig'
|
$ref: '#/components/schemas/GlobaltypesConfig'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -3263,80 +3301,16 @@ paths:
|
|||||||
- data
|
- data
|
||||||
type: object
|
type: object
|
||||||
description: OK
|
description: OK
|
||||||
"401":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Unauthorized
|
|
||||||
"403":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Forbidden
|
|
||||||
"500":
|
"500":
|
||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
|
||||||
- api_key:
|
|
||||||
- EDITOR
|
|
||||||
- tokenizer:
|
|
||||||
- EDITOR
|
|
||||||
summary: Get global config
|
summary: Get global config
|
||||||
tags:
|
tags:
|
||||||
- global
|
- global
|
||||||
/api/v1/invite:
|
/api/v1/invite:
|
||||||
get:
|
|
||||||
deprecated: false
|
|
||||||
description: This endpoint lists all invites
|
|
||||||
operationId: ListInvite
|
|
||||||
responses:
|
|
||||||
"200":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
properties:
|
|
||||||
data:
|
|
||||||
items:
|
|
||||||
$ref: '#/components/schemas/TypesInvite'
|
|
||||||
type: array
|
|
||||||
status:
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- status
|
|
||||||
- data
|
|
||||||
type: object
|
|
||||||
description: OK
|
|
||||||
"401":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Unauthorized
|
|
||||||
"403":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Forbidden
|
|
||||||
"500":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Internal Server Error
|
|
||||||
security:
|
|
||||||
- api_key:
|
|
||||||
- ADMIN
|
|
||||||
- tokenizer:
|
|
||||||
- ADMIN
|
|
||||||
summary: List invites
|
|
||||||
tags:
|
|
||||||
- users
|
|
||||||
post:
|
post:
|
||||||
deprecated: false
|
deprecated: false
|
||||||
description: This endpoint creates an invite for a user
|
description: This endpoint creates an invite for a user
|
||||||
@@ -3399,151 +3373,6 @@ paths:
|
|||||||
summary: Create invite
|
summary: Create invite
|
||||||
tags:
|
tags:
|
||||||
- users
|
- users
|
||||||
/api/v1/invite/{id}:
|
|
||||||
delete:
|
|
||||||
deprecated: false
|
|
||||||
description: This endpoint deletes an invite by id
|
|
||||||
operationId: DeleteInvite
|
|
||||||
parameters:
|
|
||||||
- in: path
|
|
||||||
name: id
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
responses:
|
|
||||||
"204":
|
|
||||||
description: No Content
|
|
||||||
"400":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Bad Request
|
|
||||||
"401":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Unauthorized
|
|
||||||
"403":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Forbidden
|
|
||||||
"404":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Not Found
|
|
||||||
"500":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Internal Server Error
|
|
||||||
security:
|
|
||||||
- api_key:
|
|
||||||
- ADMIN
|
|
||||||
- tokenizer:
|
|
||||||
- ADMIN
|
|
||||||
summary: Delete invite
|
|
||||||
tags:
|
|
||||||
- users
|
|
||||||
/api/v1/invite/{token}:
|
|
||||||
get:
|
|
||||||
deprecated: false
|
|
||||||
description: This endpoint gets an invite by token
|
|
||||||
operationId: GetInvite
|
|
||||||
parameters:
|
|
||||||
- in: path
|
|
||||||
name: token
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
responses:
|
|
||||||
"200":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
properties:
|
|
||||||
data:
|
|
||||||
$ref: '#/components/schemas/TypesInvite'
|
|
||||||
status:
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- status
|
|
||||||
- data
|
|
||||||
type: object
|
|
||||||
description: OK
|
|
||||||
"400":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Bad Request
|
|
||||||
"404":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Not Found
|
|
||||||
"500":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Internal Server Error
|
|
||||||
summary: Get invite
|
|
||||||
tags:
|
|
||||||
- users
|
|
||||||
/api/v1/invite/accept:
|
|
||||||
post:
|
|
||||||
deprecated: false
|
|
||||||
description: This endpoint accepts an invite by token
|
|
||||||
operationId: AcceptInvite
|
|
||||||
requestBody:
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/TypesPostableAcceptInvite'
|
|
||||||
responses:
|
|
||||||
"201":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
properties:
|
|
||||||
data:
|
|
||||||
$ref: '#/components/schemas/TypesUser'
|
|
||||||
status:
|
|
||||||
type: string
|
|
||||||
required:
|
|
||||||
- status
|
|
||||||
- data
|
|
||||||
type: object
|
|
||||||
description: Created
|
|
||||||
"400":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Bad Request
|
|
||||||
"404":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Not Found
|
|
||||||
"500":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Internal Server Error
|
|
||||||
summary: Accept invite
|
|
||||||
tags:
|
|
||||||
- users
|
|
||||||
/api/v1/invite/bulk:
|
/api/v1/invite/bulk:
|
||||||
post:
|
post:
|
||||||
deprecated: false
|
deprecated: false
|
||||||
@@ -4234,7 +4063,7 @@ paths:
|
|||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
items:
|
items:
|
||||||
$ref: '#/components/schemas/RoletypesRole'
|
$ref: '#/components/schemas/AuthtypesRole'
|
||||||
type: array
|
type: array
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
@@ -4277,7 +4106,7 @@ paths:
|
|||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/RoletypesPostableRole'
|
$ref: '#/components/schemas/AuthtypesPostableRole'
|
||||||
responses:
|
responses:
|
||||||
"201":
|
"201":
|
||||||
content:
|
content:
|
||||||
@@ -4422,7 +4251,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/RoletypesRole'
|
$ref: '#/components/schemas/AuthtypesRole'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -4470,7 +4299,7 @@ paths:
|
|||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/RoletypesPatchableRole'
|
$ref: '#/components/schemas/AuthtypesPatchableRole'
|
||||||
responses:
|
responses:
|
||||||
"204":
|
"204":
|
||||||
content:
|
content:
|
||||||
@@ -5271,7 +5100,7 @@ paths:
|
|||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
items:
|
items:
|
||||||
$ref: '#/components/schemas/TypesUser'
|
$ref: '#/components/schemas/TypesDeprecatedUser'
|
||||||
type: array
|
type: array
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
@@ -5369,7 +5198,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/TypesUser'
|
$ref: '#/components/schemas/TypesDeprecatedUser'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -5423,7 +5252,7 @@ paths:
|
|||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/TypesUser'
|
$ref: '#/components/schemas/TypesDeprecatedUser'
|
||||||
responses:
|
responses:
|
||||||
"200":
|
"200":
|
||||||
content:
|
content:
|
||||||
@@ -5431,7 +5260,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/TypesUser'
|
$ref: '#/components/schemas/TypesDeprecatedUser'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -5489,7 +5318,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/TypesUser'
|
$ref: '#/components/schemas/TypesDeprecatedUser'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -5814,9 +5643,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Get ingestion keys for workspace
|
summary: Get ingestion keys for workspace
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -5864,9 +5693,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Create ingestion key for workspace
|
summary: Create ingestion key for workspace
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -5904,9 +5733,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Delete ingestion key for workspace
|
summary: Delete ingestion key for workspace
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -5948,9 +5777,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Update ingestion key for workspace
|
summary: Update ingestion key for workspace
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -6005,9 +5834,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Create limit for the ingestion key
|
summary: Create limit for the ingestion key
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -6045,9 +5874,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Delete limit for the ingestion key
|
summary: Delete limit for the ingestion key
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -6089,9 +5918,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Update limit for the ingestion key
|
summary: Update limit for the ingestion key
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
@@ -6149,9 +5978,9 @@ paths:
|
|||||||
description: Internal Server Error
|
description: Internal Server Error
|
||||||
security:
|
security:
|
||||||
- api_key:
|
- api_key:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
- tokenizer:
|
- tokenizer:
|
||||||
- ADMIN
|
- EDITOR
|
||||||
summary: Search ingestion keys for workspace
|
summary: Search ingestion keys for workspace
|
||||||
tags:
|
tags:
|
||||||
- gateway
|
- gateway
|
||||||
|
|||||||
@@ -74,37 +74,37 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
instrumentationtypes.CodeFunctionName: "getResults",
|
instrumentationtypes.CodeFunctionName: "getResults",
|
||||||
})
|
})
|
||||||
// TODO(srikanthccv): parallelize this?
|
// TODO(srikanthccv): parallelize this?
|
||||||
p.logger.InfoContext(ctx, "fetching results for current period", "anomaly_current_period_query", params.CurrentPeriodQuery)
|
p.logger.InfoContext(ctx, "fetching results for current period", slog.Any("anomaly_current_period_query", params.CurrentPeriodQuery))
|
||||||
currentPeriodResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.CurrentPeriodQuery)
|
currentPeriodResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.CurrentPeriodQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.logger.InfoContext(ctx, "fetching results for past period", "anomaly_past_period_query", params.PastPeriodQuery)
|
p.logger.InfoContext(ctx, "fetching results for past period", slog.Any("anomaly_past_period_query", params.PastPeriodQuery))
|
||||||
pastPeriodResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.PastPeriodQuery)
|
pastPeriodResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.PastPeriodQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.logger.InfoContext(ctx, "fetching results for current season", "anomaly_current_season_query", params.CurrentSeasonQuery)
|
p.logger.InfoContext(ctx, "fetching results for current season", slog.Any("anomaly_current_season_query", params.CurrentSeasonQuery))
|
||||||
currentSeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.CurrentSeasonQuery)
|
currentSeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.CurrentSeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.logger.InfoContext(ctx, "fetching results for past season", "anomaly_past_season_query", params.PastSeasonQuery)
|
p.logger.InfoContext(ctx, "fetching results for past season", slog.Any("anomaly_past_season_query", params.PastSeasonQuery))
|
||||||
pastSeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.PastSeasonQuery)
|
pastSeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.PastSeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.logger.InfoContext(ctx, "fetching results for past 2 season", "anomaly_past_2season_query", params.Past2SeasonQuery)
|
p.logger.InfoContext(ctx, "fetching results for past 2 season", slog.Any("anomaly_past_2season_query", params.Past2SeasonQuery))
|
||||||
past2SeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.Past2SeasonQuery)
|
past2SeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.Past2SeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
p.logger.InfoContext(ctx, "fetching results for past 3 season", "anomaly_past_3season_query", params.Past3SeasonQuery)
|
p.logger.InfoContext(ctx, "fetching results for past 3 season", slog.Any("anomaly_past_3season_query", params.Past3SeasonQuery))
|
||||||
past3SeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.Past3SeasonQuery)
|
past3SeasonResults, err := p.querier.QueryRange(ctx, orgID, ¶ms.Past3SeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -212,17 +212,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
|
|||||||
if predictedValue < 0 {
|
if predictedValue < 0 {
|
||||||
// this should not happen (except when the data has extreme outliers)
|
// this should not happen (except when the data has extreme outliers)
|
||||||
// we will use the moving avg of the previous period series in this case
|
// we will use the moving avg of the previous period series in this case
|
||||||
p.logger.WarnContext(ctx, "predicted value is less than 0 for series", "anomaly_predicted_value", predictedValue, "anomaly_labels", series.Labels)
|
p.logger.WarnContext(ctx, "predicted value is less than 0 for series", slog.Float64("anomaly_predicted_value", predictedValue), slog.Any("anomaly_labels", series.Labels))
|
||||||
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
|
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
|
||||||
}
|
}
|
||||||
|
|
||||||
p.logger.DebugContext(ctx, "predicted value for series",
|
p.logger.DebugContext(ctx, "predicted value for series",
|
||||||
"anomaly_moving_avg", movingAvg,
|
slog.Float64("anomaly_moving_avg", movingAvg),
|
||||||
"anomaly_avg", avg,
|
slog.Float64("anomaly_avg", avg),
|
||||||
"anomaly_mean", mean,
|
slog.Float64("anomaly_mean", mean),
|
||||||
"anomaly_labels", series.Labels,
|
slog.Any("anomaly_labels", series.Labels),
|
||||||
"anomaly_predicted_value", predictedValue,
|
slog.Float64("anomaly_predicted_value", predictedValue),
|
||||||
"anomaly_curr", curr.Value,
|
slog.Float64("anomaly_curr", curr.Value),
|
||||||
)
|
)
|
||||||
predictedSeries.Values = append(predictedSeries.Values, &qbtypes.TimeSeriesValue{
|
predictedSeries.Values = append(predictedSeries.Values, &qbtypes.TimeSeriesValue{
|
||||||
Timestamp: curr.Timestamp,
|
Timestamp: curr.Timestamp,
|
||||||
@@ -412,7 +412,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
|||||||
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
|
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
|
||||||
|
|
||||||
stdDev := p.getStdDev(currentSeasonSeries)
|
stdDev := p.getStdDev(currentSeasonSeries)
|
||||||
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
|
p.logger.InfoContext(ctx, "calculated standard deviation for series", slog.Float64("anomaly_std_dev", stdDev), slog.Any("anomaly_labels", series.Labels))
|
||||||
|
|
||||||
prevSeriesAvg := p.getAvg(pastPeriodSeries)
|
prevSeriesAvg := p.getAvg(pastPeriodSeries)
|
||||||
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
|
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
|
||||||
@@ -420,12 +420,12 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
|||||||
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
|
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
|
||||||
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
|
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
|
||||||
p.logger.InfoContext(ctx, "calculated mean for series",
|
p.logger.InfoContext(ctx, "calculated mean for series",
|
||||||
"anomaly_prev_series_avg", prevSeriesAvg,
|
slog.Float64("anomaly_prev_series_avg", prevSeriesAvg),
|
||||||
"anomaly_current_season_series_avg", currentSeasonSeriesAvg,
|
slog.Float64("anomaly_current_season_series_avg", currentSeasonSeriesAvg),
|
||||||
"anomaly_past_season_series_avg", pastSeasonSeriesAvg,
|
slog.Float64("anomaly_past_season_series_avg", pastSeasonSeriesAvg),
|
||||||
"anomaly_past_2season_series_avg", past2SeasonSeriesAvg,
|
slog.Float64("anomaly_past_2season_series_avg", past2SeasonSeriesAvg),
|
||||||
"anomaly_past_3season_series_avg", past3SeasonSeriesAvg,
|
slog.Float64("anomaly_past_3season_series_avg", past3SeasonSeriesAvg),
|
||||||
"anomaly_labels", series.Labels,
|
slog.Any("anomaly_labels", series.Labels),
|
||||||
)
|
)
|
||||||
|
|
||||||
predictedSeries := p.getPredictedSeries(
|
predictedSeries := p.getPredictedSeries(
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ package oidccallbackauthn
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/authn"
|
"github.com/SigNoz/signoz/pkg/authn"
|
||||||
@@ -150,7 +151,7 @@ func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtype
|
|||||||
// Some IDPs return a single group as a string instead of an array
|
// Some IDPs return a single group as a string instead of an array
|
||||||
groups = append(groups, g)
|
groups = append(groups, g)
|
||||||
default:
|
default:
|
||||||
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
|
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", slog.String("type", fmt.Sprintf("%T", claimValue)))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/licensing"
|
"github.com/SigNoz/signoz/pkg/licensing"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||||
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
||||||
@@ -23,7 +22,7 @@ type provider struct {
|
|||||||
pkgAuthzService authz.AuthZ
|
pkgAuthzService authz.AuthZ
|
||||||
openfgaServer *openfgaserver.Server
|
openfgaServer *openfgaserver.Server
|
||||||
licensing licensing.Licensing
|
licensing licensing.Licensing
|
||||||
store roletypes.Store
|
store authtypes.RoleStore
|
||||||
registry []authz.RegisterTypeable
|
registry []authz.RegisterTypeable
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -82,23 +81,23 @@ func (provider *provider) Write(ctx context.Context, additions []*openfgav1.Tupl
|
|||||||
return provider.openfgaServer.Write(ctx, additions, deletions)
|
return provider.openfgaServer.Write(ctx, additions, deletions)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
|
func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*authtypes.Role, error) {
|
||||||
return provider.pkgAuthzService.Get(ctx, orgID, id)
|
return provider.pkgAuthzService.Get(ctx, orgID, id)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
|
func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*authtypes.Role, error) {
|
||||||
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
|
return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
|
func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*authtypes.Role, error) {
|
||||||
return provider.pkgAuthzService.List(ctx, orgID)
|
return provider.pkgAuthzService.List(ctx, orgID)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
|
func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*authtypes.Role, error) {
|
||||||
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
|
return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*roletypes.Role, error) {
|
func (provider *provider) ListByOrgIDAndIDs(ctx context.Context, orgID valuer.UUID, ids []valuer.UUID) ([]*authtypes.Role, error) {
|
||||||
return provider.pkgAuthzService.ListByOrgIDAndIDs(ctx, orgID, ids)
|
return provider.pkgAuthzService.ListByOrgIDAndIDs(ctx, orgID, ids)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -114,7 +113,7 @@ func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names [
|
|||||||
return provider.pkgAuthzService.Revoke(ctx, orgID, names, subject)
|
return provider.pkgAuthzService.Revoke(ctx, orgID, names, subject)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
|
func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*authtypes.Role) error {
|
||||||
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
|
return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -136,16 +135,16 @@ func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context,
|
|||||||
return provider.Write(ctx, tuples, nil)
|
return provider.Write(ctx, tuples, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) error {
|
||||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
return provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
|
func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) (*authtypes.Role, error) {
|
||||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||||
@@ -159,10 +158,10 @@ func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, ro
|
|||||||
}
|
}
|
||||||
|
|
||||||
if existingRole != nil {
|
if existingRole != nil {
|
||||||
return roletypes.NewRoleFromStorableRole(existingRole), nil
|
return authtypes.NewRoleFromStorableRole(existingRole), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
|
err = provider.store.Create(ctx, authtypes.NewStorableRoleFromRole(role))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -217,13 +216,13 @@ func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id
|
|||||||
return objects, nil
|
return objects, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
|
func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *authtypes.Role) error {
|
||||||
_, err := provider.licensing.GetActive(ctx, orgID)
|
_, err := provider.licensing.GetActive(ctx, orgID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
|
return provider.store.Update(ctx, orgID, authtypes.NewStorableRoleFromRole(role))
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
|
func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
|
||||||
@@ -232,12 +231,12 @@ func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, n
|
|||||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||||
}
|
}
|
||||||
|
|
||||||
additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
|
additionTuples, err := authtypes.GetAdditionTuples(name, orgID, relation, additions)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
|
deletionTuples, err := authtypes.GetDeletionTuples(name, orgID, relation, deletions)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@@ -261,7 +260,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
role := roletypes.NewRoleFromStorableRole(storableRole)
|
role := authtypes.NewRoleFromStorableRole(storableRole)
|
||||||
err = role.ErrIfManaged()
|
err = role.ErrIfManaged()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@@ -271,7 +270,7 @@ func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valu
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
|
func (provider *provider) MustGetTypeables() []authtypes.Typeable {
|
||||||
return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
|
return []authtypes.Typeable{authtypes.TypeableRole, authtypes.TypeableResourcesRoles}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
|
func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
|
||||||
@@ -283,7 +282,7 @@ func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID va
|
|||||||
adminSubject,
|
adminSubject,
|
||||||
authtypes.RelationAssignee,
|
authtypes.RelationAssignee,
|
||||||
[]authtypes.Selector{
|
[]authtypes.Selector{
|
||||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
|
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAdminRoleName),
|
||||||
},
|
},
|
||||||
orgID,
|
orgID,
|
||||||
)
|
)
|
||||||
@@ -298,7 +297,7 @@ func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID va
|
|||||||
anonymousSubject,
|
anonymousSubject,
|
||||||
authtypes.RelationAssignee,
|
authtypes.RelationAssignee,
|
||||||
[]authtypes.Selector{
|
[]authtypes.Selector{
|
||||||
authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
|
authtypes.MustNewSelector(authtypes.TypeRole, authtypes.SigNozAnonymousRoleName),
|
||||||
},
|
},
|
||||||
orgID,
|
orgID,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -3,8 +3,11 @@ package httplicensing
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"log/slog"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/tidwall/gjson"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
|
"github.com/SigNoz/signoz/ee/licensing/licensingstore/sqllicensingstore"
|
||||||
"github.com/SigNoz/signoz/pkg/analytics"
|
"github.com/SigNoz/signoz/pkg/analytics"
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
@@ -16,7 +19,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/SigNoz/signoz/pkg/zeus"
|
"github.com/SigNoz/signoz/pkg/zeus"
|
||||||
"github.com/tidwall/gjson"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type provider struct {
|
type provider struct {
|
||||||
@@ -55,7 +57,7 @@ func (provider *provider) Start(ctx context.Context) error {
|
|||||||
|
|
||||||
err := provider.Validate(ctx)
|
err := provider.Validate(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
|
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", errors.Attr(err))
|
||||||
}
|
}
|
||||||
|
|
||||||
for {
|
for {
|
||||||
@@ -65,7 +67,7 @@ func (provider *provider) Start(ctx context.Context) error {
|
|||||||
case <-tick.C:
|
case <-tick.C:
|
||||||
err := provider.Validate(ctx)
|
err := provider.Validate(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", "error", err)
|
provider.settings.Logger().ErrorContext(ctx, "failed to validate license from upstream server", errors.Attr(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -133,7 +135,7 @@ func (provider *provider) Refresh(ctx context.Context, organizationID valuer.UUI
|
|||||||
if errors.Ast(err, errors.TypeNotFound) {
|
if errors.Ast(err, errors.TypeNotFound) {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
provider.settings.Logger().ErrorContext(ctx, "license validation failed", "org_id", organizationID.StringValue())
|
provider.settings.Logger().ErrorContext(ctx, "license validation failed", slog.String("org_id", organizationID.StringValue()))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -198,7 +200,10 @@ func (provider *provider) Checkout(ctx context.Context, organizationID valuer.UU
|
|||||||
|
|
||||||
response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
|
response, err := provider.zeus.GetCheckoutURL(ctx, activeLicense.Key, body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate checkout session")
|
if errors.Ast(err, errors.TypeAlreadyExists) {
|
||||||
|
return nil, errors.WithAdditionalf(err, "checkout has already been completed for this account. Please click 'Refresh Status' to sync your subscription")
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
|
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
|
||||||
@@ -217,7 +222,7 @@ func (provider *provider) Portal(ctx context.Context, organizationID valuer.UUID
|
|||||||
|
|
||||||
response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
|
response, err := provider.zeus.GetPortalURL(ctx, activeLicense.Key, body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to generate portal session")
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
|
return &licensetypes.GettableSubscription{RedirectURL: gjson.GetBytes(response, "url").String()}, nil
|
||||||
|
|||||||
@@ -19,7 +19,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
|
"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -224,7 +223,7 @@ func (module *module) MustGetTypeables() []authtypes.Typeable {
|
|||||||
|
|
||||||
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
|
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
|
||||||
return map[string][]*authtypes.Transaction{
|
return map[string][]*authtypes.Transaction{
|
||||||
roletypes.SigNozAnonymousRoleName: {
|
authtypes.SigNozAnonymousRoleName: {
|
||||||
{
|
{
|
||||||
ID: valuer.GenerateUUID(),
|
ID: valuer.GenerateUUID(),
|
||||||
Relation: authtypes.RelationRead,
|
Relation: authtypes.RelationRead,
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"runtime/debug"
|
|
||||||
|
|
||||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
@@ -54,26 +53,6 @@ func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
defer func() {
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
stackTrace := string(debug.Stack())
|
|
||||||
|
|
||||||
queryJSON, _ := json.Marshal(queryRangeRequest)
|
|
||||||
|
|
||||||
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
|
|
||||||
"error", r,
|
|
||||||
"user", claims.UserID,
|
|
||||||
"payload", string(queryJSON),
|
|
||||||
"stacktrace", stackTrace,
|
|
||||||
)
|
|
||||||
|
|
||||||
render.Error(rw, errors.NewInternalf(
|
|
||||||
errors.CodeInternal,
|
|
||||||
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
if err := queryRangeRequest.Validate(); err != nil {
|
if err := queryRangeRequest.Validate(); err != nil {
|
||||||
render.Error(rw, err)
|
render.Error(rw, err)
|
||||||
return
|
return
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package anomaly
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"log/slog"
|
||||||
"math"
|
"math"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@@ -13,7 +14,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
|
"github.com/SigNoz/signoz/pkg/types/instrumentationtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"go.uber.org/zap"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
@@ -67,7 +67,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
instrumentationtypes.CodeNamespace: "anomaly",
|
instrumentationtypes.CodeNamespace: "anomaly",
|
||||||
instrumentationtypes.CodeFunctionName: "getResults",
|
instrumentationtypes.CodeFunctionName: "getResults",
|
||||||
})
|
})
|
||||||
zap.L().Info("fetching results for current period", zap.Any("currentPeriodQuery", params.CurrentPeriodQuery))
|
slog.InfoContext(ctx, "fetching results for current period", "current_period_query", params.CurrentPeriodQuery)
|
||||||
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
|
currentPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentPeriodQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -78,7 +78,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("fetching results for past period", zap.Any("pastPeriodQuery", params.PastPeriodQuery))
|
slog.InfoContext(ctx, "fetching results for past period", "past_period_query", params.PastPeriodQuery)
|
||||||
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
|
pastPeriodResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastPeriodQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -89,7 +89,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("fetching results for current season", zap.Any("currentSeasonQuery", params.CurrentSeasonQuery))
|
slog.InfoContext(ctx, "fetching results for current season", "current_season_query", params.CurrentSeasonQuery)
|
||||||
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
|
currentSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.CurrentSeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -100,7 +100,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("fetching results for past season", zap.Any("pastSeasonQuery", params.PastSeasonQuery))
|
slog.InfoContext(ctx, "fetching results for past season", "past_season_query", params.PastSeasonQuery)
|
||||||
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
|
pastSeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.PastSeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -111,7 +111,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("fetching results for past 2 season", zap.Any("past2SeasonQuery", params.Past2SeasonQuery))
|
slog.InfoContext(ctx, "fetching results for past 2 season", "past_2_season_query", params.Past2SeasonQuery)
|
||||||
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
|
past2SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past2SeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -122,7 +122,7 @@ func (p *BaseSeasonalProvider) getResults(ctx context.Context, orgID valuer.UUID
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("fetching results for past 3 season", zap.Any("past3SeasonQuery", params.Past3SeasonQuery))
|
slog.InfoContext(ctx, "fetching results for past 3 season", "past_3_season_query", params.Past3SeasonQuery)
|
||||||
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
|
past3SeasonResults, _, err := p.querierV2.QueryRange(ctx, orgID, params.Past3SeasonQuery)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@@ -235,17 +235,17 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
|
|||||||
if predictedValue < 0 {
|
if predictedValue < 0 {
|
||||||
// this should not happen (except when the data has extreme outliers)
|
// this should not happen (except when the data has extreme outliers)
|
||||||
// we will use the moving avg of the previous period series in this case
|
// we will use the moving avg of the previous period series in this case
|
||||||
zap.L().Warn("predictedValue is less than 0", zap.Float64("predictedValue", predictedValue), zap.Any("labels", series.Labels))
|
slog.Warn("predicted value is less than 0", "predicted_value", predictedValue, "labels", series.Labels)
|
||||||
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
|
predictedValue = p.getMovingAvg(prevSeries, movingAvgWindowSize, idx)
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Debug("predictedSeries",
|
slog.Debug("predicted series",
|
||||||
zap.Float64("movingAvg", movingAvg),
|
"moving_avg", movingAvg,
|
||||||
zap.Float64("avg", avg),
|
"avg", avg,
|
||||||
zap.Float64("mean", mean),
|
"mean", mean,
|
||||||
zap.Any("labels", series.Labels),
|
"labels", series.Labels,
|
||||||
zap.Float64("predictedValue", predictedValue),
|
"predicted_value", predictedValue,
|
||||||
zap.Float64("curr", curr.Value),
|
"curr", curr.Value,
|
||||||
)
|
)
|
||||||
predictedSeries.Points = append(predictedSeries.Points, v3.Point{
|
predictedSeries.Points = append(predictedSeries.Points, v3.Point{
|
||||||
Timestamp: curr.Timestamp,
|
Timestamp: curr.Timestamp,
|
||||||
@@ -418,7 +418,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
|||||||
|
|
||||||
for _, series := range result.Series {
|
for _, series := range result.Series {
|
||||||
stdDev := p.getStdDev(series)
|
stdDev := p.getStdDev(series)
|
||||||
zap.L().Info("stdDev", zap.Float64("stdDev", stdDev), zap.Any("labels", series.Labels))
|
slog.InfoContext(ctx, "computed standard deviation", "std_dev", stdDev, "labels", series.Labels)
|
||||||
|
|
||||||
pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
|
pastPeriodSeries := p.getMatchingSeries(pastPeriodResult, series)
|
||||||
currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
|
currentSeasonSeries := p.getMatchingSeries(currentSeasonResult, series)
|
||||||
@@ -431,7 +431,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
|||||||
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
|
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
|
||||||
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
|
past2SeasonSeriesAvg := p.getAvg(past2SeasonSeries)
|
||||||
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
|
past3SeasonSeriesAvg := p.getAvg(past3SeasonSeries)
|
||||||
zap.L().Info("getAvg", zap.Float64("prevSeriesAvg", prevSeriesAvg), zap.Float64("currentSeasonSeriesAvg", currentSeasonSeriesAvg), zap.Float64("pastSeasonSeriesAvg", pastSeasonSeriesAvg), zap.Float64("past2SeasonSeriesAvg", past2SeasonSeriesAvg), zap.Float64("past3SeasonSeriesAvg", past3SeasonSeriesAvg), zap.Any("labels", series.Labels))
|
slog.InfoContext(ctx, "computed averages", "prev_series_avg", prevSeriesAvg, "current_season_series_avg", currentSeasonSeriesAvg, "past_season_series_avg", pastSeasonSeriesAvg, "past_2_season_series_avg", past2SeasonSeriesAvg, "past_3_season_series_avg", past3SeasonSeriesAvg, "labels", series.Labels)
|
||||||
|
|
||||||
predictedSeries := p.getPredictedSeries(
|
predictedSeries := p.getPredictedSeries(
|
||||||
series,
|
series,
|
||||||
|
|||||||
@@ -10,6 +10,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"log/slog"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||||
@@ -18,7 +20,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/gorilla/mux"
|
"github.com/gorilla/mux"
|
||||||
"go.uber.org/zap"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type CloudIntegrationConnectionParamsResponse struct {
|
type CloudIntegrationConnectionParamsResponse struct {
|
||||||
@@ -71,7 +72,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
|||||||
// Return the API Key (PAT) even if the rest of the params can not be deduced.
|
// Return the API Key (PAT) even if the rest of the params can not be deduced.
|
||||||
// Params not returned from here will be requested from the user via form inputs.
|
// Params not returned from here will be requested from the user via form inputs.
|
||||||
// This enables gracefully degraded but working experience even for non-cloud deployments.
|
// This enables gracefully degraded but working experience even for non-cloud deployments.
|
||||||
zap.L().Info("ingestion params and signoz api url can not be deduced since no license was found")
|
slog.InfoContext(r.Context(), "ingestion params and signoz api url can not be deduced since no license was found")
|
||||||
ah.Respond(w, result)
|
ah.Respond(w, result)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -103,7 +104,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
|||||||
result.IngestionKey = ingestionKey
|
result.IngestionKey = ingestionKey
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
zap.L().Info("ingestion key can't be deduced since no gateway url has been configured")
|
slog.InfoContext(r.Context(), "ingestion key can't be deduced since no gateway url has been configured")
|
||||||
}
|
}
|
||||||
|
|
||||||
ah.Respond(w, result)
|
ah.Respond(w, result)
|
||||||
@@ -126,7 +127,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
allPats, err := ah.Signoz.Modules.User.ListAPIKeys(ctx, orgIdUUID)
|
allPats, err := ah.Signoz.Modules.UserSetter.ListAPIKeys(ctx, orgIdUUID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", basemodel.InternalError(fmt.Errorf(
|
return "", basemodel.InternalError(fmt.Errorf(
|
||||||
"couldn't list PATs: %w", err,
|
"couldn't list PATs: %w", err,
|
||||||
@@ -138,9 +139,8 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info(
|
slog.InfoContext(ctx, "no PAT found for cloud integration, creating a new one",
|
||||||
"no PAT found for cloud integration, creating a new one",
|
"cloud_provider", cloudProvider,
|
||||||
zap.String("cloudProvider", cloudProvider),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
newPAT, err := types.NewStorableAPIKey(
|
newPAT, err := types.NewStorableAPIKey(
|
||||||
@@ -155,7 +155,7 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
err = ah.Signoz.Modules.User.CreateAPIKey(ctx, newPAT)
|
err = ah.Signoz.Modules.UserSetter.CreateAPIKey(ctx, newPAT)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", basemodel.InternalError(fmt.Errorf(
|
return "", basemodel.InternalError(fmt.Errorf(
|
||||||
"couldn't create cloud integration PAT: %w", err,
|
"couldn't create cloud integration PAT: %w", err,
|
||||||
@@ -170,14 +170,19 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
|||||||
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
|
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
|
||||||
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
|
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
|
||||||
|
|
||||||
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId), types.UserStatusActive)
|
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, valuer.MustNewUUID(orgId), types.UserStatusActive)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
|
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
|
||||||
}
|
}
|
||||||
|
|
||||||
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
|
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
|
||||||
|
|
||||||
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
|
cloudIntegrationUser, err = ah.Signoz.Modules.UserSetter.GetOrCreateUser(
|
||||||
|
ctx,
|
||||||
|
cloudIntegrationUser,
|
||||||
|
user.WithFactorPassword(password),
|
||||||
|
user.WithRoleNames([]string{authtypes.SigNozViewerRoleName}),
|
||||||
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
|
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
|
||||||
}
|
}
|
||||||
@@ -287,9 +292,8 @@ func getOrCreateCloudProviderIngestionKey(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info(
|
slog.InfoContext(ctx, "no existing ingestion key found for cloud integration, creating a new one",
|
||||||
"no existing ingestion key found for cloud integration, creating a new one",
|
"cloud_provider", cloudProvider,
|
||||||
zap.String("cloudProvider", cloudProvider),
|
|
||||||
)
|
)
|
||||||
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
|
createKeyResult, apiErr := requestGateway[createIngestionKeyResponse](
|
||||||
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
|
ctx, gatewayUrl, licenseKey, "/v1/workspaces/me/keys",
|
||||||
|
|||||||
@@ -4,10 +4,15 @@ import (
|
|||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
signozerrors "github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
|
||||||
|
"log/slog"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||||
"github.com/SigNoz/signoz/pkg/flagger"
|
"github.com/SigNoz/signoz/pkg/flagger"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
@@ -15,7 +20,6 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"go.uber.org/zap"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
||||||
@@ -35,23 +39,23 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if constants.FetchFeatures == "true" {
|
if constants.FetchFeatures == "true" {
|
||||||
zap.L().Debug("fetching license")
|
slog.DebugContext(ctx, "fetching license")
|
||||||
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
|
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to fetch license", zap.Error(err))
|
slog.ErrorContext(ctx, "failed to fetch license", signozerrors.Attr(err))
|
||||||
} else if license == nil {
|
} else if license == nil {
|
||||||
zap.L().Debug("no active license found")
|
slog.DebugContext(ctx, "no active license found")
|
||||||
} else {
|
} else {
|
||||||
licenseKey := license.Key
|
licenseKey := license.Key
|
||||||
|
|
||||||
zap.L().Debug("fetching zeus features")
|
slog.DebugContext(ctx, "fetching zeus features")
|
||||||
zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
|
zeusFeatures, err := fetchZeusFeatures(constants.ZeusFeaturesURL, licenseKey)
|
||||||
if err == nil {
|
if err == nil {
|
||||||
zap.L().Debug("fetched zeus features", zap.Any("features", zeusFeatures))
|
slog.DebugContext(ctx, "fetched zeus features", "features", zeusFeatures)
|
||||||
// merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
|
// merge featureSet and zeusFeatures in featureSet with higher priority to zeusFeatures
|
||||||
featureSet = MergeFeatureSets(zeusFeatures, featureSet)
|
featureSet = MergeFeatureSets(zeusFeatures, featureSet)
|
||||||
} else {
|
} else {
|
||||||
zap.L().Error("failed to fetch zeus features", zap.Error(err))
|
slog.ErrorContext(ctx, "failed to fetch zeus features", signozerrors.Attr(err))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/http/render"
|
"github.com/SigNoz/signoz/pkg/http/render"
|
||||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||||
@@ -14,7 +15,7 @@ import (
|
|||||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"go.uber.org/zap"
|
"log/slog"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||||
@@ -35,7 +36,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
|||||||
queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
|
queryRangeParams, apiErrorObj := baseapp.ParseQueryRangeParams(r)
|
||||||
|
|
||||||
if apiErrorObj != nil {
|
if apiErrorObj != nil {
|
||||||
zap.L().Error("error parsing metric query range params", zap.Error(apiErrorObj.Err))
|
slog.ErrorContext(r.Context(), "error parsing metric query range params", errors.Attr(apiErrorObj.Err))
|
||||||
RespondError(w, apiErrorObj, nil)
|
RespondError(w, apiErrorObj, nil)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -44,7 +45,7 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
|||||||
// add temporality for each metric
|
// add temporality for each metric
|
||||||
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
|
temporalityErr := aH.PopulateTemporality(r.Context(), orgID, queryRangeParams)
|
||||||
if temporalityErr != nil {
|
if temporalityErr != nil {
|
||||||
zap.L().Error("Error while adding temporality for metrics", zap.Error(temporalityErr))
|
slog.ErrorContext(r.Context(), "error while adding temporality for metrics", errors.Attr(temporalityErr))
|
||||||
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
|
RespondError(w, &model.ApiError{Typ: model.ErrorInternal, Err: temporalityErr}, nil)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,19 +5,23 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"net"
|
"net"
|
||||||
"net/http"
|
"net/http"
|
||||||
_ "net/http/pprof" // http profiler
|
|
||||||
"slices"
|
"slices"
|
||||||
|
|
||||||
|
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
|
||||||
|
"go.opentelemetry.io/otel/propagation"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
|
|
||||||
"go.opentelemetry.io/otel/propagation"
|
|
||||||
|
|
||||||
"github.com/gorilla/handlers"
|
"github.com/gorilla/handlers"
|
||||||
|
|
||||||
|
"github.com/rs/cors"
|
||||||
|
"github.com/soheilhy/cmux"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
||||||
"github.com/SigNoz/signoz/ee/query-service/rules"
|
"github.com/SigNoz/signoz/ee/query-service/rules"
|
||||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||||
@@ -31,8 +35,8 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
"github.com/SigNoz/signoz/pkg/web"
|
"github.com/SigNoz/signoz/pkg/web"
|
||||||
"github.com/rs/cors"
|
|
||||||
"github.com/soheilhy/cmux"
|
"log/slog"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
"github.com/SigNoz/signoz/pkg/query-service/agentConf"
|
||||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||||
@@ -47,7 +51,6 @@ import (
|
|||||||
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||||
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
"github.com/SigNoz/signoz/pkg/query-service/utils"
|
||||||
"go.uber.org/zap"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Server runs HTTP, Mux and a grpc server
|
// Server runs HTTP, Mux and a grpc server
|
||||||
@@ -83,6 +86,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
reader := clickhouseReader.NewReader(
|
reader := clickhouseReader.NewReader(
|
||||||
|
signoz.Instrumentation.Logger(),
|
||||||
signoz.SQLStore,
|
signoz.SQLStore,
|
||||||
signoz.TelemetryStore,
|
signoz.TelemetryStore,
|
||||||
signoz.Prometheus,
|
signoz.Prometheus,
|
||||||
@@ -206,6 +210,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
|||||||
r := baseapp.NewRouter()
|
r := baseapp.NewRouter()
|
||||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
|
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)
|
||||||
|
|
||||||
|
r.Use(middleware.NewRecovery(s.signoz.Instrumentation.Logger()).Wrap)
|
||||||
r.Use(otelmux.Middleware(
|
r.Use(otelmux.Middleware(
|
||||||
"apiserver",
|
"apiserver",
|
||||||
otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
|
otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
|
||||||
@@ -214,10 +219,8 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
|||||||
otelmux.WithFilter(func(r *http.Request) bool {
|
otelmux.WithFilter(func(r *http.Request) bool {
|
||||||
return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
|
return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
|
||||||
}),
|
}),
|
||||||
otelmux.WithPublicEndpoint(),
|
|
||||||
))
|
))
|
||||||
r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap)
|
r.Use(middleware.NewIdentN(s.signoz.IdentNResolver, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
|
||||||
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
|
|
||||||
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
|
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
|
||||||
s.config.APIServer.Timeout.ExcludedRoutes,
|
s.config.APIServer.Timeout.ExcludedRoutes,
|
||||||
s.config.APIServer.Timeout.Default,
|
s.config.APIServer.Timeout.Default,
|
||||||
@@ -278,7 +281,7 @@ func (s *Server) initListeners() error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
|
slog.Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@@ -298,31 +301,22 @@ func (s *Server) Start(ctx context.Context) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
go func() {
|
go func() {
|
||||||
zap.L().Info("Starting HTTP server", zap.Int("port", httpPort), zap.String("addr", s.httpHostPort))
|
slog.Info("Starting HTTP server", "port", httpPort, "addr", s.httpHostPort)
|
||||||
|
|
||||||
switch err := s.httpServer.Serve(s.httpConn); err {
|
switch err := s.httpServer.Serve(s.httpConn); err {
|
||||||
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
|
case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
|
||||||
// normal exit, nothing to do
|
// normal exit, nothing to do
|
||||||
default:
|
default:
|
||||||
zap.L().Error("Could not start HTTP server", zap.Error(err))
|
slog.Error("Could not start HTTP server", errors.Attr(err))
|
||||||
}
|
}
|
||||||
s.unavailableChannel <- healthcheck.Unavailable
|
s.unavailableChannel <- healthcheck.Unavailable
|
||||||
}()
|
}()
|
||||||
|
|
||||||
go func() {
|
go func() {
|
||||||
zap.L().Info("Starting pprof server", zap.String("addr", baseconst.DebugHttpPort))
|
slog.Info("Starting OpAmp Websocket server", "addr", baseconst.OpAmpWsEndpoint)
|
||||||
|
|
||||||
err = http.ListenAndServe(baseconst.DebugHttpPort, nil)
|
|
||||||
if err != nil {
|
|
||||||
zap.L().Error("Could not start pprof server", zap.Error(err))
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
go func() {
|
|
||||||
zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
|
|
||||||
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
|
err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("opamp ws server failed to start", zap.Error(err))
|
slog.Error("opamp ws server failed to start", errors.Attr(err))
|
||||||
s.unavailableChannel <- healthcheck.Unavailable
|
s.unavailableChannel <- healthcheck.Unavailable
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
@@ -358,10 +352,9 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
|
|||||||
MetadataStore: metadataStore,
|
MetadataStore: metadataStore,
|
||||||
Prometheus: prometheus,
|
Prometheus: prometheus,
|
||||||
Context: context.Background(),
|
Context: context.Background(),
|
||||||
Logger: zap.L(),
|
|
||||||
Reader: ch,
|
Reader: ch,
|
||||||
Querier: querier,
|
Querier: querier,
|
||||||
SLogger: providerSettings.Logger,
|
Logger: providerSettings.Logger,
|
||||||
Cache: cache,
|
Cache: cache,
|
||||||
EvalDelay: baseconst.GetEvalDelay(),
|
EvalDelay: baseconst.GetEvalDelay(),
|
||||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||||
@@ -380,7 +373,7 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
|
|||||||
return nil, fmt.Errorf("rule manager error: %v", err)
|
return nil, fmt.Errorf("rule manager error: %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("rules manager is ready")
|
slog.Info("rules manager is ready")
|
||||||
|
|
||||||
return manager, nil
|
return manager, nil
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,6 +12,7 @@ import (
|
|||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||||
"github.com/SigNoz/signoz/pkg/cache"
|
"github.com/SigNoz/signoz/pkg/cache"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
"github.com/SigNoz/signoz/pkg/transition"
|
"github.com/SigNoz/signoz/pkg/transition"
|
||||||
@@ -308,7 +309,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
|
|||||||
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
|
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
|
||||||
// In case of error we log the error and continue with the original series
|
// In case of error we log the error and continue with the original series
|
||||||
if filterErr != nil {
|
if filterErr != nil {
|
||||||
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
|
r.logger.ErrorContext(ctx, "Error filtering new series, ", errors.Attr(filterErr), "rule_name", r.Name())
|
||||||
} else {
|
} else {
|
||||||
seriesToProcess = filteredSeries
|
seriesToProcess = filteredSeries
|
||||||
}
|
}
|
||||||
@@ -391,7 +392,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
|||||||
result, err := tmpl.Expand()
|
result, err := tmpl.Expand()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
result = fmt.Sprintf("<error expanding template: %s>", err)
|
result = fmt.Sprintf("<error expanding template: %s>", err)
|
||||||
r.logger.ErrorContext(ctx, "Expanding alert template failed", "error", err, "data", tmplData, "rule_name", r.Name())
|
r.logger.ErrorContext(ctx, "Expanding alert template failed", errors.Attr(err), "data", tmplData, "rule_name", r.Name())
|
||||||
}
|
}
|
||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
@@ -467,7 +468,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
|||||||
for fp, a := range r.Active {
|
for fp, a := range r.Active {
|
||||||
labelsJSON, err := json.Marshal(a.QueryResultLables)
|
labelsJSON, err := json.Marshal(a.QueryResultLables)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
r.logger.ErrorContext(ctx, "error marshaling labels", "error", err, "labels", a.Labels)
|
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "labels", a.Labels)
|
||||||
}
|
}
|
||||||
if _, ok := resultFPs[fp]; !ok {
|
if _, ok := resultFPs[fp]; !ok {
|
||||||
// If the alert was previously firing, keep it around for a given
|
// If the alert was previously firing, keep it around for a given
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package rules
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
"log/slog"
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
@@ -116,7 +117,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
|
|||||||
|
|
||||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
||||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||||
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
||||||
|
|
||||||
rule, err := NewAnomalyRule(
|
rule, err := NewAnomalyRule(
|
||||||
"test-anomaly-rule",
|
"test-anomaly-rule",
|
||||||
@@ -247,7 +248,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
|
|||||||
|
|
||||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
||||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||||
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
||||||
|
|
||||||
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
|
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|||||||
@@ -6,14 +6,16 @@ import (
|
|||||||
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"log/slog"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||||
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
"github.com/google/uuid"
|
|
||||||
"go.uber.org/zap"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
|
func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error) {
|
||||||
@@ -34,7 +36,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
|||||||
opts.Rule,
|
opts.Rule,
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.Querier,
|
opts.Querier,
|
||||||
opts.SLogger,
|
opts.Logger,
|
||||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||||
baserules.WithSQLStore(opts.SQLStore),
|
baserules.WithSQLStore(opts.SQLStore),
|
||||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||||
@@ -57,7 +59,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
|||||||
ruleId,
|
ruleId,
|
||||||
opts.OrgID,
|
opts.OrgID,
|
||||||
opts.Rule,
|
opts.Rule,
|
||||||
opts.SLogger,
|
opts.Logger,
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.ManagerOpts.Prometheus,
|
opts.ManagerOpts.Prometheus,
|
||||||
baserules.WithSQLStore(opts.SQLStore),
|
baserules.WithSQLStore(opts.SQLStore),
|
||||||
@@ -82,7 +84,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
|||||||
opts.Rule,
|
opts.Rule,
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.Querier,
|
opts.Querier,
|
||||||
opts.SLogger,
|
opts.Logger,
|
||||||
opts.Cache,
|
opts.Cache,
|
||||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||||
baserules.WithSQLStore(opts.SQLStore),
|
baserules.WithSQLStore(opts.SQLStore),
|
||||||
@@ -142,7 +144,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
parsedRule,
|
parsedRule,
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.Querier,
|
opts.Querier,
|
||||||
opts.SLogger,
|
opts.Logger,
|
||||||
baserules.WithSendAlways(),
|
baserules.WithSendAlways(),
|
||||||
baserules.WithSendUnmatched(),
|
baserules.WithSendUnmatched(),
|
||||||
baserules.WithSQLStore(opts.SQLStore),
|
baserules.WithSQLStore(opts.SQLStore),
|
||||||
@@ -151,7 +153,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
)
|
)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to prepare a new threshold rule for test", zap.String("name", alertname), zap.Error(err))
|
slog.Error("failed to prepare a new threshold rule for test", "name", alertname, errors.Attr(err))
|
||||||
return 0, basemodel.BadRequest(err)
|
return 0, basemodel.BadRequest(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -162,7 +164,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
alertname,
|
alertname,
|
||||||
opts.OrgID,
|
opts.OrgID,
|
||||||
parsedRule,
|
parsedRule,
|
||||||
opts.SLogger,
|
opts.Logger,
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.ManagerOpts.Prometheus,
|
opts.ManagerOpts.Prometheus,
|
||||||
baserules.WithSendAlways(),
|
baserules.WithSendAlways(),
|
||||||
@@ -173,7 +175,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
)
|
)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to prepare a new promql rule for test", zap.String("name", alertname), zap.Error(err))
|
slog.Error("failed to prepare a new promql rule for test", "name", alertname, errors.Attr(err))
|
||||||
return 0, basemodel.BadRequest(err)
|
return 0, basemodel.BadRequest(err)
|
||||||
}
|
}
|
||||||
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
|
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
|
||||||
@@ -184,7 +186,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
parsedRule,
|
parsedRule,
|
||||||
opts.Reader,
|
opts.Reader,
|
||||||
opts.Querier,
|
opts.Querier,
|
||||||
opts.SLogger,
|
opts.Logger,
|
||||||
opts.Cache,
|
opts.Cache,
|
||||||
baserules.WithSendAlways(),
|
baserules.WithSendAlways(),
|
||||||
baserules.WithSendUnmatched(),
|
baserules.WithSendUnmatched(),
|
||||||
@@ -193,7 +195,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||||
)
|
)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
|
slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, errors.Attr(err))
|
||||||
return 0, basemodel.BadRequest(err)
|
return 0, basemodel.BadRequest(err)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
@@ -205,7 +207,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
|||||||
|
|
||||||
alertsFound, err := rule.Eval(ctx, ts)
|
alertsFound, err := rule.Eval(ctx, ts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
|
slog.Error("evaluating rule failed", "rule", rule.Name(), errors.Attr(err))
|
||||||
return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
|
return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
|
||||||
}
|
}
|
||||||
rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)
|
rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)
|
||||||
|
|||||||
@@ -80,6 +80,21 @@ func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
|
|||||||
alertDataRows := cmock.NewRows(cols, tc.Values)
|
alertDataRows := cmock.NewRows(cols, tc.Values)
|
||||||
|
|
||||||
mock := telemetryStore.Mock()
|
mock := telemetryStore.Mock()
|
||||||
|
// Mock metadata queries for FetchTemporalityAndTypeMulti
|
||||||
|
// First query: fetchMetricsTemporalityAndType (from signoz_metrics time series table)
|
||||||
|
metadataCols := []cmock.ColumnType{
|
||||||
|
{Name: "metric_name", Type: "String"},
|
||||||
|
{Name: "temporality", Type: "String"},
|
||||||
|
{Name: "type", Type: "String"},
|
||||||
|
{Name: "is_monotonic", Type: "Bool"},
|
||||||
|
}
|
||||||
|
metadataRows := cmock.NewRows(metadataCols, [][]any{
|
||||||
|
{"probe_success", metrictypes.Unspecified, metrictypes.GaugeType, false},
|
||||||
|
})
|
||||||
|
mock.ExpectQuery("*distributed_time_series_v4*").WithArgs(nil, nil, nil).WillReturnRows(metadataRows)
|
||||||
|
// Second query: fetchMeterSourceMetricsTemporalityAndType (from signoz_meter table)
|
||||||
|
emptyMetadataRows := cmock.NewRows(metadataCols, [][]any{})
|
||||||
|
mock.ExpectQuery("*meter*").WithArgs(nil).WillReturnRows(emptyMetadataRows)
|
||||||
|
|
||||||
// Generate query arguments for the metric query
|
// Generate query arguments for the metric query
|
||||||
evalTime := time.Now().UTC()
|
evalTime := time.Now().UTC()
|
||||||
|
|||||||
@@ -8,13 +8,14 @@ import (
|
|||||||
"sync/atomic"
|
"sync/atomic"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"log/slog"
|
||||||
|
|
||||||
"github.com/ClickHouse/clickhouse-go/v2"
|
"github.com/ClickHouse/clickhouse-go/v2"
|
||||||
"github.com/go-co-op/gocron"
|
"github.com/go-co-op/gocron"
|
||||||
"github.com/google/uuid"
|
"github.com/google/uuid"
|
||||||
|
|
||||||
"go.uber.org/zap"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/ee/query-service/model"
|
"github.com/SigNoz/signoz/ee/query-service/model"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/licensing"
|
"github.com/SigNoz/signoz/pkg/licensing"
|
||||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||||
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
|
"github.com/SigNoz/signoz/pkg/query-service/utils/encryption"
|
||||||
@@ -76,19 +77,19 @@ func (lm *Manager) Start(ctx context.Context) error {
|
|||||||
func (lm *Manager) UploadUsage(ctx context.Context) {
|
func (lm *Manager) UploadUsage(ctx context.Context) {
|
||||||
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
|
organizations, err := lm.orgGetter.ListByOwnedKeyRange(ctx)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to get organizations", zap.Error(err))
|
slog.ErrorContext(ctx, "failed to get organizations", errors.Attr(err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
for _, organization := range organizations {
|
for _, organization := range organizations {
|
||||||
// check if license is present or not
|
// check if license is present or not
|
||||||
license, err := lm.licenseService.GetActive(ctx, organization.ID)
|
license, err := lm.licenseService.GetActive(ctx, organization.ID)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("failed to get active license", zap.Error(err))
|
slog.ErrorContext(ctx, "failed to get active license", errors.Attr(err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
if license == nil {
|
if license == nil {
|
||||||
// we will not start the usage reporting if license is not present.
|
// we will not start the usage reporting if license is not present.
|
||||||
zap.L().Info("no license present, skipping usage reporting")
|
slog.InfoContext(ctx, "no license present, skipping usage reporting")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -115,7 +116,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
|
|||||||
dbusages := []model.UsageDB{}
|
dbusages := []model.UsageDB{}
|
||||||
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
err := lm.clickhouseConn.Select(ctx, &dbusages, fmt.Sprintf(query, db, db), time.Now().Add(-(24 * time.Hour)))
|
||||||
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
if err != nil && !strings.Contains(err.Error(), "doesn't exist") {
|
||||||
zap.L().Error("failed to get usage from clickhouse: %v", zap.Error(err))
|
slog.ErrorContext(ctx, "failed to get usage from clickhouse", errors.Attr(err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
for _, u := range dbusages {
|
for _, u := range dbusages {
|
||||||
@@ -125,24 +126,24 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if len(usages) <= 0 {
|
if len(usages) <= 0 {
|
||||||
zap.L().Info("no snapshots to upload, skipping.")
|
slog.InfoContext(ctx, "no snapshots to upload, skipping")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
zap.L().Info("uploading usage data")
|
slog.InfoContext(ctx, "uploading usage data")
|
||||||
|
|
||||||
usagesPayload := []model.Usage{}
|
usagesPayload := []model.Usage{}
|
||||||
for _, usage := range usages {
|
for _, usage := range usages {
|
||||||
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
usageDataBytes, err := encryption.Decrypt([]byte(usage.ExporterID[:32]), []byte(usage.Data))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("error while decrypting usage data: %v", zap.Error(err))
|
slog.ErrorContext(ctx, "error while decrypting usage data", errors.Attr(err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
usageData := model.Usage{}
|
usageData := model.Usage{}
|
||||||
err = json.Unmarshal(usageDataBytes, &usageData)
|
err = json.Unmarshal(usageDataBytes, &usageData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
zap.L().Error("error while unmarshalling usage data: %v", zap.Error(err))
|
slog.ErrorContext(ctx, "error while unmarshalling usage data", errors.Attr(err))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -163,13 +164,13 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
|
|||||||
|
|
||||||
body, errv2 := json.Marshal(payload)
|
body, errv2 := json.Marshal(payload)
|
||||||
if errv2 != nil {
|
if errv2 != nil {
|
||||||
zap.L().Error("error while marshalling usage payload: %v", zap.Error(errv2))
|
slog.ErrorContext(ctx, "error while marshalling usage payload", errors.Attr(errv2))
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
|
errv2 = lm.zeus.PutMeters(ctx, payload.LicenseKey.String(), body)
|
||||||
if errv2 != nil {
|
if errv2 != nil {
|
||||||
zap.L().Error("failed to upload usage: %v", zap.Error(errv2))
|
slog.ErrorContext(ctx, "failed to upload usage", errors.Attr(errv2))
|
||||||
// not returning error here since it is captured in the failed count
|
// not returning error here since it is captured in the failed count
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
@@ -179,7 +180,7 @@ func (lm *Manager) UploadUsage(ctx context.Context) {
|
|||||||
func (lm *Manager) Stop(ctx context.Context) {
|
func (lm *Manager) Stop(ctx context.Context) {
|
||||||
lm.scheduler.Stop()
|
lm.scheduler.Stop()
|
||||||
|
|
||||||
zap.L().Info("sending usage data before shutting down")
|
slog.InfoContext(ctx, "sending usage data before shutting down")
|
||||||
// send usage before shutting down
|
// send usage before shutting down
|
||||||
lm.UploadUsage(ctx)
|
lm.UploadUsage(ctx)
|
||||||
atomic.StoreUint32(&locker, stateUnlocked)
|
atomic.StoreUint32(&locker, stateUnlocked)
|
||||||
|
|||||||
@@ -4,10 +4,12 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
|
|
||||||
|
"github.com/uptrace/bun"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/uptrace/bun"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type provider struct {
|
type provider struct {
|
||||||
@@ -32,9 +34,9 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
|||||||
fmter: fmter,
|
fmter: fmter,
|
||||||
settings: settings,
|
settings: settings,
|
||||||
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
|
operator: sqlschema.NewOperator(fmter, sqlschema.OperatorSupport{
|
||||||
DropConstraint: true,
|
SCreateAndDropConstraint: true,
|
||||||
ColumnIfNotExistsExists: true,
|
SAlterTableAddAndDropColumnIfNotExistsAndExists: true,
|
||||||
AlterColumnSetNotNull: true,
|
SAlterTableAlterColumnSetAndDrop: true,
|
||||||
}),
|
}),
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
@@ -72,8 +74,9 @@ WHERE
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(columns) == 0 {
|
if len(columns) == 0 {
|
||||||
return nil, nil, sql.ErrNoRows
|
return nil, nil, provider.sqlstore.WrapNotFoundErrf(sql.ErrNoRows, errors.CodeNotFound, "table (%s) not found", tableName)
|
||||||
}
|
}
|
||||||
|
|
||||||
sqlschemaColumns := make([]*sqlschema.Column, 0)
|
sqlschemaColumns := make([]*sqlschema.Column, 0)
|
||||||
@@ -111,7 +114,7 @@ WHERE
|
|||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if err := constraintsRows.Close(); err != nil {
|
if err := constraintsRows.Close(); err != nil {
|
||||||
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
|
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
@@ -172,7 +175,7 @@ WHERE
|
|||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if err := foreignKeyConstraintsRows.Close(); err != nil {
|
if err := foreignKeyConstraintsRows.Close(); err != nil {
|
||||||
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
|
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
@@ -220,7 +223,9 @@ SELECT
|
|||||||
ci.relname AS index_name,
|
ci.relname AS index_name,
|
||||||
i.indisunique AS unique,
|
i.indisunique AS unique,
|
||||||
i.indisprimary AS primary,
|
i.indisprimary AS primary,
|
||||||
a.attname AS column_name
|
a.attname AS column_name,
|
||||||
|
array_position(i.indkey, a.attnum) AS column_position,
|
||||||
|
pg_get_expr(i.indpred, i.indrelid) AS predicate
|
||||||
FROM
|
FROM
|
||||||
pg_index i
|
pg_index i
|
||||||
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
|
LEFT JOIN pg_class ct ON ct.oid = i.indrelid
|
||||||
@@ -231,18 +236,24 @@ WHERE
|
|||||||
a.attnum = ANY(i.indkey)
|
a.attnum = ANY(i.indkey)
|
||||||
AND con.oid IS NULL
|
AND con.oid IS NULL
|
||||||
AND ct.relkind = 'r'
|
AND ct.relkind = 'r'
|
||||||
AND ct.relname = ?`, string(name))
|
AND ct.relname = ?
|
||||||
|
ORDER BY index_name, column_position`, string(name))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, provider.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "no indices for table (%s) found", name)
|
||||||
}
|
}
|
||||||
|
|
||||||
defer func() {
|
defer func() {
|
||||||
if err := rows.Close(); err != nil {
|
if err := rows.Close(); err != nil {
|
||||||
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
|
provider.settings.Logger().ErrorContext(ctx, "error closing rows", errors.Attr(err))
|
||||||
}
|
}
|
||||||
}()
|
}()
|
||||||
|
|
||||||
uniqueIndicesMap := make(map[string]*sqlschema.UniqueIndex)
|
type indexEntry struct {
|
||||||
|
columns []sqlschema.ColumnName
|
||||||
|
predicate *string
|
||||||
|
}
|
||||||
|
|
||||||
|
uniqueIndicesMap := make(map[string]*indexEntry)
|
||||||
for rows.Next() {
|
for rows.Next() {
|
||||||
var (
|
var (
|
||||||
tableName string
|
tableName string
|
||||||
@@ -250,27 +261,53 @@ WHERE
|
|||||||
unique bool
|
unique bool
|
||||||
primary bool
|
primary bool
|
||||||
columnName string
|
columnName string
|
||||||
|
// starts from 0 and is unused in this function, this is to ensure that the column names are in the correct order
|
||||||
|
columnPosition int
|
||||||
|
predicate *string
|
||||||
)
|
)
|
||||||
|
|
||||||
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName); err != nil {
|
if err := rows.Scan(&tableName, &indexName, &unique, &primary, &columnName, &columnPosition, &predicate); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if unique {
|
if unique {
|
||||||
if _, ok := uniqueIndicesMap[indexName]; !ok {
|
if _, ok := uniqueIndicesMap[indexName]; !ok {
|
||||||
uniqueIndicesMap[indexName] = &sqlschema.UniqueIndex{
|
uniqueIndicesMap[indexName] = &indexEntry{
|
||||||
TableName: name,
|
columns: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
|
||||||
ColumnNames: []sqlschema.ColumnName{sqlschema.ColumnName(columnName)},
|
predicate: predicate,
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
uniqueIndicesMap[indexName].ColumnNames = append(uniqueIndicesMap[indexName].ColumnNames, sqlschema.ColumnName(columnName))
|
uniqueIndicesMap[indexName].columns = append(uniqueIndicesMap[indexName].columns, sqlschema.ColumnName(columnName))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
indices := make([]sqlschema.Index, 0)
|
indices := make([]sqlschema.Index, 0)
|
||||||
for _, index := range uniqueIndicesMap {
|
for indexName, entry := range uniqueIndicesMap {
|
||||||
indices = append(indices, index)
|
if entry.predicate != nil {
|
||||||
|
index := &sqlschema.PartialUniqueIndex{
|
||||||
|
TableName: name,
|
||||||
|
ColumnNames: entry.columns,
|
||||||
|
Where: *entry.predicate,
|
||||||
|
}
|
||||||
|
|
||||||
|
if index.Name() == indexName {
|
||||||
|
indices = append(indices, index)
|
||||||
|
} else {
|
||||||
|
indices = append(indices, index.Named(indexName))
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
index := &sqlschema.UniqueIndex{
|
||||||
|
TableName: name,
|
||||||
|
ColumnNames: entry.columns,
|
||||||
|
}
|
||||||
|
|
||||||
|
if index.Name() == indexName {
|
||||||
|
indices = append(indices, index)
|
||||||
|
} else {
|
||||||
|
indices = append(indices, index.Named(indexName))
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return indices, nil
|
return indices, nil
|
||||||
|
|||||||
@@ -101,7 +101,7 @@ func (provider *provider) WrapNotFoundErrf(err error, code errors.Code, format s
|
|||||||
|
|
||||||
func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
|
func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, format string, args ...any) error {
|
||||||
var pgErr *pgconn.PgError
|
var pgErr *pgconn.PgError
|
||||||
if errors.As(err, &pgErr) && pgErr.Code == "23505" {
|
if errors.As(err, &pgErr) && (pgErr.Code == "23505" || pgErr.Code == "23503") {
|
||||||
return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
|
return errors.Wrapf(err, errors.TypeAlreadyExists, code, format, args...)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
97
frontend/.cursor/rules/state-management.mdc
Normal file
97
frontend/.cursor/rules/state-management.mdc
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
---
|
||||||
|
globs: **/*.store.ts
|
||||||
|
alwaysApply: false
|
||||||
|
---
|
||||||
|
# State Management: React Query, nuqs, Zustand
|
||||||
|
|
||||||
|
Use the following stack. Do **not** introduce or recommend Redux or React Context for shared/global state.
|
||||||
|
|
||||||
|
## Server state → React Query
|
||||||
|
|
||||||
|
- **Use for:** API responses, time-series data, caching, background refetch, retries, stale/refresh.
|
||||||
|
- **Do not use Redux/Context** to store or mirror data that comes from React Query (e.g. do not dispatch API results into Redux).
|
||||||
|
- Prefer generated React Query hooks from `frontend/src/api/generated` when available.
|
||||||
|
- Keep server state in React Query; expose it via hooks that return the query result (and optionally memoized derived values). Do not duplicate it in Redux or Context.
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
// ✅ GOOD: single source of truth from React Query
|
||||||
|
export function useAppStateHook() {
|
||||||
|
const { data, isError } = useQuery(...)
|
||||||
|
const memoizedConfigs = useMemo(() => ({ ... }), [data?.configs])
|
||||||
|
return { configs: memoizedConfigs, isError, ... }
|
||||||
|
}
|
||||||
|
|
||||||
|
// ❌ BAD: copying React Query result into Redux
|
||||||
|
dispatch({ type: UPDATE_LATEST_VERSION, payload: queryResponse.data })
|
||||||
|
```
|
||||||
|
|
||||||
|
## URL state → nuqs
|
||||||
|
|
||||||
|
- **Use for:** shareable state, filters, time range, selected values, pagination, view state that belongs in the URL.
|
||||||
|
- **Do not use Redux/Context** for state that should be shareable or reflected in the URL.
|
||||||
|
- Use [nuqs](https://nuqs.dev/docs/basic-usage) for typed, type-safe URL search params. Avoid ad-hoc `useSearchParams` encoding/decoding.
|
||||||
|
- Keep URL payload small; respect browser URL length limits (e.g. Chrome ~2k chars). Do not put large datasets or sensitive data in query params.
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
// ✅ GOOD: nuqs for filters / time range / selection
|
||||||
|
const [timeRange, setTimeRange] = useQueryState('timeRange', parseAsString.withDefault('1h'))
|
||||||
|
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
|
||||||
|
|
||||||
|
// ❌ BAD: Redux/Context for shareable or URL-synced state
|
||||||
|
const { timeRange } = useContext(SomeContext)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Client state → Zustand
|
||||||
|
|
||||||
|
- **Use for:** global/client state, cross-component state, feature flags, complex or large client objects (e.g. dashboard state, query builder state).
|
||||||
|
- **Do not use Redux or React Context** for global or feature-level client state.
|
||||||
|
- Prefer small, domain-scoped stores (e.g. DashboardStore, QueryBuilderStore).
|
||||||
|
|
||||||
|
### Zustand best practices (align with eslint-plugin-zustand-rules)
|
||||||
|
|
||||||
|
- **One store per module.** Do not define multiple `create()` calls in the same file; use one store per module (or compose slices into one store).
|
||||||
|
- **Always use selectors.** Call the store hook with a selector so only the used slice triggers re-renders. Never use `useStore()` with no selector.
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
// ✅ GOOD: selector — re-renders only when isDashboardLocked changes
|
||||||
|
const isLocked = useDashboardStore(state => state.isDashboardLocked)
|
||||||
|
|
||||||
|
// ❌ BAD: no selector — re-renders on any store change
|
||||||
|
const state = useDashboardStore()
|
||||||
|
```
|
||||||
|
|
||||||
|
- **Never mutate state directly.** Update only via `set` or `setState` (or `getState()` + `set` for reads). No `state.foo = x` or `state.bears += 1` inside actions.
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
// ✅ GOOD: use set
|
||||||
|
increment: () => set(state => ({ bears: state.bears + 1 }))
|
||||||
|
|
||||||
|
// ❌ BAD: direct mutation
|
||||||
|
increment: () => { state.bears += 1 }
|
||||||
|
```
|
||||||
|
|
||||||
|
- **State properties before actions.** In the store object, list all state fields first, then action functions.
|
||||||
|
- **Split into slices when state is large.** If a store has many top-level properties (e.g. more than 5–10), split into slice factories and combine with one `create()`.
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
// ✅ GOOD: slices for large state
|
||||||
|
const createBearSlice = set => ({ bears: 0, addBear: () => set(s => ({ bears: s.bears + 1 })) })
|
||||||
|
const createFishSlice = set => ({ fish: 0, addFish: () => set(s => ({ fish: s.fish + 1 })) })
|
||||||
|
const useStore = create(set => ({ ...createBearSlice(set), ...createFishSlice(set) }))
|
||||||
|
```
|
||||||
|
|
||||||
|
- **In projects using Zustand:** add `eslint-plugin-zustand-rules` and extend `plugin:zustand-rules/recommended` to enforce these rules automatically.
|
||||||
|
|
||||||
|
## Local state → React state only
|
||||||
|
|
||||||
|
- **Use useState/useReducer** for: component-local UI state, form inputs, toggles, hover state, data that never leaves the component.
|
||||||
|
- Do not use Zustand, Redux, or Context for state that is purely local to one component or a small subtree.
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
| State type | Use | Avoid |
|
||||||
|
|-------------------|------------------|--------------------|
|
||||||
|
| Server / API | React Query | Redux, Context |
|
||||||
|
| URL / shareable | nuqs | Redux, Context |
|
||||||
|
| Global client | Zustand | Redux, Context |
|
||||||
|
| Local UI | useState/useReducer | Zustand, Redux, Context |
|
||||||
150
frontend/.cursor/skills/migrate-state-management/SKILL.md
Normal file
150
frontend/.cursor/skills/migrate-state-management/SKILL.md
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
---
|
||||||
|
name: migrate-state-management
|
||||||
|
description: Migrate Redux or React Context to the correct state option (React Query for server state, nuqs for URL/shareable state, Zustand for global client state). Use when refactoring away from Redux/Context, moving state to the right store, or when the user asks to migrate state management.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Migrate State: Redux/Context → React Query, nuqs, Zustand
|
||||||
|
|
||||||
|
Do **not** introduce or recommend Redux or React Context. Migrate existing usage to the stack below.
|
||||||
|
|
||||||
|
## 1. Classify the state
|
||||||
|
|
||||||
|
Before changing code, classify what the state represents:
|
||||||
|
|
||||||
|
| If the state is… | Migrate to | Do not use |
|
||||||
|
|------------------|------------|------------|
|
||||||
|
| From API / server (versions, configs, fetched lists, time-series) | **React Query** | Redux, Context |
|
||||||
|
| Shareable via URL (filters, time range, page, selected ids) | **nuqs** | Redux, Context |
|
||||||
|
| Global/client UI (dashboard lock, query builder, feature flags, large client objects) | **Zustand** | Redux, Context |
|
||||||
|
| Local to one component (inputs, toggles, hover) | **useState / useReducer** | Zustand, Redux, Context |
|
||||||
|
|
||||||
|
If one slice mixes concerns (e.g. Redux has both API data and pagination), split: API → React Query, pagination → nuqs, rest → Zustand or local state.
|
||||||
|
|
||||||
|
## 2. Migrate to React Query (server state)
|
||||||
|
|
||||||
|
**When:** State comes from or mirrors an API response (e.g. `currentVersion`, `latestVersion`, `configs`, lists).
|
||||||
|
|
||||||
|
**Steps:**
|
||||||
|
|
||||||
|
1. Find where the data is fetched (existing `useQuery`/API call) and where it is dispatched or set in Context/Redux.
|
||||||
|
2. Remove the dispatch/set that writes API results into Redux/Context.
|
||||||
|
3. Expose a single hook that uses the query and returns the same shape consumers expect (use `useMemo` for derived objects like `configs` to avoid unnecessary re-renders).
|
||||||
|
4. Replace Redux/Context consumption with the new hook. Prefer generated React Query hooks from `frontend/src/api/generated` when available.
|
||||||
|
5. Configure cache/refetch (e.g. `refetchOnMount: false`, `staleTime`) so behavior matches previous “single source” expectations.
|
||||||
|
|
||||||
|
**Before (Redux mirroring React Query):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
if (getUserLatestVersionResponse.isFetched && getUserLatestVersionResponse.isSuccess && getUserLatestVersionResponse.data?.payload) {
|
||||||
|
dispatch({ type: UPDATE_LATEST_VERSION, payload: { latestVersion: getUserLatestVersionResponse.data.payload.tag_name } })
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**After (single source in React Query):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
export function useAppStateHook() {
|
||||||
|
const { data, isError } = useQuery(...)
|
||||||
|
const memoizedConfigs = useMemo(() => ({ ... }), [data?.configs])
|
||||||
|
return {
|
||||||
|
latestVersion: data?.payload?.tag_name,
|
||||||
|
configs: memoizedConfigs,
|
||||||
|
isError,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Consumers use `useAppStateHook()` instead of `useSelector` or Context. Do not copy React Query result into Redux or Context.
|
||||||
|
|
||||||
|
## 3. Migrate to nuqs (URL / shareable state)
|
||||||
|
|
||||||
|
**When:** State should be in the URL: filters, time range, pagination, selected values, view state. Keep payload small (e.g. Chrome ~2k chars); no large datasets or sensitive data.
|
||||||
|
|
||||||
|
**Steps:**
|
||||||
|
|
||||||
|
1. Identify which Redux/Context fields are shareable or already reflected in the URL (e.g. `currentPage`, `timeRange`, `selectedFilter`).
|
||||||
|
2. Add nuqs (or use existing): `useQueryState('param', parseAsString.withDefault('…'))` (or `parseAsInteger`, etc.).
|
||||||
|
3. Replace reads/writes of those fields with nuqs hooks. Use typed parsers; avoid ad-hoc `useSearchParams` encoding/decoding.
|
||||||
|
4. Remove the same fields from Redux/Context and their reducers/providers.
|
||||||
|
|
||||||
|
**Before (Context/Redux):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
const { timeRange } = useContext(SomeContext)
|
||||||
|
const [page, setPage] = useDispatch(...)
|
||||||
|
```
|
||||||
|
|
||||||
|
**After (nuqs):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
const [timeRange, setTimeRange] = useQueryState('timeRange', parseAsString.withDefault('1h'))
|
||||||
|
const [page, setPage] = useQueryState('page', parseAsInteger.withDefault(1))
|
||||||
|
```
|
||||||
|
|
||||||
|
## 4. Migrate to Zustand (global client state)
|
||||||
|
|
||||||
|
**When:** State is global or cross-component client state: feature flags, dashboard state, query builder state, complex/large client objects (e.g. up to ~1.5–2MB). Not for server cache or local-only UI.
|
||||||
|
|
||||||
|
**Steps:**
|
||||||
|
|
||||||
|
1. Create one store per domain (e.g. `DashboardStore`, `QueryBuilderStore`). One `create()` per module; for large state use slice factories and combine.
|
||||||
|
2. Put state properties first, then actions. Use `set` (or `setState` / `getState()` + `set`) for updates; never mutate state directly.
|
||||||
|
3. Replace Context/Redux consumption with the store hook **and a selector** so only the used slice triggers re-renders.
|
||||||
|
4. Remove the old Context provider / Redux slice and related dispatches.
|
||||||
|
|
||||||
|
**Selector (required):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
const isLocked = useDashboardStore(state => state.isDashboardLocked)
|
||||||
|
```
|
||||||
|
|
||||||
|
Never use `useStore()` with no selector. Never do `state.foo = x` inside actions; use `set(state => ({ ... }))`.
|
||||||
|
|
||||||
|
**Before (Context/Redux):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
const { isDashboardLocked, setLocked } = useContext(DashboardContext)
|
||||||
|
```
|
||||||
|
|
||||||
|
**After (Zustand):**
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
const isLocked = useDashboardStore(state => state.isDashboardLocked)
|
||||||
|
const setLocked = useDashboardStore(state => state.setLocked)
|
||||||
|
```
|
||||||
|
|
||||||
|
For large stores (many top-level fields), split into slices and combine:
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
const createBearSlice = set => ({ bears: 0, addBear: () => set(s => ({ bears: s.bears + 1 })) })
|
||||||
|
const useStore = create(set => ({ ...createBearSlice(set), ...createFishSlice(set) }))
|
||||||
|
```
|
||||||
|
|
||||||
|
Add `eslint-plugin-zustand-rules` with `plugin:zustand-rules/recommended` to enforce selectors and no direct mutation.
|
||||||
|
|
||||||
|
## 5. Migrate to local state (useState / useReducer)
|
||||||
|
|
||||||
|
**When:** State is used only inside one component or a small subtree (form inputs, toggles, hover, panel selection). No URL sync, no cross-feature sharing.
|
||||||
|
|
||||||
|
**Steps:**
|
||||||
|
|
||||||
|
1. Move the state into the component that owns it (or the smallest common parent).
|
||||||
|
2. Use `useState` or `useReducer` (useReducer when multiple related fields change together).
|
||||||
|
3. Remove from Redux/Context and any provider/slice.
|
||||||
|
|
||||||
|
Do not use Zustand, Redux, or Context for purely local UI state.
|
||||||
|
|
||||||
|
## 6. Migration checklist
|
||||||
|
|
||||||
|
- [ ] Classify each piece of state (server / URL / global client / local).
|
||||||
|
- [ ] Server state: move to React Query; expose via hook; remove Redux/Context mirroring.
|
||||||
|
- [ ] URL state: move to nuqs; remove from Redux/Context; keep URL payload small.
|
||||||
|
- [ ] Global client state: move to Zustand with selectors and immutable updates; one store per domain.
|
||||||
|
- [ ] Local state: move to useState/useReducer in the owning component.
|
||||||
|
- [ ] Remove old Redux slices / Context providers and all dispatches/consumers for migrated state.
|
||||||
|
- [ ] Do not duplicate the same data in multiple places (e.g. React Query + Redux).
|
||||||
|
|
||||||
|
## Additional resources
|
||||||
|
|
||||||
|
- Project rule: [.cursor/rules/state-management.mdc](../../rules/state-management.mdc)
|
||||||
|
- Detailed patterns and rationale: [reference.md](reference.md)
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
# State migration reference
|
||||||
|
|
||||||
|
## Why migrate
|
||||||
|
|
||||||
|
- **Context:** Re-renders all consumers on any change; no granular subscriptions; becomes brittle at scale.
|
||||||
|
- **Redux:** Heavy boilerplate (actions, reducers, selectors, Provider); slower onboarding; often used to mirror React Query or URL state.
|
||||||
|
- **Goal:** Fewer mechanisms, domain isolation, granular subscriptions, single source of truth per state type.
|
||||||
|
|
||||||
|
## React Query migration (server state)
|
||||||
|
|
||||||
|
Typical anti-pattern: API is called via React Query, then result is dispatched to Redux. Flow becomes: Component → useQueries → API → dispatch → Reducer → Redux state → useSelector.
|
||||||
|
|
||||||
|
Correct flow: Component → useQuery (or custom hook wrapping it) → same component reads from hook. No Redux/Context in between.
|
||||||
|
|
||||||
|
- Prefer generated hooks from `frontend/src/api/generated`.
|
||||||
|
- For “app state” that is just API data (versions, configs), one hook that returns `{ ...data, configs: useMemo(...) }` is enough. No selectors needed for plain data; useMemo only where the value is used as dependency (e.g. in useState).
|
||||||
|
- Set `staleTime` / `refetchOnMount` etc. so refetch behavior matches previous expectations.
|
||||||
|
|
||||||
|
## nuqs migration (URL state)
|
||||||
|
|
||||||
|
Redux/Context often hold pagination, filters, time range, selected values that are shareable. Those belong in the URL.
|
||||||
|
|
||||||
|
- Use [nuqs](https://nuqs.dev/docs/basic-usage) for typed search params. Avoid ad-hoc `useSearchParams` + manual encoding.
|
||||||
|
- Browser limits: Chrome ~2k chars practical; keep payload small; no large datasets or secrets in query params.
|
||||||
|
- If the app uses TanStack Router, search params can be handled there; otherwise nuqs is the standard.
|
||||||
|
|
||||||
|
## Zustand migration (client state)
|
||||||
|
|
||||||
|
- One store per domain (e.g. DashboardStore, QueryBuilderStore). Multiple `create()` in one file is disallowed; use one store or composed slices.
|
||||||
|
- Always use a selector: `useStore(s => s.field)` so only that field drives re-renders.
|
||||||
|
- Never mutate: update only via `set(state => ({ ... }))` or `setState` / `getState()` + `set`.
|
||||||
|
- State properties first, then actions. For 5–10+ top-level fields, split into slice factories and combine with one `create()`.
|
||||||
|
- Large client objects: Zustand is for “large” in the ~1.5–2MB range; above that, optimize at API/store design.
|
||||||
|
- Testing: no Provider; stores are plain functions; easy to reset and mock.
|
||||||
|
|
||||||
|
## What not to use
|
||||||
|
|
||||||
|
- **Redux / Context** for new or migrated shared/global state.
|
||||||
|
- **Redux / Context** to store or mirror React Query results.
|
||||||
|
- **Redux / Context** for state that should live in the URL (use nuqs).
|
||||||
|
- **Zustand / Redux / Context** for component-local UI (use useState/useReducer).
|
||||||
|
|
||||||
|
## Summary table
|
||||||
|
|
||||||
|
| State type | Use | Avoid |
|
||||||
|
|-------------|--------------------|-----------------|
|
||||||
|
| Server/API | React Query | Redux, Context |
|
||||||
|
| URL/shareable | nuqs | Redux, Context |
|
||||||
|
| Global client | Zustand | Redux, Context |
|
||||||
|
| Local UI | useState/useReducer | Zustand, Redux, Context |
|
||||||
@@ -193,6 +193,16 @@ module.exports = {
|
|||||||
],
|
],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
|
'no-restricted-syntax': [
|
||||||
|
'error',
|
||||||
|
{
|
||||||
|
selector:
|
||||||
|
// TODO: Make this generic on removal of redux
|
||||||
|
"CallExpression[callee.property.name='getState'][callee.object.name=/^use/]",
|
||||||
|
message:
|
||||||
|
'Avoid calling .getState() directly. Export a standalone action from the store instead.',
|
||||||
|
},
|
||||||
|
],
|
||||||
},
|
},
|
||||||
overrides: [
|
overrides: [
|
||||||
{
|
{
|
||||||
@@ -217,5 +227,13 @@ module.exports = {
|
|||||||
'@typescript-eslint/no-unused-vars': 'warn',
|
'@typescript-eslint/no-unused-vars': 'warn',
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
// Store definition files are the only place .getState() is permitted —
|
||||||
|
// they are the canonical source for standalone action exports.
|
||||||
|
files: ['**/*Store.{ts,tsx}'],
|
||||||
|
rules: {
|
||||||
|
'no-restricted-syntax': 'off',
|
||||||
|
},
|
||||||
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
|||||||
29
frontend/__mocks__/resizableMock.tsx
Normal file
29
frontend/__mocks__/resizableMock.tsx
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import { PropsWithChildren } from 'react';
|
||||||
|
|
||||||
|
type CommonProps = PropsWithChildren<{
|
||||||
|
className?: string;
|
||||||
|
minSize?: number;
|
||||||
|
maxSize?: number;
|
||||||
|
defaultSize?: number;
|
||||||
|
direction?: 'horizontal' | 'vertical';
|
||||||
|
autoSaveId?: string;
|
||||||
|
withHandle?: boolean;
|
||||||
|
}>;
|
||||||
|
|
||||||
|
export function ResizablePanelGroup({
|
||||||
|
children,
|
||||||
|
className,
|
||||||
|
}: CommonProps): JSX.Element {
|
||||||
|
return <div className={className}>{children}</div>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function ResizablePanel({
|
||||||
|
children,
|
||||||
|
className,
|
||||||
|
}: CommonProps): JSX.Element {
|
||||||
|
return <div className={className}>{children}</div>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function ResizableHandle({ className }: CommonProps): JSX.Element {
|
||||||
|
return <div className={className} />;
|
||||||
|
}
|
||||||
@@ -14,6 +14,7 @@ const config: Config.InitialOptions = {
|
|||||||
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
|
'\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
|
||||||
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
|
'\\.md$': '<rootDir>/__mocks__/cssMock.ts',
|
||||||
'^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
|
'^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
|
||||||
|
'^@signozhq/resizable$': '<rootDir>/__mocks__/resizableMock.tsx',
|
||||||
'^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
'^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
||||||
'^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
'^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
||||||
'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
'^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
|
||||||
@@ -23,7 +24,8 @@ const config: Config.InitialOptions = {
|
|||||||
'<rootDir>/node_modules/@signozhq/icons/dist/index.esm.js',
|
'<rootDir>/node_modules/@signozhq/icons/dist/index.esm.js',
|
||||||
'^react-syntax-highlighter/dist/esm/(.*)$':
|
'^react-syntax-highlighter/dist/esm/(.*)$':
|
||||||
'<rootDir>/node_modules/react-syntax-highlighter/dist/cjs/$1',
|
'<rootDir>/node_modules/react-syntax-highlighter/dist/cjs/$1',
|
||||||
'^@signozhq/([^/]+)$': '<rootDir>/node_modules/@signozhq/$1/dist/$1.js',
|
'^@signozhq/(?!ui$)([^/]+)$':
|
||||||
|
'<rootDir>/node_modules/@signozhq/$1/dist/$1.js',
|
||||||
},
|
},
|
||||||
extensionsToTreatAsEsm: ['.ts'],
|
extensionsToTreatAsEsm: ['.ts'],
|
||||||
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
|
testMatch: ['<rootDir>/src/**/*?(*.)(test).(ts|js)?(x)'],
|
||||||
|
|||||||
@@ -11,6 +11,7 @@
|
|||||||
"prettify": "prettier --write .",
|
"prettify": "prettier --write .",
|
||||||
"fmt": "prettier --check .",
|
"fmt": "prettier --check .",
|
||||||
"lint": "eslint ./src",
|
"lint": "eslint ./src",
|
||||||
|
"lint:generated": "eslint ./src/api/generated --fix",
|
||||||
"lint:fix": "eslint ./src --fix",
|
"lint:fix": "eslint ./src --fix",
|
||||||
"jest": "jest",
|
"jest": "jest",
|
||||||
"jest:coverage": "jest --coverage",
|
"jest:coverage": "jest --coverage",
|
||||||
@@ -64,8 +65,9 @@
|
|||||||
"@signozhq/sonner": "0.1.0",
|
"@signozhq/sonner": "0.1.0",
|
||||||
"@signozhq/switch": "0.0.2",
|
"@signozhq/switch": "0.0.2",
|
||||||
"@signozhq/table": "0.3.7",
|
"@signozhq/table": "0.3.7",
|
||||||
"@signozhq/toggle-group": "^0.0.1",
|
"@signozhq/toggle-group": "0.0.1",
|
||||||
"@signozhq/tooltip": "0.0.2",
|
"@signozhq/tooltip": "0.0.2",
|
||||||
|
"@signozhq/ui": "0.0.5",
|
||||||
"@tanstack/react-table": "8.20.6",
|
"@tanstack/react-table": "8.20.6",
|
||||||
"@tanstack/react-virtual": "3.11.2",
|
"@tanstack/react-virtual": "3.11.2",
|
||||||
"@uiw/codemirror-theme-copilot": "4.23.11",
|
"@uiw/codemirror-theme-copilot": "4.23.11",
|
||||||
@@ -135,6 +137,7 @@
|
|||||||
"react-full-screen": "1.1.1",
|
"react-full-screen": "1.1.1",
|
||||||
"react-grid-layout": "^1.3.4",
|
"react-grid-layout": "^1.3.4",
|
||||||
"react-helmet-async": "1.3.0",
|
"react-helmet-async": "1.3.0",
|
||||||
|
"react-hook-form": "7.71.2",
|
||||||
"react-i18next": "^11.16.1",
|
"react-i18next": "^11.16.1",
|
||||||
"react-lottie": "1.2.10",
|
"react-lottie": "1.2.10",
|
||||||
"react-markdown": "8.0.7",
|
"react-markdown": "8.0.7",
|
||||||
@@ -283,4 +286,4 @@
|
|||||||
"tmp": "0.2.4",
|
"tmp": "0.2.4",
|
||||||
"vite": "npm:rolldown-vite@7.3.1"
|
"vite": "npm:rolldown-vite@7.3.1"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
2021
frontend/public/Images/allInOneLightMode.svg
Normal file
2021
frontend/public/Images/allInOneLightMode.svg
Normal file
File diff suppressed because it is too large
Load Diff
|
After Width: | Height: | Size: 214 KiB |
@@ -15,5 +15,6 @@
|
|||||||
"logs_to_metrics": "Logs To Metrics",
|
"logs_to_metrics": "Logs To Metrics",
|
||||||
"roles": "Roles",
|
"roles": "Roles",
|
||||||
"role_details": "Role Details",
|
"role_details": "Role Details",
|
||||||
"members": "Members"
|
"members": "Members",
|
||||||
|
"service_accounts": "Service Accounts"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -50,5 +50,8 @@
|
|||||||
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
|
"INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
|
||||||
"METER_EXPLORER": "SigNoz | Meter Explorer",
|
"METER_EXPLORER": "SigNoz | Meter Explorer",
|
||||||
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
||||||
"METER": "SigNoz | Meter"
|
"METER": "SigNoz | Meter",
|
||||||
|
"ROLES_SETTINGS": "SigNoz | Roles",
|
||||||
|
"MEMBERS_SETTINGS": "SigNoz | Members",
|
||||||
|
"SERVICE_ACCOUNTS_SETTINGS": "SigNoz | Service Accounts"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,5 +15,6 @@
|
|||||||
"logs_to_metrics": "Logs To Metrics",
|
"logs_to_metrics": "Logs To Metrics",
|
||||||
"roles": "Roles",
|
"roles": "Roles",
|
||||||
"role_details": "Role Details",
|
"role_details": "Role Details",
|
||||||
"members": "Members"
|
"members": "Members",
|
||||||
|
"service_accounts": "Service Accounts"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -75,5 +75,6 @@
|
|||||||
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
|
||||||
"METER": "SigNoz | Meter",
|
"METER": "SigNoz | Meter",
|
||||||
"ROLES_SETTINGS": "SigNoz | Roles",
|
"ROLES_SETTINGS": "SigNoz | Roles",
|
||||||
"MEMBERS_SETTINGS": "SigNoz | Members"
|
"MEMBERS_SETTINGS": "SigNoz | Members",
|
||||||
|
"SERVICE_ACCOUNTS_SETTINGS": "SigNoz | Service Accounts"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ echo "\n✅ Prettier formatting successful"
|
|||||||
|
|
||||||
# Fix linting issues
|
# Fix linting issues
|
||||||
echo "\n\n---\nRunning eslint...\n"
|
echo "\n\n---\nRunning eslint...\n"
|
||||||
if ! yarn lint --fix --quiet src/api/generated; then
|
if ! yarn lint:generated; then
|
||||||
echo "ESLint check failed! Please fix linting errors before proceeding."
|
echo "ESLint check failed! Please fix linting errors before proceeding."
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -21,6 +21,8 @@ import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
|||||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||||
import type {
|
import type {
|
||||||
AuthtypesPatchableObjectsDTO,
|
AuthtypesPatchableObjectsDTO,
|
||||||
|
AuthtypesPatchableRoleDTO,
|
||||||
|
AuthtypesPostableRoleDTO,
|
||||||
CreateRole201,
|
CreateRole201,
|
||||||
DeleteRolePathParameters,
|
DeleteRolePathParameters,
|
||||||
GetObjects200,
|
GetObjects200,
|
||||||
@@ -31,8 +33,6 @@ import type {
|
|||||||
PatchObjectsPathParameters,
|
PatchObjectsPathParameters,
|
||||||
PatchRolePathParameters,
|
PatchRolePathParameters,
|
||||||
RenderErrorResponseDTO,
|
RenderErrorResponseDTO,
|
||||||
RoletypesPatchableRoleDTO,
|
|
||||||
RoletypesPostableRoleDTO,
|
|
||||||
} from '../sigNoz.schemas';
|
} from '../sigNoz.schemas';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -118,14 +118,14 @@ export const invalidateListRoles = async (
|
|||||||
* @summary Create role
|
* @summary Create role
|
||||||
*/
|
*/
|
||||||
export const createRole = (
|
export const createRole = (
|
||||||
roletypesPostableRoleDTO: BodyType<RoletypesPostableRoleDTO>,
|
authtypesPostableRoleDTO: BodyType<AuthtypesPostableRoleDTO>,
|
||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
) => {
|
) => {
|
||||||
return GeneratedAPIInstance<CreateRole201>({
|
return GeneratedAPIInstance<CreateRole201>({
|
||||||
url: `/api/v1/roles`,
|
url: `/api/v1/roles`,
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
data: roletypesPostableRoleDTO,
|
data: authtypesPostableRoleDTO,
|
||||||
signal,
|
signal,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -137,13 +137,13 @@ export const getCreateRoleMutationOptions = <
|
|||||||
mutation?: UseMutationOptions<
|
mutation?: UseMutationOptions<
|
||||||
Awaited<ReturnType<typeof createRole>>,
|
Awaited<ReturnType<typeof createRole>>,
|
||||||
TError,
|
TError,
|
||||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
}): UseMutationOptions<
|
}): UseMutationOptions<
|
||||||
Awaited<ReturnType<typeof createRole>>,
|
Awaited<ReturnType<typeof createRole>>,
|
||||||
TError,
|
TError,
|
||||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
const mutationKey = ['createRole'];
|
const mutationKey = ['createRole'];
|
||||||
@@ -157,7 +157,7 @@ export const getCreateRoleMutationOptions = <
|
|||||||
|
|
||||||
const mutationFn: MutationFunction<
|
const mutationFn: MutationFunction<
|
||||||
Awaited<ReturnType<typeof createRole>>,
|
Awaited<ReturnType<typeof createRole>>,
|
||||||
{ data: BodyType<RoletypesPostableRoleDTO> }
|
{ data: BodyType<AuthtypesPostableRoleDTO> }
|
||||||
> = (props) => {
|
> = (props) => {
|
||||||
const { data } = props ?? {};
|
const { data } = props ?? {};
|
||||||
|
|
||||||
@@ -170,7 +170,7 @@ export const getCreateRoleMutationOptions = <
|
|||||||
export type CreateRoleMutationResult = NonNullable<
|
export type CreateRoleMutationResult = NonNullable<
|
||||||
Awaited<ReturnType<typeof createRole>>
|
Awaited<ReturnType<typeof createRole>>
|
||||||
>;
|
>;
|
||||||
export type CreateRoleMutationBody = BodyType<RoletypesPostableRoleDTO>;
|
export type CreateRoleMutationBody = BodyType<AuthtypesPostableRoleDTO>;
|
||||||
export type CreateRoleMutationError = ErrorType<RenderErrorResponseDTO>;
|
export type CreateRoleMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -183,13 +183,13 @@ export const useCreateRole = <
|
|||||||
mutation?: UseMutationOptions<
|
mutation?: UseMutationOptions<
|
||||||
Awaited<ReturnType<typeof createRole>>,
|
Awaited<ReturnType<typeof createRole>>,
|
||||||
TError,
|
TError,
|
||||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
}): UseMutationResult<
|
}): UseMutationResult<
|
||||||
Awaited<ReturnType<typeof createRole>>,
|
Awaited<ReturnType<typeof createRole>>,
|
||||||
TError,
|
TError,
|
||||||
{ data: BodyType<RoletypesPostableRoleDTO> },
|
{ data: BodyType<AuthtypesPostableRoleDTO> },
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
const mutationOptions = getCreateRoleMutationOptions(options);
|
const mutationOptions = getCreateRoleMutationOptions(options);
|
||||||
@@ -370,13 +370,13 @@ export const invalidateGetRole = async (
|
|||||||
*/
|
*/
|
||||||
export const patchRole = (
|
export const patchRole = (
|
||||||
{ id }: PatchRolePathParameters,
|
{ id }: PatchRolePathParameters,
|
||||||
roletypesPatchableRoleDTO: BodyType<RoletypesPatchableRoleDTO>,
|
authtypesPatchableRoleDTO: BodyType<AuthtypesPatchableRoleDTO>,
|
||||||
) => {
|
) => {
|
||||||
return GeneratedAPIInstance<string>({
|
return GeneratedAPIInstance<string>({
|
||||||
url: `/api/v1/roles/${id}`,
|
url: `/api/v1/roles/${id}`,
|
||||||
method: 'PATCH',
|
method: 'PATCH',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
data: roletypesPatchableRoleDTO,
|
data: authtypesPatchableRoleDTO,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -389,7 +389,7 @@ export const getPatchRoleMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: PatchRolePathParameters;
|
pathParams: PatchRolePathParameters;
|
||||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -398,7 +398,7 @@ export const getPatchRoleMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: PatchRolePathParameters;
|
pathParams: PatchRolePathParameters;
|
||||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -415,7 +415,7 @@ export const getPatchRoleMutationOptions = <
|
|||||||
Awaited<ReturnType<typeof patchRole>>,
|
Awaited<ReturnType<typeof patchRole>>,
|
||||||
{
|
{
|
||||||
pathParams: PatchRolePathParameters;
|
pathParams: PatchRolePathParameters;
|
||||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||||
}
|
}
|
||||||
> = (props) => {
|
> = (props) => {
|
||||||
const { pathParams, data } = props ?? {};
|
const { pathParams, data } = props ?? {};
|
||||||
@@ -429,7 +429,7 @@ export const getPatchRoleMutationOptions = <
|
|||||||
export type PatchRoleMutationResult = NonNullable<
|
export type PatchRoleMutationResult = NonNullable<
|
||||||
Awaited<ReturnType<typeof patchRole>>
|
Awaited<ReturnType<typeof patchRole>>
|
||||||
>;
|
>;
|
||||||
export type PatchRoleMutationBody = BodyType<RoletypesPatchableRoleDTO>;
|
export type PatchRoleMutationBody = BodyType<AuthtypesPatchableRoleDTO>;
|
||||||
export type PatchRoleMutationError = ErrorType<RenderErrorResponseDTO>;
|
export type PatchRoleMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -444,7 +444,7 @@ export const usePatchRole = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: PatchRolePathParameters;
|
pathParams: PatchRolePathParameters;
|
||||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -453,7 +453,7 @@ export const usePatchRole = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: PatchRolePathParameters;
|
pathParams: PatchRolePathParameters;
|
||||||
data: BodyType<RoletypesPatchableRoleDTO>;
|
data: BodyType<AuthtypesPatchableRoleDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
|
|||||||
@@ -278,6 +278,13 @@ export interface AuthtypesPatchableObjectsDTO {
|
|||||||
deletions: AuthtypesGettableObjectsDTO[] | null;
|
deletions: AuthtypesGettableObjectsDTO[] | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface AuthtypesPatchableRoleDTO {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
description: string;
|
||||||
|
}
|
||||||
|
|
||||||
export interface AuthtypesPostableAuthDomainDTO {
|
export interface AuthtypesPostableAuthDomainDTO {
|
||||||
config?: AuthtypesAuthDomainConfigDTO;
|
config?: AuthtypesAuthDomainConfigDTO;
|
||||||
/**
|
/**
|
||||||
@@ -301,6 +308,17 @@ export interface AuthtypesPostableEmailPasswordSessionDTO {
|
|||||||
password?: string;
|
password?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface AuthtypesPostableRoleDTO {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
description?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
name: string;
|
||||||
|
}
|
||||||
|
|
||||||
export interface AuthtypesPostableRotateTokenDTO {
|
export interface AuthtypesPostableRotateTokenDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -319,6 +337,39 @@ export interface AuthtypesResourceDTO {
|
|||||||
type: string;
|
type: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface AuthtypesRoleDTO {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @format date-time
|
||||||
|
*/
|
||||||
|
createdAt?: Date;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
description: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
id: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
name: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
orgId: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
type: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @format date-time
|
||||||
|
*/
|
||||||
|
updatedAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @nullable
|
* @nullable
|
||||||
*/
|
*/
|
||||||
@@ -725,6 +776,45 @@ export interface GatewaytypesUpdatableIngestionKeyLimitDTO {
|
|||||||
tags?: string[] | null;
|
tags?: string[] | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface GlobaltypesAPIKeyConfigDTO {
|
||||||
|
/**
|
||||||
|
* @type boolean
|
||||||
|
*/
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GlobaltypesConfigDTO {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
external_url?: string;
|
||||||
|
identN?: GlobaltypesIdentNConfigDTO;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
ingestion_url?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GlobaltypesIdentNConfigDTO {
|
||||||
|
apikey?: GlobaltypesAPIKeyConfigDTO;
|
||||||
|
impersonation?: GlobaltypesImpersonationConfigDTO;
|
||||||
|
tokenizer?: GlobaltypesTokenizerConfigDTO;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GlobaltypesImpersonationConfigDTO {
|
||||||
|
/**
|
||||||
|
* @type boolean
|
||||||
|
*/
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GlobaltypesTokenizerConfigDTO {
|
||||||
|
/**
|
||||||
|
* @type boolean
|
||||||
|
*/
|
||||||
|
enabled?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
export interface MetricsexplorertypesListMetricDTO {
|
export interface MetricsexplorertypesListMetricDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -2039,57 +2129,6 @@ export interface RenderErrorResponseDTO {
|
|||||||
status: string;
|
status: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface RoletypesPatchableRoleDTO {
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
description: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RoletypesPostableRoleDTO {
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
description?: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
name: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface RoletypesRoleDTO {
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
* @format date-time
|
|
||||||
*/
|
|
||||||
createdAt?: Date;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
description: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
id: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
name: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
orgId: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
type: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
* @format date-time
|
|
||||||
*/
|
|
||||||
updatedAt?: Date;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ServiceaccounttypesFactorAPIKeyDTO {
|
export interface ServiceaccounttypesFactorAPIKeyDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -2345,6 +2384,47 @@ export interface TypesChangePasswordRequestDTO {
|
|||||||
userId?: string;
|
userId?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface TypesDeprecatedUserDTO {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @format date-time
|
||||||
|
*/
|
||||||
|
createdAt?: Date;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
displayName?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
email?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
id: string;
|
||||||
|
/**
|
||||||
|
* @type boolean
|
||||||
|
*/
|
||||||
|
isRoot?: boolean;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
orgId?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
role?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
status?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @format date-time
|
||||||
|
*/
|
||||||
|
updatedAt?: Date;
|
||||||
|
}
|
||||||
|
|
||||||
export interface TypesGettableAPIKeyDTO {
|
export interface TypesGettableAPIKeyDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -2402,17 +2482,6 @@ export interface TypesGettableAPIKeyDTO {
|
|||||||
userId?: string;
|
userId?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TypesGettableGlobalConfigDTO {
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
external_url?: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
ingestion_url?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TypesIdentifiableDTO {
|
export interface TypesIdentifiableDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -2511,25 +2580,6 @@ export interface TypesPostableAPIKeyDTO {
|
|||||||
role?: string;
|
role?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface TypesPostableAcceptInviteDTO {
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
displayName?: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
password?: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
sourceUrl?: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
token?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface TypesPostableBulkInviteRequestDTO {
|
export interface TypesPostableBulkInviteRequestDTO {
|
||||||
/**
|
/**
|
||||||
* @type array
|
* @type array
|
||||||
@@ -2673,10 +2723,6 @@ export interface TypesUserDTO {
|
|||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
orgId?: string;
|
orgId?: string;
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
role?: string;
|
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3026,18 +3072,7 @@ export type GetResetPasswordToken200 = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export type GetGlobalConfig200 = {
|
export type GetGlobalConfig200 = {
|
||||||
data: TypesGettableGlobalConfigDTO;
|
data: GlobaltypesConfigDTO;
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
status: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type ListInvite200 = {
|
|
||||||
/**
|
|
||||||
* @type array
|
|
||||||
*/
|
|
||||||
data: TypesInviteDTO[];
|
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3052,28 +3087,6 @@ export type CreateInvite201 = {
|
|||||||
status: string;
|
status: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type DeleteInvitePathParameters = {
|
|
||||||
id: string;
|
|
||||||
};
|
|
||||||
export type GetInvitePathParameters = {
|
|
||||||
token: string;
|
|
||||||
};
|
|
||||||
export type GetInvite200 = {
|
|
||||||
data: TypesInviteDTO;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
status: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type AcceptInvite201 = {
|
|
||||||
data: TypesUserDTO;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
status: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export type ListPromotedAndIndexedPaths200 = {
|
export type ListPromotedAndIndexedPaths200 = {
|
||||||
/**
|
/**
|
||||||
* @type array
|
* @type array
|
||||||
@@ -3163,7 +3176,7 @@ export type ListRoles200 = {
|
|||||||
/**
|
/**
|
||||||
* @type array
|
* @type array
|
||||||
*/
|
*/
|
||||||
data: RoletypesRoleDTO[];
|
data: AuthtypesRoleDTO[];
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3185,7 +3198,7 @@ export type GetRolePathParameters = {
|
|||||||
id: string;
|
id: string;
|
||||||
};
|
};
|
||||||
export type GetRole200 = {
|
export type GetRole200 = {
|
||||||
data: RoletypesRoleDTO;
|
data: AuthtypesRoleDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3290,7 +3303,7 @@ export type ListUsers200 = {
|
|||||||
/**
|
/**
|
||||||
* @type array
|
* @type array
|
||||||
*/
|
*/
|
||||||
data: TypesUserDTO[];
|
data: TypesDeprecatedUserDTO[];
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3304,7 +3317,7 @@ export type GetUserPathParameters = {
|
|||||||
id: string;
|
id: string;
|
||||||
};
|
};
|
||||||
export type GetUser200 = {
|
export type GetUser200 = {
|
||||||
data: TypesUserDTO;
|
data: TypesDeprecatedUserDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3315,7 +3328,7 @@ export type UpdateUserPathParameters = {
|
|||||||
id: string;
|
id: string;
|
||||||
};
|
};
|
||||||
export type UpdateUser200 = {
|
export type UpdateUser200 = {
|
||||||
data: TypesUserDTO;
|
data: TypesDeprecatedUserDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3323,7 +3336,7 @@ export type UpdateUser200 = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export type GetMyUser200 = {
|
export type GetMyUser200 = {
|
||||||
data: TypesUserDTO;
|
data: TypesDeprecatedUserDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -20,33 +20,27 @@ import { useMutation, useQuery } from 'react-query';
|
|||||||
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
|
||||||
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
|
||||||
import type {
|
import type {
|
||||||
AcceptInvite201,
|
|
||||||
ChangePasswordPathParameters,
|
ChangePasswordPathParameters,
|
||||||
CreateAPIKey201,
|
CreateAPIKey201,
|
||||||
CreateInvite201,
|
CreateInvite201,
|
||||||
DeleteInvitePathParameters,
|
|
||||||
DeleteUserPathParameters,
|
DeleteUserPathParameters,
|
||||||
GetInvite200,
|
|
||||||
GetInvitePathParameters,
|
|
||||||
GetMyUser200,
|
GetMyUser200,
|
||||||
GetResetPasswordToken200,
|
GetResetPasswordToken200,
|
||||||
GetResetPasswordTokenPathParameters,
|
GetResetPasswordTokenPathParameters,
|
||||||
GetUser200,
|
GetUser200,
|
||||||
GetUserPathParameters,
|
GetUserPathParameters,
|
||||||
ListAPIKeys200,
|
ListAPIKeys200,
|
||||||
ListInvite200,
|
|
||||||
ListUsers200,
|
ListUsers200,
|
||||||
RenderErrorResponseDTO,
|
RenderErrorResponseDTO,
|
||||||
RevokeAPIKeyPathParameters,
|
RevokeAPIKeyPathParameters,
|
||||||
TypesChangePasswordRequestDTO,
|
TypesChangePasswordRequestDTO,
|
||||||
TypesPostableAcceptInviteDTO,
|
TypesDeprecatedUserDTO,
|
||||||
TypesPostableAPIKeyDTO,
|
TypesPostableAPIKeyDTO,
|
||||||
TypesPostableBulkInviteRequestDTO,
|
TypesPostableBulkInviteRequestDTO,
|
||||||
TypesPostableForgotPasswordDTO,
|
TypesPostableForgotPasswordDTO,
|
||||||
TypesPostableInviteDTO,
|
TypesPostableInviteDTO,
|
||||||
TypesPostableResetPasswordDTO,
|
TypesPostableResetPasswordDTO,
|
||||||
TypesStorableAPIKeyDTO,
|
TypesStorableAPIKeyDTO,
|
||||||
TypesUserDTO,
|
|
||||||
UpdateAPIKeyPathParameters,
|
UpdateAPIKeyPathParameters,
|
||||||
UpdateUser200,
|
UpdateUser200,
|
||||||
UpdateUserPathParameters,
|
UpdateUserPathParameters,
|
||||||
@@ -255,84 +249,6 @@ export const invalidateGetResetPasswordToken = async (
|
|||||||
return queryClient;
|
return queryClient;
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
|
||||||
* This endpoint lists all invites
|
|
||||||
* @summary List invites
|
|
||||||
*/
|
|
||||||
export const listInvite = (signal?: AbortSignal) => {
|
|
||||||
return GeneratedAPIInstance<ListInvite200>({
|
|
||||||
url: `/api/v1/invite`,
|
|
||||||
method: 'GET',
|
|
||||||
signal,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getListInviteQueryKey = () => {
|
|
||||||
return [`/api/v1/invite`] as const;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getListInviteQueryOptions = <
|
|
||||||
TData = Awaited<ReturnType<typeof listInvite>>,
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>
|
|
||||||
>(options?: {
|
|
||||||
query?: UseQueryOptions<Awaited<ReturnType<typeof listInvite>>, TError, TData>;
|
|
||||||
}) => {
|
|
||||||
const { query: queryOptions } = options ?? {};
|
|
||||||
|
|
||||||
const queryKey = queryOptions?.queryKey ?? getListInviteQueryKey();
|
|
||||||
|
|
||||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof listInvite>>> = ({
|
|
||||||
signal,
|
|
||||||
}) => listInvite(signal);
|
|
||||||
|
|
||||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
|
||||||
Awaited<ReturnType<typeof listInvite>>,
|
|
||||||
TError,
|
|
||||||
TData
|
|
||||||
> & { queryKey: QueryKey };
|
|
||||||
};
|
|
||||||
|
|
||||||
export type ListInviteQueryResult = NonNullable<
|
|
||||||
Awaited<ReturnType<typeof listInvite>>
|
|
||||||
>;
|
|
||||||
export type ListInviteQueryError = ErrorType<RenderErrorResponseDTO>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary List invites
|
|
||||||
*/
|
|
||||||
|
|
||||||
export function useListInvite<
|
|
||||||
TData = Awaited<ReturnType<typeof listInvite>>,
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>
|
|
||||||
>(options?: {
|
|
||||||
query?: UseQueryOptions<Awaited<ReturnType<typeof listInvite>>, TError, TData>;
|
|
||||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
|
||||||
const queryOptions = getListInviteQueryOptions(options);
|
|
||||||
|
|
||||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
|
||||||
queryKey: QueryKey;
|
|
||||||
};
|
|
||||||
|
|
||||||
query.queryKey = queryOptions.queryKey;
|
|
||||||
|
|
||||||
return query;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary List invites
|
|
||||||
*/
|
|
||||||
export const invalidateListInvite = async (
|
|
||||||
queryClient: QueryClient,
|
|
||||||
options?: InvalidateOptions,
|
|
||||||
): Promise<QueryClient> => {
|
|
||||||
await queryClient.invalidateQueries(
|
|
||||||
{ queryKey: getListInviteQueryKey() },
|
|
||||||
options,
|
|
||||||
);
|
|
||||||
|
|
||||||
return queryClient;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This endpoint creates an invite for a user
|
* This endpoint creates an invite for a user
|
||||||
* @summary Create invite
|
* @summary Create invite
|
||||||
@@ -416,257 +332,6 @@ export const useCreateInvite = <
|
|||||||
|
|
||||||
return useMutation(mutationOptions);
|
return useMutation(mutationOptions);
|
||||||
};
|
};
|
||||||
/**
|
|
||||||
* This endpoint deletes an invite by id
|
|
||||||
* @summary Delete invite
|
|
||||||
*/
|
|
||||||
export const deleteInvite = ({ id }: DeleteInvitePathParameters) => {
|
|
||||||
return GeneratedAPIInstance<void>({
|
|
||||||
url: `/api/v1/invite/${id}`,
|
|
||||||
method: 'DELETE',
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getDeleteInviteMutationOptions = <
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>,
|
|
||||||
TContext = unknown
|
|
||||||
>(options?: {
|
|
||||||
mutation?: UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof deleteInvite>>,
|
|
||||||
TError,
|
|
||||||
{ pathParams: DeleteInvitePathParameters },
|
|
||||||
TContext
|
|
||||||
>;
|
|
||||||
}): UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof deleteInvite>>,
|
|
||||||
TError,
|
|
||||||
{ pathParams: DeleteInvitePathParameters },
|
|
||||||
TContext
|
|
||||||
> => {
|
|
||||||
const mutationKey = ['deleteInvite'];
|
|
||||||
const { mutation: mutationOptions } = options
|
|
||||||
? options.mutation &&
|
|
||||||
'mutationKey' in options.mutation &&
|
|
||||||
options.mutation.mutationKey
|
|
||||||
? options
|
|
||||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
|
||||||
: { mutation: { mutationKey } };
|
|
||||||
|
|
||||||
const mutationFn: MutationFunction<
|
|
||||||
Awaited<ReturnType<typeof deleteInvite>>,
|
|
||||||
{ pathParams: DeleteInvitePathParameters }
|
|
||||||
> = (props) => {
|
|
||||||
const { pathParams } = props ?? {};
|
|
||||||
|
|
||||||
return deleteInvite(pathParams);
|
|
||||||
};
|
|
||||||
|
|
||||||
return { mutationFn, ...mutationOptions };
|
|
||||||
};
|
|
||||||
|
|
||||||
export type DeleteInviteMutationResult = NonNullable<
|
|
||||||
Awaited<ReturnType<typeof deleteInvite>>
|
|
||||||
>;
|
|
||||||
|
|
||||||
export type DeleteInviteMutationError = ErrorType<RenderErrorResponseDTO>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary Delete invite
|
|
||||||
*/
|
|
||||||
export const useDeleteInvite = <
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>,
|
|
||||||
TContext = unknown
|
|
||||||
>(options?: {
|
|
||||||
mutation?: UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof deleteInvite>>,
|
|
||||||
TError,
|
|
||||||
{ pathParams: DeleteInvitePathParameters },
|
|
||||||
TContext
|
|
||||||
>;
|
|
||||||
}): UseMutationResult<
|
|
||||||
Awaited<ReturnType<typeof deleteInvite>>,
|
|
||||||
TError,
|
|
||||||
{ pathParams: DeleteInvitePathParameters },
|
|
||||||
TContext
|
|
||||||
> => {
|
|
||||||
const mutationOptions = getDeleteInviteMutationOptions(options);
|
|
||||||
|
|
||||||
return useMutation(mutationOptions);
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* This endpoint gets an invite by token
|
|
||||||
* @summary Get invite
|
|
||||||
*/
|
|
||||||
export const getInvite = (
|
|
||||||
{ token }: GetInvitePathParameters,
|
|
||||||
signal?: AbortSignal,
|
|
||||||
) => {
|
|
||||||
return GeneratedAPIInstance<GetInvite200>({
|
|
||||||
url: `/api/v1/invite/${token}`,
|
|
||||||
method: 'GET',
|
|
||||||
signal,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getGetInviteQueryKey = ({ token }: GetInvitePathParameters) => {
|
|
||||||
return [`/api/v1/invite/${token}`] as const;
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getGetInviteQueryOptions = <
|
|
||||||
TData = Awaited<ReturnType<typeof getInvite>>,
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>
|
|
||||||
>(
|
|
||||||
{ token }: GetInvitePathParameters,
|
|
||||||
options?: {
|
|
||||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData>;
|
|
||||||
},
|
|
||||||
) => {
|
|
||||||
const { query: queryOptions } = options ?? {};
|
|
||||||
|
|
||||||
const queryKey = queryOptions?.queryKey ?? getGetInviteQueryKey({ token });
|
|
||||||
|
|
||||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getInvite>>> = ({
|
|
||||||
signal,
|
|
||||||
}) => getInvite({ token }, signal);
|
|
||||||
|
|
||||||
return {
|
|
||||||
queryKey,
|
|
||||||
queryFn,
|
|
||||||
enabled: !!token,
|
|
||||||
...queryOptions,
|
|
||||||
} as UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData> & {
|
|
||||||
queryKey: QueryKey;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
export type GetInviteQueryResult = NonNullable<
|
|
||||||
Awaited<ReturnType<typeof getInvite>>
|
|
||||||
>;
|
|
||||||
export type GetInviteQueryError = ErrorType<RenderErrorResponseDTO>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary Get invite
|
|
||||||
*/
|
|
||||||
|
|
||||||
export function useGetInvite<
|
|
||||||
TData = Awaited<ReturnType<typeof getInvite>>,
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>
|
|
||||||
>(
|
|
||||||
{ token }: GetInvitePathParameters,
|
|
||||||
options?: {
|
|
||||||
query?: UseQueryOptions<Awaited<ReturnType<typeof getInvite>>, TError, TData>;
|
|
||||||
},
|
|
||||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
|
||||||
const queryOptions = getGetInviteQueryOptions({ token }, options);
|
|
||||||
|
|
||||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
|
||||||
queryKey: QueryKey;
|
|
||||||
};
|
|
||||||
|
|
||||||
query.queryKey = queryOptions.queryKey;
|
|
||||||
|
|
||||||
return query;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary Get invite
|
|
||||||
*/
|
|
||||||
export const invalidateGetInvite = async (
|
|
||||||
queryClient: QueryClient,
|
|
||||||
{ token }: GetInvitePathParameters,
|
|
||||||
options?: InvalidateOptions,
|
|
||||||
): Promise<QueryClient> => {
|
|
||||||
await queryClient.invalidateQueries(
|
|
||||||
{ queryKey: getGetInviteQueryKey({ token }) },
|
|
||||||
options,
|
|
||||||
);
|
|
||||||
|
|
||||||
return queryClient;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This endpoint accepts an invite by token
|
|
||||||
* @summary Accept invite
|
|
||||||
*/
|
|
||||||
export const acceptInvite = (
|
|
||||||
typesPostableAcceptInviteDTO: BodyType<TypesPostableAcceptInviteDTO>,
|
|
||||||
signal?: AbortSignal,
|
|
||||||
) => {
|
|
||||||
return GeneratedAPIInstance<AcceptInvite201>({
|
|
||||||
url: `/api/v1/invite/accept`,
|
|
||||||
method: 'POST',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
data: typesPostableAcceptInviteDTO,
|
|
||||||
signal,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getAcceptInviteMutationOptions = <
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>,
|
|
||||||
TContext = unknown
|
|
||||||
>(options?: {
|
|
||||||
mutation?: UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof acceptInvite>>,
|
|
||||||
TError,
|
|
||||||
{ data: BodyType<TypesPostableAcceptInviteDTO> },
|
|
||||||
TContext
|
|
||||||
>;
|
|
||||||
}): UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof acceptInvite>>,
|
|
||||||
TError,
|
|
||||||
{ data: BodyType<TypesPostableAcceptInviteDTO> },
|
|
||||||
TContext
|
|
||||||
> => {
|
|
||||||
const mutationKey = ['acceptInvite'];
|
|
||||||
const { mutation: mutationOptions } = options
|
|
||||||
? options.mutation &&
|
|
||||||
'mutationKey' in options.mutation &&
|
|
||||||
options.mutation.mutationKey
|
|
||||||
? options
|
|
||||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
|
||||||
: { mutation: { mutationKey } };
|
|
||||||
|
|
||||||
const mutationFn: MutationFunction<
|
|
||||||
Awaited<ReturnType<typeof acceptInvite>>,
|
|
||||||
{ data: BodyType<TypesPostableAcceptInviteDTO> }
|
|
||||||
> = (props) => {
|
|
||||||
const { data } = props ?? {};
|
|
||||||
|
|
||||||
return acceptInvite(data);
|
|
||||||
};
|
|
||||||
|
|
||||||
return { mutationFn, ...mutationOptions };
|
|
||||||
};
|
|
||||||
|
|
||||||
export type AcceptInviteMutationResult = NonNullable<
|
|
||||||
Awaited<ReturnType<typeof acceptInvite>>
|
|
||||||
>;
|
|
||||||
export type AcceptInviteMutationBody = BodyType<TypesPostableAcceptInviteDTO>;
|
|
||||||
export type AcceptInviteMutationError = ErrorType<RenderErrorResponseDTO>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary Accept invite
|
|
||||||
*/
|
|
||||||
export const useAcceptInvite = <
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>,
|
|
||||||
TContext = unknown
|
|
||||||
>(options?: {
|
|
||||||
mutation?: UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof acceptInvite>>,
|
|
||||||
TError,
|
|
||||||
{ data: BodyType<TypesPostableAcceptInviteDTO> },
|
|
||||||
TContext
|
|
||||||
>;
|
|
||||||
}): UseMutationResult<
|
|
||||||
Awaited<ReturnType<typeof acceptInvite>>,
|
|
||||||
TError,
|
|
||||||
{ data: BodyType<TypesPostableAcceptInviteDTO> },
|
|
||||||
TContext
|
|
||||||
> => {
|
|
||||||
const mutationOptions = getAcceptInviteMutationOptions(options);
|
|
||||||
|
|
||||||
return useMutation(mutationOptions);
|
|
||||||
};
|
|
||||||
/**
|
/**
|
||||||
* This endpoint creates a bulk invite for a user
|
* This endpoint creates a bulk invite for a user
|
||||||
* @summary Create bulk invite
|
* @summary Create bulk invite
|
||||||
@@ -1428,13 +1093,13 @@ export const invalidateGetUser = async (
|
|||||||
*/
|
*/
|
||||||
export const updateUser = (
|
export const updateUser = (
|
||||||
{ id }: UpdateUserPathParameters,
|
{ id }: UpdateUserPathParameters,
|
||||||
typesUserDTO: BodyType<TypesUserDTO>,
|
typesDeprecatedUserDTO: BodyType<TypesDeprecatedUserDTO>,
|
||||||
) => {
|
) => {
|
||||||
return GeneratedAPIInstance<UpdateUser200>({
|
return GeneratedAPIInstance<UpdateUser200>({
|
||||||
url: `/api/v1/user/${id}`,
|
url: `/api/v1/user/${id}`,
|
||||||
method: 'PUT',
|
method: 'PUT',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
data: typesUserDTO,
|
data: typesDeprecatedUserDTO,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -1445,13 +1110,19 @@ export const getUpdateUserMutationOptions = <
|
|||||||
mutation?: UseMutationOptions<
|
mutation?: UseMutationOptions<
|
||||||
Awaited<ReturnType<typeof updateUser>>,
|
Awaited<ReturnType<typeof updateUser>>,
|
||||||
TError,
|
TError,
|
||||||
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
|
{
|
||||||
|
pathParams: UpdateUserPathParameters;
|
||||||
|
data: BodyType<TypesDeprecatedUserDTO>;
|
||||||
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
}): UseMutationOptions<
|
}): UseMutationOptions<
|
||||||
Awaited<ReturnType<typeof updateUser>>,
|
Awaited<ReturnType<typeof updateUser>>,
|
||||||
TError,
|
TError,
|
||||||
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
|
{
|
||||||
|
pathParams: UpdateUserPathParameters;
|
||||||
|
data: BodyType<TypesDeprecatedUserDTO>;
|
||||||
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
const mutationKey = ['updateUser'];
|
const mutationKey = ['updateUser'];
|
||||||
@@ -1465,7 +1136,10 @@ export const getUpdateUserMutationOptions = <
|
|||||||
|
|
||||||
const mutationFn: MutationFunction<
|
const mutationFn: MutationFunction<
|
||||||
Awaited<ReturnType<typeof updateUser>>,
|
Awaited<ReturnType<typeof updateUser>>,
|
||||||
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> }
|
{
|
||||||
|
pathParams: UpdateUserPathParameters;
|
||||||
|
data: BodyType<TypesDeprecatedUserDTO>;
|
||||||
|
}
|
||||||
> = (props) => {
|
> = (props) => {
|
||||||
const { pathParams, data } = props ?? {};
|
const { pathParams, data } = props ?? {};
|
||||||
|
|
||||||
@@ -1478,7 +1152,7 @@ export const getUpdateUserMutationOptions = <
|
|||||||
export type UpdateUserMutationResult = NonNullable<
|
export type UpdateUserMutationResult = NonNullable<
|
||||||
Awaited<ReturnType<typeof updateUser>>
|
Awaited<ReturnType<typeof updateUser>>
|
||||||
>;
|
>;
|
||||||
export type UpdateUserMutationBody = BodyType<TypesUserDTO>;
|
export type UpdateUserMutationBody = BodyType<TypesDeprecatedUserDTO>;
|
||||||
export type UpdateUserMutationError = ErrorType<RenderErrorResponseDTO>;
|
export type UpdateUserMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -1491,13 +1165,19 @@ export const useUpdateUser = <
|
|||||||
mutation?: UseMutationOptions<
|
mutation?: UseMutationOptions<
|
||||||
Awaited<ReturnType<typeof updateUser>>,
|
Awaited<ReturnType<typeof updateUser>>,
|
||||||
TError,
|
TError,
|
||||||
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
|
{
|
||||||
|
pathParams: UpdateUserPathParameters;
|
||||||
|
data: BodyType<TypesDeprecatedUserDTO>;
|
||||||
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
}): UseMutationResult<
|
}): UseMutationResult<
|
||||||
Awaited<ReturnType<typeof updateUser>>,
|
Awaited<ReturnType<typeof updateUser>>,
|
||||||
TError,
|
TError,
|
||||||
{ pathParams: UpdateUserPathParameters; data: BodyType<TypesUserDTO> },
|
{
|
||||||
|
pathParams: UpdateUserPathParameters;
|
||||||
|
data: BodyType<TypesDeprecatedUserDTO>;
|
||||||
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
const mutationOptions = getUpdateUserMutationOptions(options);
|
const mutationOptions = getUpdateUserMutationOptions(options);
|
||||||
|
|||||||
@@ -81,7 +81,8 @@ export const interceptorRejected = async (
|
|||||||
response.config.url !== '/sessions/email_password' &&
|
response.config.url !== '/sessions/email_password' &&
|
||||||
!(
|
!(
|
||||||
response.config.url === '/sessions' && response.config.method === 'delete'
|
response.config.url === '/sessions' && response.config.method === 'delete'
|
||||||
)
|
) &&
|
||||||
|
response.config.url !== '/authz/check'
|
||||||
) {
|
) {
|
||||||
try {
|
try {
|
||||||
const accessToken = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN);
|
const accessToken = getLocalStorageApi(LOCALSTORAGE.AUTH_TOKEN);
|
||||||
|
|||||||
152
frontend/src/api/interceptors.test.ts
Normal file
152
frontend/src/api/interceptors.test.ts
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
import axios, { AxiosHeaders, AxiosResponse } from 'axios';
|
||||||
|
|
||||||
|
import { interceptorRejected } from './index';
|
||||||
|
|
||||||
|
jest.mock('api/browser/localstorage/get', () => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: jest.fn(() => 'mock-token'),
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('api/v2/sessions/rotate/post', () => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: jest.fn(() =>
|
||||||
|
Promise.resolve({
|
||||||
|
data: { accessToken: 'new-token', refreshToken: 'new-refresh' },
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('AppRoutes/utils', () => ({
|
||||||
|
__esModule: true,
|
||||||
|
default: jest.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
|
jest.mock('axios', () => {
|
||||||
|
const actualAxios = jest.requireActual('axios');
|
||||||
|
const mockAxios = jest.fn().mockResolvedValue({ data: 'success' });
|
||||||
|
|
||||||
|
return {
|
||||||
|
...actualAxios,
|
||||||
|
default: Object.assign(mockAxios, {
|
||||||
|
...actualAxios.default,
|
||||||
|
isAxiosError: jest.fn().mockReturnValue(true),
|
||||||
|
create: actualAxios.create,
|
||||||
|
}),
|
||||||
|
__esModule: true,
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('interceptorRejected', () => {
|
||||||
|
beforeEach(() => {
|
||||||
|
jest.clearAllMocks();
|
||||||
|
((axios as unknown) as jest.Mock).mockResolvedValue({ data: 'success' });
|
||||||
|
((axios.isAxiosError as unknown) as jest.Mock).mockReturnValue(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve array payload structure when retrying a 401 request', async () => {
|
||||||
|
const arrayPayload = [
|
||||||
|
{ relation: 'assignee', object: { resource: { name: 'role' } } },
|
||||||
|
{ relation: 'assignee', object: { resource: { name: 'editor' } } },
|
||||||
|
];
|
||||||
|
|
||||||
|
const error = ({
|
||||||
|
response: {
|
||||||
|
status: 401,
|
||||||
|
config: {
|
||||||
|
url: '/some-endpoint',
|
||||||
|
method: 'POST',
|
||||||
|
baseURL: 'http://localhost/',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
data: JSON.stringify(arrayPayload),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
config: {
|
||||||
|
url: '/some-endpoint',
|
||||||
|
method: 'POST',
|
||||||
|
baseURL: 'http://localhost/',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
data: JSON.stringify(arrayPayload),
|
||||||
|
},
|
||||||
|
} as unknown) as AxiosResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await interceptorRejected(error);
|
||||||
|
} catch {
|
||||||
|
// Expected to reject after retry
|
||||||
|
}
|
||||||
|
|
||||||
|
const mockAxiosFn = (axios as unknown) as jest.Mock;
|
||||||
|
expect(mockAxiosFn.mock.calls.length).toBe(1);
|
||||||
|
const retryCallConfig = mockAxiosFn.mock.calls[0][0];
|
||||||
|
expect(Array.isArray(JSON.parse(retryCallConfig.data))).toBe(true);
|
||||||
|
expect(JSON.parse(retryCallConfig.data)).toEqual(arrayPayload);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should preserve object payload structure when retrying a 401 request', async () => {
|
||||||
|
const objectPayload = { key: 'value', nested: { data: 123 } };
|
||||||
|
|
||||||
|
const error = ({
|
||||||
|
response: {
|
||||||
|
status: 401,
|
||||||
|
config: {
|
||||||
|
url: '/some-endpoint',
|
||||||
|
method: 'POST',
|
||||||
|
baseURL: 'http://localhost/',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
data: JSON.stringify(objectPayload),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
config: {
|
||||||
|
url: '/some-endpoint',
|
||||||
|
method: 'POST',
|
||||||
|
baseURL: 'http://localhost/',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
data: JSON.stringify(objectPayload),
|
||||||
|
},
|
||||||
|
} as unknown) as AxiosResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await interceptorRejected(error);
|
||||||
|
} catch {
|
||||||
|
// Expected to reject after retry
|
||||||
|
}
|
||||||
|
|
||||||
|
const mockAxiosFn = (axios as unknown) as jest.Mock;
|
||||||
|
expect(mockAxiosFn.mock.calls.length).toBe(1);
|
||||||
|
const retryCallConfig = mockAxiosFn.mock.calls[0][0];
|
||||||
|
expect(JSON.parse(retryCallConfig.data)).toEqual(objectPayload);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle undefined data gracefully when retrying', async () => {
|
||||||
|
const error = ({
|
||||||
|
response: {
|
||||||
|
status: 401,
|
||||||
|
config: {
|
||||||
|
url: '/some-endpoint',
|
||||||
|
method: 'GET',
|
||||||
|
baseURL: 'http://localhost/',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
data: undefined,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
config: {
|
||||||
|
url: '/some-endpoint',
|
||||||
|
method: 'GET',
|
||||||
|
baseURL: 'http://localhost/',
|
||||||
|
headers: new AxiosHeaders(),
|
||||||
|
data: undefined,
|
||||||
|
},
|
||||||
|
} as unknown) as AxiosResponse;
|
||||||
|
|
||||||
|
try {
|
||||||
|
await interceptorRejected(error);
|
||||||
|
} catch {
|
||||||
|
// Expected to reject after retry
|
||||||
|
}
|
||||||
|
|
||||||
|
const mockAxiosFn = (axios as unknown) as jest.Mock;
|
||||||
|
expect(mockAxiosFn.mock.calls.length).toBe(1);
|
||||||
|
const retryCallConfig = mockAxiosFn.mock.calls[0][0];
|
||||||
|
expect(retryCallConfig.data).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -8,42 +8,32 @@ export const downloadExportData = async (
|
|||||||
props: ExportRawDataProps,
|
props: ExportRawDataProps,
|
||||||
): Promise<void> => {
|
): Promise<void> => {
|
||||||
try {
|
try {
|
||||||
const queryParams = new URLSearchParams();
|
const response = await axios.post<Blob>(
|
||||||
|
`export_raw_data?format=${encodeURIComponent(props.format)}`,
|
||||||
queryParams.append('start', String(props.start));
|
props.body,
|
||||||
queryParams.append('end', String(props.end));
|
{
|
||||||
queryParams.append('filter', props.filter);
|
responseType: 'blob',
|
||||||
props.columns.forEach((col) => {
|
decompress: true,
|
||||||
queryParams.append('columns', col);
|
headers: {
|
||||||
});
|
Accept: 'application/octet-stream',
|
||||||
queryParams.append('order_by', props.orderBy);
|
'Content-Type': 'application/json',
|
||||||
queryParams.append('limit', String(props.limit));
|
},
|
||||||
queryParams.append('format', props.format);
|
timeout: 0,
|
||||||
|
|
||||||
const response = await axios.get<Blob>(`export_raw_data?${queryParams}`, {
|
|
||||||
responseType: 'blob', // Important: tell axios to handle response as blob
|
|
||||||
decompress: true, // Enable automatic decompression
|
|
||||||
headers: {
|
|
||||||
Accept: 'application/octet-stream', // Tell server we expect binary data
|
|
||||||
},
|
},
|
||||||
timeout: 0,
|
);
|
||||||
});
|
|
||||||
|
|
||||||
// Only proceed if the response status is 200
|
|
||||||
if (response.status !== 200) {
|
if (response.status !== 200) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
`Failed to download data: server returned status ${response.status}`,
|
`Failed to download data: server returned status ${response.status}`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// Create blob URL from response data
|
|
||||||
const blob = new Blob([response.data], { type: 'application/octet-stream' });
|
const blob = new Blob([response.data], { type: 'application/octet-stream' });
|
||||||
const url = window.URL.createObjectURL(blob);
|
const url = window.URL.createObjectURL(blob);
|
||||||
|
|
||||||
// Create and configure download link
|
|
||||||
const link = document.createElement('a');
|
const link = document.createElement('a');
|
||||||
link.href = url;
|
link.href = url;
|
||||||
|
|
||||||
// Get filename from Content-Disposition header or generate timestamped default
|
|
||||||
const filename =
|
const filename =
|
||||||
response.headers['content-disposition']
|
response.headers['content-disposition']
|
||||||
?.split('filename=')[1]
|
?.split('filename=')[1]
|
||||||
@@ -51,7 +41,6 @@ export const downloadExportData = async (
|
|||||||
|
|
||||||
link.setAttribute('download', filename);
|
link.setAttribute('download', filename);
|
||||||
|
|
||||||
// Trigger download
|
|
||||||
document.body.appendChild(link);
|
document.body.appendChild(link);
|
||||||
link.click();
|
link.click();
|
||||||
link.remove();
|
link.remove();
|
||||||
|
|||||||
@@ -1,19 +0,0 @@
|
|||||||
import axios from 'api';
|
|
||||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
|
||||||
import { AxiosError } from 'axios';
|
|
||||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
|
||||||
import { PayloadProps, PendingInvite } from 'types/api/user/getPendingInvites';
|
|
||||||
|
|
||||||
const get = async (): Promise<SuccessResponseV2<PendingInvite[]>> => {
|
|
||||||
try {
|
|
||||||
const response = await axios.get<PayloadProps>(`/invite`);
|
|
||||||
return {
|
|
||||||
httpStatusCode: response.status,
|
|
||||||
data: response.data.data,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export default get;
|
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
import axios from 'api';
|
|
||||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
|
||||||
import { AxiosError } from 'axios';
|
|
||||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
|
||||||
import { PayloadProps, Props } from 'types/api/user/accept';
|
|
||||||
import { UserResponse } from 'types/api/user/getUser';
|
|
||||||
|
|
||||||
const accept = async (
|
|
||||||
props: Props,
|
|
||||||
): Promise<SuccessResponseV2<UserResponse>> => {
|
|
||||||
try {
|
|
||||||
const response = await axios.post<PayloadProps>(`/invite/accept`, props);
|
|
||||||
return {
|
|
||||||
httpStatusCode: response.status,
|
|
||||||
data: response.data.data,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export default accept;
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
import axios from 'api';
|
|
||||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
|
||||||
import { AxiosError } from 'axios';
|
|
||||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
|
||||||
import { Props } from 'types/api/user/deleteInvite';
|
|
||||||
|
|
||||||
const del = async (props: Props): Promise<SuccessResponseV2<null>> => {
|
|
||||||
try {
|
|
||||||
const response = await axios.delete(`/invite/${props.id}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
httpStatusCode: response.status,
|
|
||||||
data: null,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export default del;
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
import axios from 'api';
|
|
||||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
|
||||||
import { AxiosError } from 'axios';
|
|
||||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
|
||||||
import {
|
|
||||||
InviteDetails,
|
|
||||||
PayloadProps,
|
|
||||||
Props,
|
|
||||||
} from 'types/api/user/getInviteDetails';
|
|
||||||
|
|
||||||
const getInviteDetails = async (
|
|
||||||
props: Props,
|
|
||||||
): Promise<SuccessResponseV2<InviteDetails>> => {
|
|
||||||
try {
|
|
||||||
const response = await axios.get<PayloadProps>(
|
|
||||||
`/invite/${props.inviteId}?ref=${window.location.href}`,
|
|
||||||
);
|
|
||||||
|
|
||||||
return {
|
|
||||||
httpStatusCode: response.status,
|
|
||||||
data: response.data.data,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export default getInviteDetails;
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
import axios from 'api';
|
|
||||||
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
|
|
||||||
import { AxiosError } from 'axios';
|
|
||||||
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
|
|
||||||
import { Props } from 'types/api/user/deleteUser';
|
|
||||||
|
|
||||||
const deleteUser = async (props: Props): Promise<SuccessResponseV2<null>> => {
|
|
||||||
try {
|
|
||||||
const response = await axios.delete(`/user/${props.userId}`);
|
|
||||||
|
|
||||||
return {
|
|
||||||
httpStatusCode: response.status,
|
|
||||||
data: null,
|
|
||||||
};
|
|
||||||
} catch (error) {
|
|
||||||
ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
export default deleteUser;
|
|
||||||
@@ -1,8 +1,14 @@
|
|||||||
function UnAuthorized(): JSX.Element {
|
function UnAuthorized({
|
||||||
|
width = 137,
|
||||||
|
height = 137,
|
||||||
|
}: {
|
||||||
|
height?: number;
|
||||||
|
width?: number;
|
||||||
|
}): JSX.Element {
|
||||||
return (
|
return (
|
||||||
<svg
|
<svg
|
||||||
width="137"
|
width={width}
|
||||||
height="137"
|
height={height}
|
||||||
viewBox="0 0 137 137"
|
viewBox="0 0 137 137"
|
||||||
fill="none"
|
fill="none"
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
xmlns="http://www.w3.org/2000/svg"
|
||||||
|
|||||||
1
frontend/src/auto-import-registry.d.ts
vendored
1
frontend/src/auto-import-registry.d.ts
vendored
@@ -30,3 +30,4 @@ import '@signozhq/switch';
|
|||||||
import '@signozhq/table';
|
import '@signozhq/table';
|
||||||
import '@signozhq/toggle-group';
|
import '@signozhq/toggle-group';
|
||||||
import '@signozhq/tooltip';
|
import '@signozhq/tooltip';
|
||||||
|
import '@signozhq/ui';
|
||||||
|
|||||||
@@ -0,0 +1,97 @@
|
|||||||
|
.announcement-banner {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: var(--spacing-4);
|
||||||
|
padding: var(--padding-2) var(--padding-4);
|
||||||
|
height: 40px;
|
||||||
|
font-family: var(--font-sans), sans-serif;
|
||||||
|
font-size: var(--label-base-500-font-size);
|
||||||
|
line-height: var(--label-base-500-line-height);
|
||||||
|
font-weight: var(--label-base-500-font-weight);
|
||||||
|
letter-spacing: -0.065px;
|
||||||
|
|
||||||
|
&--warning {
|
||||||
|
background-color: var(--callout-warning-background);
|
||||||
|
color: var(--callout-warning-description);
|
||||||
|
.announcement-banner__action,
|
||||||
|
.announcement-banner__dismiss {
|
||||||
|
background: var(--callout-warning-border);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&--info {
|
||||||
|
background-color: var(--callout-primary-background);
|
||||||
|
color: var(--callout-primary-description);
|
||||||
|
.announcement-banner__action,
|
||||||
|
.announcement-banner__dismiss {
|
||||||
|
background: var(--callout-primary-border);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&--error {
|
||||||
|
background-color: var(--callout-error-background);
|
||||||
|
color: var(--callout-error-description);
|
||||||
|
.announcement-banner__action,
|
||||||
|
.announcement-banner__dismiss {
|
||||||
|
background: var(--callout-error-border);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&--success {
|
||||||
|
background-color: var(--callout-success-background);
|
||||||
|
color: var(--callout-success-description);
|
||||||
|
.announcement-banner__action,
|
||||||
|
.announcement-banner__dismiss {
|
||||||
|
background: var(--callout-success-border);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__body {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--spacing-4);
|
||||||
|
flex: 1;
|
||||||
|
min-width: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__icon {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__message {
|
||||||
|
flex: 1;
|
||||||
|
min-width: 0;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
white-space: nowrap;
|
||||||
|
line-height: var(--line-height-normal);
|
||||||
|
|
||||||
|
strong {
|
||||||
|
font-weight: var(--font-weight-semibold);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__action {
|
||||||
|
height: 24px;
|
||||||
|
font-size: var(--label-small-500-font-size);
|
||||||
|
color: currentColor;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__dismiss {
|
||||||
|
width: 24px;
|
||||||
|
height: 24px;
|
||||||
|
padding: 0;
|
||||||
|
color: currentColor;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
opacity: 0.8;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,89 @@
|
|||||||
|
import { render, screen, userEvent } from 'tests/test-utils';
|
||||||
|
|
||||||
|
import {
|
||||||
|
AnnouncementBanner,
|
||||||
|
AnnouncementBannerProps,
|
||||||
|
PersistedAnnouncementBanner,
|
||||||
|
} from './index';
|
||||||
|
|
||||||
|
const STORAGE_KEY = 'test-banner-dismissed';
|
||||||
|
|
||||||
|
function renderBanner(props: Partial<AnnouncementBannerProps> = {}): void {
|
||||||
|
render(<AnnouncementBanner message="Test message" {...props} />);
|
||||||
|
}
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
localStorage.removeItem(STORAGE_KEY);
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('AnnouncementBanner', () => {
|
||||||
|
it('renders message and default warning variant', () => {
|
||||||
|
renderBanner({ message: <strong>Heads up</strong> });
|
||||||
|
|
||||||
|
const alert = screen.getByRole('alert');
|
||||||
|
expect(alert).toHaveClass('announcement-banner--warning');
|
||||||
|
expect(alert).toHaveTextContent('Heads up');
|
||||||
|
});
|
||||||
|
|
||||||
|
it.each(['warning', 'info', 'success', 'error'] as const)(
|
||||||
|
'renders %s variant correctly',
|
||||||
|
(type) => {
|
||||||
|
renderBanner({ type, message: 'Test message' });
|
||||||
|
const alert = screen.getByRole('alert');
|
||||||
|
expect(alert).toHaveClass(`announcement-banner--${type}`);
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
it('calls action onClick when action button is clicked', async () => {
|
||||||
|
const onClick = jest.fn() as jest.MockedFunction<() => void>;
|
||||||
|
renderBanner({ action: { label: 'Go to Settings', onClick } });
|
||||||
|
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
await user.click(screen.getByRole('button', { name: /go to settings/i }));
|
||||||
|
|
||||||
|
expect(onClick).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('hides dismiss button when onClose is not provided and hides icon when icon is null', () => {
|
||||||
|
renderBanner({ onClose: undefined, icon: null });
|
||||||
|
|
||||||
|
expect(
|
||||||
|
screen.queryByRole('button', { name: /dismiss/i }),
|
||||||
|
).not.toBeInTheDocument();
|
||||||
|
expect(
|
||||||
|
screen.queryByRole('alert')?.querySelector('.announcement-banner__icon'),
|
||||||
|
).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('PersistedAnnouncementBanner', () => {
|
||||||
|
it('dismisses on click, calls onDismiss, and persists to localStorage', async () => {
|
||||||
|
const onDismiss = jest.fn() as jest.MockedFunction<() => void>;
|
||||||
|
render(
|
||||||
|
<PersistedAnnouncementBanner
|
||||||
|
message="Test message"
|
||||||
|
storageKey={STORAGE_KEY}
|
||||||
|
onDismiss={onDismiss}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
await user.click(screen.getByRole('button', { name: /dismiss/i }));
|
||||||
|
|
||||||
|
expect(screen.queryByRole('alert')).not.toBeInTheDocument();
|
||||||
|
expect(onDismiss).toHaveBeenCalledTimes(1);
|
||||||
|
expect(localStorage.getItem(STORAGE_KEY)).toBe('true');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('does not render when storageKey is already set in localStorage', () => {
|
||||||
|
localStorage.setItem(STORAGE_KEY, 'true');
|
||||||
|
render(
|
||||||
|
<PersistedAnnouncementBanner
|
||||||
|
message="Test message"
|
||||||
|
storageKey={STORAGE_KEY}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.queryByRole('alert')).not.toBeInTheDocument();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -0,0 +1,84 @@
|
|||||||
|
import { ReactNode } from 'react';
|
||||||
|
import { Button } from '@signozhq/button';
|
||||||
|
import {
|
||||||
|
CircleAlert,
|
||||||
|
CircleCheckBig,
|
||||||
|
Info,
|
||||||
|
TriangleAlert,
|
||||||
|
X,
|
||||||
|
} from '@signozhq/icons';
|
||||||
|
import cx from 'classnames';
|
||||||
|
|
||||||
|
import './AnnouncementBanner.styles.scss';
|
||||||
|
|
||||||
|
export type AnnouncementBannerType = 'warning' | 'info' | 'error' | 'success';
|
||||||
|
|
||||||
|
export interface AnnouncementBannerAction {
|
||||||
|
label: string;
|
||||||
|
onClick: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AnnouncementBannerProps {
|
||||||
|
message: ReactNode;
|
||||||
|
type?: AnnouncementBannerType;
|
||||||
|
icon?: ReactNode | null;
|
||||||
|
action?: AnnouncementBannerAction;
|
||||||
|
onClose?: () => void;
|
||||||
|
className?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const DEFAULT_ICONS: Record<AnnouncementBannerType, ReactNode> = {
|
||||||
|
warning: <TriangleAlert size={14} />,
|
||||||
|
info: <Info size={14} />,
|
||||||
|
error: <CircleAlert size={14} />,
|
||||||
|
success: <CircleCheckBig size={14} />,
|
||||||
|
};
|
||||||
|
|
||||||
|
export default function AnnouncementBanner({
|
||||||
|
message,
|
||||||
|
type = 'warning',
|
||||||
|
icon,
|
||||||
|
action,
|
||||||
|
onClose,
|
||||||
|
className,
|
||||||
|
}: AnnouncementBannerProps): JSX.Element {
|
||||||
|
const resolvedIcon = icon === null ? null : icon ?? DEFAULT_ICONS[type];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
role="alert"
|
||||||
|
className={cx(
|
||||||
|
'announcement-banner',
|
||||||
|
`announcement-banner--${type}`,
|
||||||
|
className,
|
||||||
|
)}
|
||||||
|
>
|
||||||
|
<div className="announcement-banner__body">
|
||||||
|
{resolvedIcon && (
|
||||||
|
<span className="announcement-banner__icon">{resolvedIcon}</span>
|
||||||
|
)}
|
||||||
|
<span className="announcement-banner__message">{message}</span>
|
||||||
|
{action && (
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
className="announcement-banner__action"
|
||||||
|
onClick={action.onClick}
|
||||||
|
>
|
||||||
|
{action.label}
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{onClose && (
|
||||||
|
<Button
|
||||||
|
type="button"
|
||||||
|
aria-label="Dismiss"
|
||||||
|
className="announcement-banner__dismiss"
|
||||||
|
onClick={onClose}
|
||||||
|
>
|
||||||
|
<X size={14} />
|
||||||
|
</Button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
import { useState } from 'react';
|
||||||
|
|
||||||
|
import AnnouncementBanner, {
|
||||||
|
AnnouncementBannerProps,
|
||||||
|
} from './AnnouncementBanner';
|
||||||
|
|
||||||
|
interface PersistedAnnouncementBannerProps extends AnnouncementBannerProps {
|
||||||
|
storageKey: string;
|
||||||
|
onDismiss?: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isDismissed(storageKey: string): boolean {
|
||||||
|
return localStorage.getItem(storageKey) === 'true';
|
||||||
|
}
|
||||||
|
|
||||||
|
export default function PersistedAnnouncementBanner({
|
||||||
|
storageKey,
|
||||||
|
onDismiss,
|
||||||
|
...props
|
||||||
|
}: PersistedAnnouncementBannerProps): JSX.Element | null {
|
||||||
|
const [visible, setVisible] = useState(() => !isDismissed(storageKey));
|
||||||
|
|
||||||
|
if (!visible) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleClose = (): void => {
|
||||||
|
localStorage.setItem(storageKey, 'true');
|
||||||
|
setVisible(false);
|
||||||
|
onDismiss?.();
|
||||||
|
};
|
||||||
|
|
||||||
|
return <AnnouncementBanner {...props} onClose={handleClose} />;
|
||||||
|
}
|
||||||
12
frontend/src/components/AnnouncementBanner/index.ts
Normal file
12
frontend/src/components/AnnouncementBanner/index.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import AnnouncementBanner from './AnnouncementBanner';
|
||||||
|
import PersistedAnnouncementBanner from './PersistedAnnouncementBanner';
|
||||||
|
|
||||||
|
export type {
|
||||||
|
AnnouncementBannerAction,
|
||||||
|
AnnouncementBannerProps,
|
||||||
|
AnnouncementBannerType,
|
||||||
|
} from './AnnouncementBanner';
|
||||||
|
|
||||||
|
export { AnnouncementBanner, PersistedAnnouncementBanner };
|
||||||
|
|
||||||
|
export default AnnouncementBanner;
|
||||||
@@ -0,0 +1,106 @@
|
|||||||
|
.create-sa-modal {
|
||||||
|
max-width: 530px;
|
||||||
|
background: var(--popover);
|
||||||
|
border: 1px solid var(--secondary);
|
||||||
|
border-radius: 4px;
|
||||||
|
box-shadow: 0 4px 9px 0 rgba(0, 0, 0, 0.04);
|
||||||
|
|
||||||
|
[data-slot='dialog-header'] {
|
||||||
|
padding: var(--padding-4);
|
||||||
|
border-bottom: 1px solid var(--secondary);
|
||||||
|
flex-shrink: 0;
|
||||||
|
background: transparent;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-slot='dialog-title'] {
|
||||||
|
font-size: var(--label-base-400-font-size);
|
||||||
|
font-weight: var(--label-base-400-font-weight);
|
||||||
|
line-height: var(--label-base-400-line-height);
|
||||||
|
letter-spacing: -0.065px;
|
||||||
|
color: var(--bg-base-white);
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-slot='dialog-description'] {
|
||||||
|
padding: 0;
|
||||||
|
|
||||||
|
.create-sa-modal__content {
|
||||||
|
padding: var(--padding-4);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.create-sa-form {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--spacing-2);
|
||||||
|
|
||||||
|
&__item {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--spacing-1);
|
||||||
|
margin-bottom: var(--spacing-4);
|
||||||
|
|
||||||
|
> label {
|
||||||
|
font-size: var(--paragraph-base-400-font-size);
|
||||||
|
font-weight: var(--paragraph-base-400-font-weight);
|
||||||
|
color: var(--foreground);
|
||||||
|
letter-spacing: -0.07px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__input {
|
||||||
|
height: 32px;
|
||||||
|
color: var(--l1-foreground);
|
||||||
|
background-color: var(--l2-background);
|
||||||
|
border-color: var(--border);
|
||||||
|
font-size: var(--paragraph-base-400-font-size);
|
||||||
|
border-radius: 2px;
|
||||||
|
width: 100%;
|
||||||
|
|
||||||
|
&::placeholder {
|
||||||
|
color: var(--l3-foreground);
|
||||||
|
}
|
||||||
|
|
||||||
|
&:focus {
|
||||||
|
border-color: var(--primary);
|
||||||
|
box-shadow: none;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__error {
|
||||||
|
font-size: var(--paragraph-small-400-font-size);
|
||||||
|
color: var(--destructive);
|
||||||
|
line-height: var(--paragraph-small-400-line-height);
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__helper {
|
||||||
|
font-size: var(--paragraph-small-400-font-size);
|
||||||
|
color: var(--l3-foreground);
|
||||||
|
margin: calc(var(--spacing-2) * -1) 0 var(--spacing-4) 0;
|
||||||
|
line-height: var(--paragraph-small-400-line-height);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.create-sa-modal__footer {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: row;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: flex-end;
|
||||||
|
padding: 0 var(--padding-4);
|
||||||
|
height: 56px;
|
||||||
|
min-height: 56px;
|
||||||
|
border-top: 1px solid var(--secondary);
|
||||||
|
gap: var(--spacing-4);
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.lightMode {
|
||||||
|
.create-sa-modal {
|
||||||
|
[data-slot='dialog-title'] {
|
||||||
|
color: var(--bg-base-black);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,230 @@
|
|||||||
|
import { Controller, useForm } from 'react-hook-form';
|
||||||
|
import { useQueryClient } from 'react-query';
|
||||||
|
import { Button } from '@signozhq/button';
|
||||||
|
import { DialogFooter, DialogWrapper } from '@signozhq/dialog';
|
||||||
|
import { X } from '@signozhq/icons';
|
||||||
|
import { Input } from '@signozhq/input';
|
||||||
|
import { toast } from '@signozhq/sonner';
|
||||||
|
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||||
|
import {
|
||||||
|
invalidateListServiceAccounts,
|
||||||
|
useCreateServiceAccount,
|
||||||
|
} from 'api/generated/services/serviceaccount';
|
||||||
|
import type { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import { AxiosError } from 'axios';
|
||||||
|
import RolesSelect, { useRoles } from 'components/RolesSelect';
|
||||||
|
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
|
||||||
|
import { parseAsBoolean, useQueryState } from 'nuqs';
|
||||||
|
import { EMAIL_REGEX } from 'utils/app';
|
||||||
|
|
||||||
|
import './CreateServiceAccountModal.styles.scss';
|
||||||
|
|
||||||
|
// Shape of the create-service-account form managed by react-hook-form.
interface FormValues {
	name: string;
	// Notification email for the account (validated against EMAIL_REGEX).
	email: string;
	// Selected role identifiers; validation requires at least one entry.
	roles: string[];
}
|
||||||
|
|
||||||
|
/**
 * Modal dialog for creating a new service account (name, notification
 * email, roles).
 *
 * Visibility is driven by the `create-sa` URL query param (nuqs boolean
 * state), so the modal can be opened/closed via the URL. On successful
 * creation it resets the form, clears the query param (closing the modal)
 * and invalidates the service-account list query so the table refetches.
 */
function CreateServiceAccountModal(): JSX.Element {
	const queryClient = useQueryClient();
	// Open state lives in the URL (?create-sa=true); setting it to null
	// removes the param entirely.
	const [isOpen, setIsOpen] = useQueryState(
		SA_QUERY_PARAMS.CREATE_SA,
		parseAsBoolean.withDefault(false),
	);

	const {
		control,
		handleSubmit,
		reset,
		formState: { isValid, errors },
	} = useForm<FormValues>({
		// Validate on every change so the submit button enables/disables live.
		mode: 'onChange',
		defaultValues: {
			name: '',
			email: '',
			roles: [],
		},
	});

	const {
		mutate: createServiceAccount,
		isLoading: isSubmitting,
	} = useCreateServiceAccount({
		mutation: {
			onSuccess: async () => {
				toast.success('Service account created successfully', {
					richColors: true,
				});
				reset();
				await setIsOpen(null);
				await invalidateListServiceAccounts(queryClient);
			},
			onError: (err) => {
				// Prefer the API-provided message when the generated error
				// converter can parse one; otherwise show a generic fallback.
				const errMessage =
					convertToApiError(
						err as AxiosError<RenderErrorResponseDTO, unknown> | null,
					)?.getErrorMessage() || 'An error occurred';
				toast.error(`Failed to create service account: ${errMessage}`, {
					richColors: true,
				});
			},
		},
	});
	// Roles for the multi-select, with load/error state for RolesSelect.
	const {
		roles,
		isLoading: rolesLoading,
		isError: rolesError,
		error: rolesErrorObj,
		refetch: refetchRoles,
	} = useRoles();

	// Discard any in-progress input and close the modal.
	function handleClose(): void {
		reset();
		setIsOpen(null);
	}

	// Submit handler — invoked by react-hook-form only after validation passes.
	function handleCreate(values: FormValues): void {
		createServiceAccount({
			data: {
				name: values.name.trim(),
				email: values.email.trim(),
				roles: values.roles,
			},
		});
	}

	return (
		<DialogWrapper
			title="New Service Account"
			open={isOpen}
			onOpenChange={(open): void => {
				if (!open) {
					handleClose();
				}
			}}
			showCloseButton
			width="narrow"
			className="create-sa-modal"
			disableOutsideClick={false}
		>
			<div className="create-sa-modal__content">
				{/* The form carries an id so the footer submit button, rendered
				    outside this <form>, can target it via its `form` attribute. */}
				<form
					id="create-sa-form"
					className="create-sa-form"
					onSubmit={handleSubmit(handleCreate)}
				>
					<div className="create-sa-form__item">
						<label htmlFor="sa-name">Name</label>
						<Controller
							name="name"
							control={control}
							rules={{ required: 'Name is required' }}
							render={({ field }): JSX.Element => (
								<Input
									id="sa-name"
									placeholder="Enter a name"
									className="create-sa-form__input"
									value={field.value}
									onChange={field.onChange}
									onBlur={field.onBlur}
								/>
							)}
						/>
						{errors.name && (
							<p className="create-sa-form__error">{errors.name.message}</p>
						)}
					</div>

					<div className="create-sa-form__item">
						<label htmlFor="sa-email">Email Address</label>
						<Controller
							name="email"
							control={control}
							rules={{
								required: 'Email Address is required',
								pattern: {
									value: EMAIL_REGEX,
									message: 'Please enter a valid email address',
								},
							}}
							render={({ field }): JSX.Element => (
								<Input
									id="sa-email"
									type="email"
									placeholder="email@example.com"
									className="create-sa-form__input"
									value={field.value}
									onChange={field.onChange}
									onBlur={field.onBlur}
								/>
							)}
						/>
						{errors.email && (
							<p className="create-sa-form__error">{errors.email.message}</p>
						)}
					</div>
					<p className="create-sa-form__helper">
						Used only for notifications about this service account. It is not used for
						authentication.
					</p>

					<div className="create-sa-form__item">
						<label htmlFor="sa-roles">Roles</label>
						<Controller
							name="roles"
							control={control}
							rules={{
								validate: (value): string | true =>
									value.length > 0 || 'At least one role is required',
							}}
							render={({ field }): JSX.Element => (
								<RolesSelect
									id="sa-roles"
									mode="multiple"
									roles={roles}
									loading={rolesLoading}
									isError={rolesError}
									error={rolesErrorObj}
									onRefetch={refetchRoles}
									placeholder="Select roles"
									value={field.value}
									onChange={field.onChange}
								/>
							)}
						/>
						{errors.roles && (
							<p className="create-sa-form__error">{errors.roles.message}</p>
						)}
					</div>
				</form>
			</div>

			<DialogFooter className="create-sa-modal__footer">
				<Button
					type="button"
					variant="solid"
					color="secondary"
					size="sm"
					onClick={handleClose}
				>
					<X size={12} />
					Cancel
				</Button>

				<Button
					type="submit"
					form="create-sa-form"
					variant="solid"
					color="primary"
					size="sm"
					loading={isSubmitting}
					disabled={!isValid}
				>
					Create Service Account
				</Button>
			</DialogFooter>
		</DialogWrapper>
	);
}

export default CreateServiceAccountModal;
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
import { toast } from '@signozhq/sonner';
|
||||||
|
import { listRolesSuccessResponse } from 'mocks-server/__mockdata__/roles';
|
||||||
|
import { rest, server } from 'mocks-server/server';
|
||||||
|
import { NuqsTestingAdapter } from 'nuqs/adapters/testing';
|
||||||
|
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||||
|
|
||||||
|
import CreateServiceAccountModal from '../CreateServiceAccountModal';
|
||||||
|
|
||||||
|
// Replace the toast module so tests can assert on success/error calls.
jest.mock('@signozhq/sonner', () => ({
	toast: { success: jest.fn(), error: jest.fn() },
}));

const mockToast = jest.mocked(toast);

// MSW endpoint patterns; leading '*' matches any origin.
const ROLES_ENDPOINT = '*/api/v1/roles';
const SERVICE_ACCOUNTS_ENDPOINT = '*/api/v1/service_accounts';
|
||||||
|
|
||||||
|
// Render the modal with the `create-sa` query param preset to true so the
// dialog opens immediately; NuqsTestingAdapter keeps URL state in memory.
function renderModal(): ReturnType<typeof render> {
	return render(
		<NuqsTestingAdapter searchParams={{ 'create-sa': 'true' }} hasMemory>
			<CreateServiceAccountModal />
		</NuqsTestingAdapter>,
	);
}
|
||||||
|
|
||||||
|
describe('CreateServiceAccountModal', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		// Happy-path handlers: roles list resolves and creation succeeds.
		server.use(
			rest.get(ROLES_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
			),
			rest.post(SERVICE_ACCOUNTS_ENDPOINT, (_, res, ctx) =>
				res(ctx.status(201), ctx.json({ status: 'success', data: {} })),
			),
		);
	});

	afterEach(() => {
		server.resetHandlers();
	});

	it('submit button is disabled when form is empty', () => {
		renderModal();

		expect(
			screen.getByRole('button', { name: /Create Service Account/i }),
		).toBeDisabled();
	});

	it('submit button remains disabled when email is invalid', async () => {
		// pointerEventsCheck: 0 skips user-event's pointer-events validation
		// (overlay CSS is not applied in jsdom).
		const user = userEvent.setup({ pointerEventsCheck: 0 });
		renderModal();

		await user.type(screen.getByPlaceholderText('Enter a name'), 'My Bot');
		await user.type(
			screen.getByPlaceholderText('email@example.com'),
			'not-an-email',
		);

		// Pick a role so only the email rule keeps the form invalid.
		await user.click(screen.getByText('Select roles'));
		await user.click(await screen.findByTitle('signoz-admin'));

		await waitFor(() =>
			expect(
				screen.getByRole('button', { name: /Create Service Account/i }),
			).toBeDisabled(),
		);
	});

	it('successful submit shows toast.success and closes modal', async () => {
		const user = userEvent.setup({ pointerEventsCheck: 0 });
		renderModal();

		await user.type(screen.getByPlaceholderText('Enter a name'), 'Deploy Bot');
		await user.type(
			screen.getByPlaceholderText('email@example.com'),
			'deploy@acme.io',
		);

		await user.click(screen.getByText('Select roles'));
		await user.click(await screen.findByTitle('signoz-admin'));

		const submitBtn = screen.getByRole('button', {
			name: /Create Service Account/i,
		});
		// Button enables only once validation has settled.
		await waitFor(() => expect(submitBtn).not.toBeDisabled());
		await user.click(submitBtn);

		await waitFor(() => {
			expect(mockToast.success).toHaveBeenCalledWith(
				'Service account created successfully',
				expect.anything(),
			);
		});

		// Successful creation clears the query param, unmounting the dialog.
		await waitFor(() => {
			expect(
				screen.queryByRole('dialog', { name: /New Service Account/i }),
			).not.toBeInTheDocument();
		});
	});

	it('shows toast.error on API error and keeps modal open', async () => {
		const user = userEvent.setup({ pointerEventsCheck: 0 });

		// Override the happy-path POST handler with a server error.
		server.use(
			rest.post(SERVICE_ACCOUNTS_ENDPOINT, (_, res, ctx) =>
				res(
					ctx.status(500),
					ctx.json({ status: 'error', error: 'Internal Server Error' }),
				),
			),
		);

		renderModal();

		await user.type(screen.getByPlaceholderText('Enter a name'), 'Dupe Bot');
		await user.type(
			screen.getByPlaceholderText('email@example.com'),
			'dupe@acme.io',
		);

		await user.click(screen.getByText('Select roles'));
		await user.click(await screen.findByTitle('signoz-admin'));

		const submitBtn = screen.getByRole('button', {
			name: /Create Service Account/i,
		});
		await waitFor(() => expect(submitBtn).not.toBeDisabled());
		await user.click(submitBtn);

		await waitFor(() => {
			expect(mockToast.error).toHaveBeenCalledWith(
				expect.stringMatching(/Failed to create service account/i),
				expect.anything(),
			);
		});

		// Failure must not close the dialog, so the user can retry.
		expect(
			screen.getByRole('dialog', { name: /New Service Account/i }),
		).toBeInTheDocument();
	});

	it('Cancel button closes modal without submitting', async () => {
		const user = userEvent.setup({ pointerEventsCheck: 0 });
		renderModal();

		await screen.findByRole('dialog', { name: /New Service Account/i });
		await user.click(screen.getByRole('button', { name: /Cancel/i }));

		expect(
			screen.queryByRole('dialog', { name: /New Service Account/i }),
		).not.toBeInTheDocument();
	});

	it('shows "Name is required" after clearing the name field', async () => {
		const user = userEvent.setup({ pointerEventsCheck: 0 });
		renderModal();

		// mode: 'onChange' — typing then clearing triggers the required rule.
		const nameInput = screen.getByPlaceholderText('Enter a name');
		await user.type(nameInput, 'Bot');
		await user.clear(nameInput);

		await screen.findByText('Name is required');
	});

	it('shows "Please enter a valid email address" for a malformed email', async () => {
		const user = userEvent.setup({ pointerEventsCheck: 0 });
		renderModal();

		await user.type(
			screen.getByPlaceholderText('email@example.com'),
			'not-an-email',
		);

		await screen.findByText('Please enter a valid email address');
	});
});
|
||||||
@@ -297,7 +297,11 @@ function CustomTimePicker({
|
|||||||
resetErrorStatus();
|
resetErrorStatus();
|
||||||
};
|
};
|
||||||
|
|
||||||
const handleInputPressEnter = (): void => {
|
const handleInputPressEnter = (
|
||||||
|
event?: React.KeyboardEvent<HTMLInputElement>,
|
||||||
|
): void => {
|
||||||
|
event?.preventDefault();
|
||||||
|
event?.stopPropagation();
|
||||||
// check if the entered time is in the format of 1m, 2h, 3d, 4w
|
// check if the entered time is in the format of 1m, 2h, 3d, 4w
|
||||||
const isTimeDurationShortHandFormat = /^(\d+)([mhdw])$/.test(inputValue);
|
const isTimeDurationShortHandFormat = /^(\d+)([mhdw])$/.test(inputValue);
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
.logs-download-popover {
|
.download-popover {
|
||||||
.ant-popover-inner {
|
.ant-popover-inner {
|
||||||
border-radius: 4px;
|
border-radius: 4px;
|
||||||
border: 1px solid var(--bg-slate-400);
|
border: 1px solid var(--l3-border);
|
||||||
background: linear-gradient(
|
background: linear-gradient(
|
||||||
139deg,
|
139deg,
|
||||||
var(--bg-ink-400) 0%,
|
var(--l2-background) 0%,
|
||||||
var(--bg-ink-500) 98.68%
|
var(--l3-background) 98.68%
|
||||||
);
|
);
|
||||||
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
|
box-shadow: 4px 10px 16px 2px rgba(0, 0, 0, 0.2);
|
||||||
backdrop-filter: blur(20px);
|
backdrop-filter: blur(20px);
|
||||||
@@ -19,7 +19,7 @@
|
|||||||
|
|
||||||
.title {
|
.title {
|
||||||
display: flex;
|
display: flex;
|
||||||
color: var(--bg-slate-50);
|
color: var(--l3-foreground);
|
||||||
font-family: Inter;
|
font-family: Inter;
|
||||||
font-size: 11px;
|
font-size: 11px;
|
||||||
font-style: normal;
|
font-style: normal;
|
||||||
@@ -38,7 +38,7 @@
|
|||||||
flex-direction: column;
|
flex-direction: column;
|
||||||
|
|
||||||
:global(.ant-radio-wrapper) {
|
:global(.ant-radio-wrapper) {
|
||||||
color: var(--bg-vanilla-400);
|
color: var(--foreground);
|
||||||
font-family: Inter;
|
font-family: Inter;
|
||||||
font-size: 13px;
|
font-size: 13px;
|
||||||
}
|
}
|
||||||
@@ -46,7 +46,7 @@
|
|||||||
|
|
||||||
.horizontal-line {
|
.horizontal-line {
|
||||||
height: 1px;
|
height: 1px;
|
||||||
background: var(--bg-slate-400);
|
background: var(--l3-border);
|
||||||
}
|
}
|
||||||
|
|
||||||
.export-button {
|
.export-button {
|
||||||
@@ -59,27 +59,27 @@
|
|||||||
}
|
}
|
||||||
|
|
||||||
.lightMode {
|
.lightMode {
|
||||||
.logs-download-popover {
|
.download-popover {
|
||||||
.ant-popover-inner {
|
.ant-popover-inner {
|
||||||
border: 1px solid var(--bg-vanilla-300);
|
border: 1px solid var(--l2-border);
|
||||||
background: linear-gradient(
|
background: linear-gradient(
|
||||||
139deg,
|
139deg,
|
||||||
var(--bg-vanilla-100) 0%,
|
var(--background) 0%,
|
||||||
var(--bg-vanilla-300) 98.68%
|
var(--l1-background) 98.68%
|
||||||
);
|
);
|
||||||
box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2);
|
box-shadow: 4px 10px 16px 2px rgba(255, 255, 255, 0.2);
|
||||||
}
|
}
|
||||||
.export-options-container {
|
.export-options-container {
|
||||||
.title {
|
.title {
|
||||||
color: var(--bg-ink-200);
|
color: var(--l2-foreground);
|
||||||
}
|
}
|
||||||
|
|
||||||
:global(.ant-radio-wrapper) {
|
:global(.ant-radio-wrapper) {
|
||||||
color: var(--bg-ink-400);
|
color: var(--foreground);
|
||||||
}
|
}
|
||||||
|
|
||||||
.horizontal-line {
|
.horizontal-line {
|
||||||
background: var(--bg-vanilla-300);
|
background: var(--l2-border);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -0,0 +1,331 @@
|
|||||||
|
// eslint-disable-next-line no-restricted-imports
|
||||||
|
import { Provider } from 'react-redux';
|
||||||
|
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||||
|
import { message } from 'antd';
|
||||||
|
import configureStore from 'redux-mock-store';
|
||||||
|
import store from 'store';
|
||||||
|
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||||
|
import { EQueryType } from 'types/common/dashboard';
|
||||||
|
import { DataSource, StringOperators } from 'types/common/queryBuilder';
|
||||||
|
|
||||||
|
import '@testing-library/jest-dom';
|
||||||
|
|
||||||
|
import { DownloadFormats, DownloadRowCounts } from './constants';
|
||||||
|
import DownloadOptionsMenu from './DownloadOptionsMenu';
|
||||||
|
|
||||||
|
// Mock the export API; exposed both as a named and default export because
// the module under test may import it either way.
const mockDownloadExportData = jest.fn().mockResolvedValue(undefined);
jest.mock('api/v1/download/downloadExportData', () => ({
	downloadExportData: (...args: any[]): any => mockDownloadExportData(...args),
	default: (...args: any[]): any => mockDownloadExportData(...args),
}));

// Keep the real antd implementation but stub `message` so the tests can
// assert on success/error notifications.
jest.mock('antd', () => {
	const actual = jest.requireActual('antd');
	return {
		...actual,
		message: {
			success: jest.fn(),
			error: jest.fn(),
		},
	};
});

// Per-test control over the staged query returned by useQueryBuilder.
const mockUseQueryBuilder = jest.fn();
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
	useQueryBuilder: (): any => mockUseQueryBuilder(),
}));

// Redux store snapshot wrapped in a mock store (no real reducers run).
const mockStore = configureStore([]);
const createMockReduxStore = (): any =>
	mockStore({
		...store.getState(),
	});
|
||||||
|
|
||||||
|
// Build a minimal staged builder Query for the given data source: a single
// query named "A" with a noop aggregation, a raw filter expression, and
// empty formulas/trace operators. Tests mutate the returned object to
// exercise specific payload-shaping behavior.
const createMockStagedQuery = (dataSource: DataSource): Query => ({
	id: 'test-query-id',
	queryType: EQueryType.QUERY_BUILDER,
	builder: {
		queryData: [
			{
				queryName: 'A',
				dataSource,
				aggregateOperator: StringOperators.NOOP,
				aggregateAttribute: {
					id: '',
					dataType: '' as any,
					key: '',
					type: '',
				},
				aggregations: [{ expression: 'count()' }],
				functions: [],
				filter: { expression: 'status = 200' },
				filters: { items: [], op: 'AND' },
				groupBy: [],
				expression: 'A',
				disabled: false,
				having: { expression: '' } as any,
				limit: null,
				stepInterval: null,
				orderBy: [{ columnName: 'timestamp', order: 'desc' }],
				legend: '',
				selectColumns: [],
			},
		],
		queryFormulas: [],
		queryTraceOperator: [],
	},
	promql: [],
	clickhouse_sql: [],
});
|
||||||
|
|
||||||
|
// Mount the menu inside a Provider backed by the mocked Redux store.
const renderWithStore = (dataSource: DataSource): void => {
	const mockReduxStore = createMockReduxStore();
	render(
		<Provider store={mockReduxStore}>
			<DownloadOptionsMenu dataSource={dataSource} />
		</Provider>,
	);
};
|
||||||
|
|
||||||
|
// Run the same suite for both signals; `signal` is the expected value of
// `spec.signal` in the export payload for that data source.
describe.each([
	[DataSource.LOGS, 'logs'],
	[DataSource.TRACES, 'traces'],
])('DownloadOptionsMenu for %s', (dataSource, signal) => {
	const testId = `periscope-btn-download-${dataSource}`;

	beforeEach(() => {
		mockDownloadExportData.mockReset().mockResolvedValue(undefined);
		(message.success as jest.Mock).mockReset();
		(message.error as jest.Mock).mockReset();
		mockUseQueryBuilder.mockReturnValue({
			stagedQuery: createMockStagedQuery(dataSource),
		});
	});

	it('renders download button', () => {
		renderWithStore(dataSource);
		const button = screen.getByTestId(testId);
		expect(button).toBeInTheDocument();
		expect(button).toHaveClass('periscope-btn', 'ghost');
	});

	it('shows popover with export options when download button is clicked', () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));

		expect(screen.getByRole('dialog')).toBeInTheDocument();
		expect(screen.getByText('FORMAT')).toBeInTheDocument();
		expect(screen.getByText('Number of Rows')).toBeInTheDocument();
		expect(screen.getByText('Columns')).toBeInTheDocument();
	});

	it('allows changing export format', () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));

		const csvRadio = screen.getByRole('radio', { name: 'csv' });
		const jsonlRadio = screen.getByRole('radio', { name: 'jsonl' });

		// csv is the default format.
		expect(csvRadio).toBeChecked();
		fireEvent.click(jsonlRadio);
		expect(jsonlRadio).toBeChecked();
		expect(csvRadio).not.toBeChecked();
	});

	it('allows changing row limit', () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));

		const tenKRadio = screen.getByRole('radio', { name: '10k' });
		const fiftyKRadio = screen.getByRole('radio', { name: '50k' });

		// 10k is the default row limit.
		expect(tenKRadio).toBeChecked();
		fireEvent.click(fiftyKRadio);
		expect(fiftyKRadio).toBeChecked();
		expect(tenKRadio).not.toBeChecked();
	});

	it('allows changing columns scope', () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));

		const allColumnsRadio = screen.getByRole('radio', { name: 'All' });
		const selectedColumnsRadio = screen.getByRole('radio', { name: 'Selected' });

		// "All" columns is the default scope.
		expect(allColumnsRadio).toBeChecked();
		fireEvent.click(selectedColumnsRadio);
		expect(selectedColumnsRadio).toBeChecked();
		expect(allColumnsRadio).not.toBeChecked();
	});

	it('calls downloadExportData with correct format and POST body', async () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
			const callArgs = mockDownloadExportData.mock.calls[0][0];
			expect(callArgs.format).toBe(DownloadFormats.CSV);
			expect(callArgs.body).toBeDefined();
			expect(callArgs.body.requestType).toBe('raw');
			expect(callArgs.body.compositeQuery.queries).toHaveLength(1);

			const query = callArgs.body.compositeQuery.queries[0];
			expect(query.type).toBe('builder_query');
			expect(query.spec.signal).toBe(signal);
			expect(query.spec.limit).toBe(DownloadRowCounts.TEN_K);
		});
	});

	it('clears groupBy and having in the export payload', async () => {
		// Raw exports should strip aggregation-related settings.
		const mockQuery = createMockStagedQuery(dataSource);
		mockQuery.builder.queryData[0].groupBy = [
			{ key: 'service', dataType: 'string' as any, type: '' },
		];
		mockQuery.builder.queryData[0].having = {
			expression: 'count() > 10',
		} as any;

		mockUseQueryBuilder.mockReturnValue({ stagedQuery: mockQuery });
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
			const callArgs = mockDownloadExportData.mock.calls[0][0];
			const query = callArgs.body.compositeQuery.queries[0];
			expect(query.spec.groupBy).toBeUndefined();
			expect(query.spec.having).toEqual({ expression: '' });
		});
	});

	it('keeps selectColumns when column scope is Selected', async () => {
		const mockQuery = createMockStagedQuery(dataSource);
		mockQuery.builder.queryData[0].selectColumns = [
			{ name: 'http.status', fieldDataType: 'int64', fieldContext: 'attribute' },
		] as any;

		mockUseQueryBuilder.mockReturnValue({ stagedQuery: mockQuery });
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
			const callArgs = mockDownloadExportData.mock.calls[0][0];
			const query = callArgs.body.compositeQuery.queries[0];
			expect(query.spec.selectFields).toEqual([
				expect.objectContaining({
					name: 'http.status',
					fieldDataType: 'int64',
				}),
			]);
		});
	});

	it('sends no selectFields when column scope is All', async () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByRole('radio', { name: 'All' }));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
			const callArgs = mockDownloadExportData.mock.calls[0][0];
			const query = callArgs.body.compositeQuery.queries[0];
			expect(query.spec.selectFields).toBeUndefined();
		});
	});

	it('handles successful export with success message', async () => {
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(message.success).toHaveBeenCalledWith(
				'Export completed successfully',
			);
		});
	});

	it('handles export failure with error message', async () => {
		mockDownloadExportData.mockRejectedValueOnce(new Error('Server error'));
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(message.error).toHaveBeenCalledWith(
				`Failed to export ${dataSource}. Please try again.`,
			);
		});
	});

	it('handles UI state correctly during export process', async () => {
		// Hold the export promise open so the in-flight UI state is observable.
		let resolveDownload: () => void;
		mockDownloadExportData.mockImplementationOnce(
			() =>
				new Promise<void>((resolve) => {
					resolveDownload = resolve;
				}),
		);
		renderWithStore(dataSource);

		fireEvent.click(screen.getByTestId(testId));
		expect(screen.getByRole('dialog')).toBeInTheDocument();

		fireEvent.click(screen.getByText('Export'));

		// While downloading: button disabled and popover dismissed.
		expect(screen.getByTestId(testId)).toBeDisabled();
		expect(screen.queryByRole('dialog')).not.toBeInTheDocument();

		resolveDownload!();

		await waitFor(() => {
			expect(screen.getByTestId(testId)).not.toBeDisabled();
		});
	});
});
|
||||||
|
|
||||||
|
describe('DownloadOptionsMenu for traces with queryTraceOperator', () => {
	const dataSource = DataSource.TRACES;
	const testId = `periscope-btn-download-${dataSource}`;

	beforeEach(() => {
		mockDownloadExportData.mockReset().mockResolvedValue(undefined);
		(message.success as jest.Mock).mockReset();
	});

	it('applies limit and clears groupBy on queryTraceOperator entries', async () => {
		// Stage a query that also carries a trace-operator entry with groupBy,
		// then verify the export payload applies the chosen limit and drops
		// the groupBy on that entry as well.
		const query = createMockStagedQuery(dataSource);
		query.builder.queryTraceOperator = [
			{
				...query.builder.queryData[0],
				queryName: 'TraceOp1',
				expression: 'TraceOp1',
				groupBy: [{ key: 'service', dataType: 'string' as any, type: '' }],
			},
		];

		mockUseQueryBuilder.mockReturnValue({ stagedQuery: query });
		renderWithStore(dataSource);
		fireEvent.click(screen.getByTestId(testId));
		fireEvent.click(screen.getByRole('radio', { name: '50k' }));
		fireEvent.click(screen.getByText('Export'));

		await waitFor(() => {
			expect(mockDownloadExportData).toHaveBeenCalledTimes(1);
			const callArgs = mockDownloadExportData.mock.calls[0][0];
			const queries = callArgs.body.compositeQuery.queries;
			// The operator entry may or may not be emitted as its own query;
			// only assert on it when present.
			const traceOpQuery = queries.find((q: any) => q.spec.name === 'TraceOp1');
			if (traceOpQuery) {
				expect(traceOpQuery.spec.limit).toBe(DownloadRowCounts.FIFTY_K);
				expect(traceOpQuery.spec.groupBy).toBeUndefined();
			}
		});
	});
});
|
||||||
@@ -1,8 +1,8 @@
|
|||||||
import { useCallback, useMemo, useState } from 'react';
|
import { useCallback, useMemo, useState } from 'react';
|
||||||
import { Button, message, Popover, Radio, Tooltip, Typography } from 'antd';
|
import { Button, Popover, Radio, Tooltip, Typography } from 'antd';
|
||||||
import { downloadExportData } from 'api/v1/download/downloadExportData';
|
import { useExportRawData } from 'hooks/useDownloadOptionsMenu/useDownloadOptionsMenu';
|
||||||
import { Download, DownloadIcon, Loader2 } from 'lucide-react';
|
import { Download, DownloadIcon, Loader2 } from 'lucide-react';
|
||||||
import { TelemetryFieldKey } from 'types/api/v5/queryRange';
|
import { DataSource } from 'types/common/queryBuilder';
|
||||||
|
|
||||||
import {
|
import {
|
||||||
DownloadColumnsScopes,
|
DownloadColumnsScopes,
|
||||||
@@ -10,75 +10,34 @@ import {
|
|||||||
DownloadRowCounts,
|
DownloadRowCounts,
|
||||||
} from './constants';
|
} from './constants';
|
||||||
|
|
||||||
import './LogsDownloadOptionsMenu.styles.scss';
|
import './DownloadOptionsMenu.styles.scss';
|
||||||
|
|
||||||
function convertTelemetryFieldKeyToText(key: TelemetryFieldKey): string {
|
interface DownloadOptionsMenuProps {
|
||||||
const prefix = key.fieldContext ? `${key.fieldContext}.` : '';
|
dataSource: DataSource;
|
||||||
const suffix = key.fieldDataType ? `:${key.fieldDataType}` : '';
|
|
||||||
return `${prefix}${key.name}${suffix}`;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
interface LogsDownloadOptionsMenuProps {
|
export default function DownloadOptionsMenu({
|
||||||
startTime: number;
|
dataSource,
|
||||||
endTime: number;
|
}: DownloadOptionsMenuProps): JSX.Element {
|
||||||
filter: string;
|
|
||||||
columns: TelemetryFieldKey[];
|
|
||||||
orderBy: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export default function LogsDownloadOptionsMenu({
|
|
||||||
startTime,
|
|
||||||
endTime,
|
|
||||||
filter,
|
|
||||||
columns,
|
|
||||||
orderBy,
|
|
||||||
}: LogsDownloadOptionsMenuProps): JSX.Element {
|
|
||||||
const [exportFormat, setExportFormat] = useState<string>(DownloadFormats.CSV);
|
const [exportFormat, setExportFormat] = useState<string>(DownloadFormats.CSV);
|
||||||
const [rowLimit, setRowLimit] = useState<number>(DownloadRowCounts.TEN_K);
|
const [rowLimit, setRowLimit] = useState<number>(DownloadRowCounts.TEN_K);
|
||||||
const [columnsScope, setColumnsScope] = useState<string>(
|
const [columnsScope, setColumnsScope] = useState<string>(
|
||||||
DownloadColumnsScopes.ALL,
|
DownloadColumnsScopes.ALL,
|
||||||
);
|
);
|
||||||
const [isDownloading, setIsDownloading] = useState<boolean>(false);
|
|
||||||
const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
|
const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
|
||||||
|
|
||||||
const handleExportRawData = useCallback(async (): Promise<void> => {
|
const { isDownloading, handleExportRawData } = useExportRawData({
|
||||||
setIsPopoverOpen(false);
|
dataSource,
|
||||||
try {
|
});
|
||||||
setIsDownloading(true);
|
|
||||||
const downloadOptions = {
|
|
||||||
source: 'logs',
|
|
||||||
start: startTime,
|
|
||||||
end: endTime,
|
|
||||||
columns:
|
|
||||||
columnsScope === DownloadColumnsScopes.SELECTED
|
|
||||||
? columns.map((col) => convertTelemetryFieldKeyToText(col))
|
|
||||||
: [],
|
|
||||||
filter,
|
|
||||||
orderBy,
|
|
||||||
format: exportFormat,
|
|
||||||
limit: rowLimit,
|
|
||||||
};
|
|
||||||
|
|
||||||
await downloadExportData(downloadOptions);
|
const handleExport = useCallback(async (): Promise<void> => {
|
||||||
message.success('Export completed successfully');
|
setIsPopoverOpen(false);
|
||||||
} catch (error) {
|
await handleExportRawData({
|
||||||
console.error('Error exporting logs:', error);
|
format: exportFormat,
|
||||||
message.error('Failed to export logs. Please try again.');
|
rowLimit,
|
||||||
} finally {
|
clearSelectColumns: columnsScope === DownloadColumnsScopes.ALL,
|
||||||
setIsDownloading(false);
|
});
|
||||||
}
|
}, [exportFormat, rowLimit, columnsScope, handleExportRawData]);
|
||||||
}, [
|
|
||||||
startTime,
|
|
||||||
endTime,
|
|
||||||
columnsScope,
|
|
||||||
columns,
|
|
||||||
filter,
|
|
||||||
orderBy,
|
|
||||||
exportFormat,
|
|
||||||
rowLimit,
|
|
||||||
setIsDownloading,
|
|
||||||
setIsPopoverOpen,
|
|
||||||
]);
|
|
||||||
|
|
||||||
const popoverContent = useMemo(
|
const popoverContent = useMemo(
|
||||||
() => (
|
() => (
|
||||||
@@ -129,7 +88,7 @@ export default function LogsDownloadOptionsMenu({
|
|||||||
<Button
|
<Button
|
||||||
type="primary"
|
type="primary"
|
||||||
icon={<Download size={16} />}
|
icon={<Download size={16} />}
|
||||||
onClick={handleExportRawData}
|
onClick={handleExport}
|
||||||
className="export-button"
|
className="export-button"
|
||||||
disabled={isDownloading}
|
disabled={isDownloading}
|
||||||
loading={isDownloading}
|
loading={isDownloading}
|
||||||
@@ -138,7 +97,7 @@ export default function LogsDownloadOptionsMenu({
|
|||||||
</Button>
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
),
|
),
|
||||||
[exportFormat, rowLimit, columnsScope, isDownloading, handleExportRawData],
|
[exportFormat, rowLimit, columnsScope, isDownloading, handleExport],
|
||||||
);
|
);
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -149,19 +108,19 @@ export default function LogsDownloadOptionsMenu({
|
|||||||
arrow={false}
|
arrow={false}
|
||||||
open={isPopoverOpen}
|
open={isPopoverOpen}
|
||||||
onOpenChange={setIsPopoverOpen}
|
onOpenChange={setIsPopoverOpen}
|
||||||
rootClassName="logs-download-popover"
|
rootClassName="download-popover"
|
||||||
>
|
>
|
||||||
<Tooltip title="Download" placement="top">
|
<Tooltip title="Download" placement="top">
|
||||||
<Button
|
<Button
|
||||||
className="periscope-btn ghost"
|
className="periscope-btn ghost"
|
||||||
icon={
|
icon={
|
||||||
isDownloading ? (
|
isDownloading ? (
|
||||||
<Loader2 size={18} className="animate-spin" />
|
<Loader2 size={14} className="animate-spin" />
|
||||||
) : (
|
) : (
|
||||||
<DownloadIcon size={15} />
|
<DownloadIcon size={14} />
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
data-testid="periscope-btn-download-options"
|
data-testid={`periscope-btn-download-${dataSource}`}
|
||||||
disabled={isDownloading}
|
disabled={isDownloading}
|
||||||
/>
|
/>
|
||||||
</Tooltip>
|
</Tooltip>
|
||||||
@@ -7,7 +7,6 @@ import {
|
|||||||
Check,
|
Check,
|
||||||
ChevronDown,
|
ChevronDown,
|
||||||
Copy,
|
Copy,
|
||||||
Link,
|
|
||||||
LockKeyhole,
|
LockKeyhole,
|
||||||
RefreshCw,
|
RefreshCw,
|
||||||
Trash2,
|
Trash2,
|
||||||
@@ -16,18 +15,21 @@ import {
|
|||||||
import { Input } from '@signozhq/input';
|
import { Input } from '@signozhq/input';
|
||||||
import { toast } from '@signozhq/sonner';
|
import { toast } from '@signozhq/sonner';
|
||||||
import { Select } from 'antd';
|
import { Select } from 'antd';
|
||||||
import getResetPasswordToken from 'api/v1/factor_password/getResetPasswordToken';
|
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||||
import sendInvite from 'api/v1/invite/create';
|
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
import cancelInvite from 'api/v1/invite/id/delete';
|
import {
|
||||||
import deleteUser from 'api/v1/user/id/delete';
|
getResetPasswordToken,
|
||||||
import update from 'api/v1/user/id/update';
|
useDeleteUser,
|
||||||
|
useUpdateUser,
|
||||||
|
} from 'api/generated/services/users';
|
||||||
|
import { AxiosError } from 'axios';
|
||||||
import { MemberRow } from 'components/MembersTable/MembersTable';
|
import { MemberRow } from 'components/MembersTable/MembersTable';
|
||||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||||
import ROUTES from 'constants/routes';
|
import { MemberStatus } from 'container/MembersSettings/utils';
|
||||||
import { INVITE_PREFIX, MemberStatus } from 'container/MembersSettings/utils';
|
|
||||||
import { capitalize } from 'lodash-es';
|
import { capitalize } from 'lodash-es';
|
||||||
import { useTimezone } from 'providers/Timezone';
|
import { useTimezone } from 'providers/Timezone';
|
||||||
import { ROLES } from 'types/roles';
|
import { ROLES } from 'types/roles';
|
||||||
|
import { popupContainer } from 'utils/selectPopupContainer';
|
||||||
|
|
||||||
import './EditMemberDrawer.styles.scss';
|
import './EditMemberDrawer.styles.scss';
|
||||||
|
|
||||||
@@ -36,7 +38,6 @@ export interface EditMemberDrawerProps {
|
|||||||
open: boolean;
|
open: boolean;
|
||||||
onClose: () => void;
|
onClose: () => void;
|
||||||
onComplete: () => void;
|
onComplete: () => void;
|
||||||
onRefetch?: () => void;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||||
@@ -45,24 +46,62 @@ function EditMemberDrawer({
|
|||||||
open,
|
open,
|
||||||
onClose,
|
onClose,
|
||||||
onComplete,
|
onComplete,
|
||||||
onRefetch,
|
|
||||||
}: EditMemberDrawerProps): JSX.Element {
|
}: EditMemberDrawerProps): JSX.Element {
|
||||||
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
const { formatTimezoneAdjustedTimestamp } = useTimezone();
|
||||||
|
|
||||||
const [displayName, setDisplayName] = useState('');
|
const [displayName, setDisplayName] = useState('');
|
||||||
const [selectedRole, setSelectedRole] = useState<ROLES>('VIEWER');
|
const [selectedRole, setSelectedRole] = useState<ROLES>('VIEWER');
|
||||||
const [isSaving, setIsSaving] = useState(false);
|
|
||||||
const [isDeleting, setIsDeleting] = useState(false);
|
|
||||||
const [isGeneratingLink, setIsGeneratingLink] = useState(false);
|
const [isGeneratingLink, setIsGeneratingLink] = useState(false);
|
||||||
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
|
const [showDeleteConfirm, setShowDeleteConfirm] = useState(false);
|
||||||
const [resetLink, setResetLink] = useState<string | null>(null);
|
const [resetLink, setResetLink] = useState<string | null>(null);
|
||||||
const [showResetLinkDialog, setShowResetLinkDialog] = useState(false);
|
const [showResetLinkDialog, setShowResetLinkDialog] = useState(false);
|
||||||
const [hasCopiedResetLink, setHasCopiedResetLink] = useState(false);
|
const [hasCopiedResetLink, setHasCopiedResetLink] = useState(false);
|
||||||
|
const [linkType, setLinkType] = useState<'invite' | 'reset' | null>(null);
|
||||||
|
|
||||||
const isInvited = member?.status === MemberStatus.Invited;
|
const isInvited = member?.status === MemberStatus.Invited;
|
||||||
// Invited member IDs are prefixed with 'invite-'; strip it to get the real invite ID
|
|
||||||
const inviteId =
|
const { mutate: updateUser, isLoading: isSaving } = useUpdateUser({
|
||||||
isInvited && member ? member.id.slice(INVITE_PREFIX.length) : null;
|
mutation: {
|
||||||
|
onSuccess: (): void => {
|
||||||
|
toast.success('Member details updated successfully', { richColors: true });
|
||||||
|
onComplete();
|
||||||
|
onClose();
|
||||||
|
},
|
||||||
|
onError: (err): void => {
|
||||||
|
const errMessage =
|
||||||
|
convertToApiError(
|
||||||
|
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
|
||||||
|
)?.getErrorMessage() || 'An error occurred';
|
||||||
|
toast.error(`Failed to update member details: ${errMessage}`, {
|
||||||
|
richColors: true,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const { mutate: deleteUser, isLoading: isDeleting } = useDeleteUser({
|
||||||
|
mutation: {
|
||||||
|
onSuccess: (): void => {
|
||||||
|
toast.success(
|
||||||
|
isInvited ? 'Invite revoked successfully' : 'Member deleted successfully',
|
||||||
|
{ richColors: true },
|
||||||
|
);
|
||||||
|
setShowDeleteConfirm(false);
|
||||||
|
onComplete();
|
||||||
|
onClose();
|
||||||
|
},
|
||||||
|
onError: (err): void => {
|
||||||
|
const errMessage =
|
||||||
|
convertToApiError(
|
||||||
|
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
|
||||||
|
)?.getErrorMessage() || 'An error occurred';
|
||||||
|
const prefix = isInvited
|
||||||
|
? 'Failed to revoke invite'
|
||||||
|
: 'Failed to delete member';
|
||||||
|
toast.error(`${prefix}: ${errMessage}`, { richColors: true });
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
useEffect(() => {
|
useEffect(() => {
|
||||||
if (member) {
|
if (member) {
|
||||||
@@ -73,7 +112,7 @@ function EditMemberDrawer({
|
|||||||
|
|
||||||
const isDirty =
|
const isDirty =
|
||||||
member !== null &&
|
member !== null &&
|
||||||
(displayName !== member.name || selectedRole !== member.role);
|
(displayName !== (member.name ?? '') || selectedRole !== member.role);
|
||||||
|
|
||||||
const formatTimestamp = useCallback(
|
const formatTimestamp = useCallback(
|
||||||
(ts: string | null | undefined): string => {
|
(ts: string | null | undefined): string => {
|
||||||
@@ -89,106 +128,24 @@ function EditMemberDrawer({
|
|||||||
[formatTimezoneAdjustedTimestamp],
|
[formatTimezoneAdjustedTimestamp],
|
||||||
);
|
);
|
||||||
|
|
||||||
const saveInvitedMember = useCallback(async (): Promise<void> => {
|
const handleSave = useCallback((): void => {
|
||||||
if (!member || !inviteId) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
await cancelInvite({ id: inviteId });
|
|
||||||
try {
|
|
||||||
await sendInvite({
|
|
||||||
email: member.email,
|
|
||||||
name: displayName,
|
|
||||||
role: selectedRole,
|
|
||||||
frontendBaseUrl: window.location.origin,
|
|
||||||
});
|
|
||||||
toast.success('Invite updated successfully', { richColors: true });
|
|
||||||
onComplete();
|
|
||||||
onClose();
|
|
||||||
} catch {
|
|
||||||
onRefetch?.();
|
|
||||||
onClose();
|
|
||||||
toast.error(
|
|
||||||
'Failed to send the updated invite. Please re-invite this member.',
|
|
||||||
{ richColors: true },
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}, [
|
|
||||||
member,
|
|
||||||
inviteId,
|
|
||||||
displayName,
|
|
||||||
selectedRole,
|
|
||||||
onComplete,
|
|
||||||
onClose,
|
|
||||||
onRefetch,
|
|
||||||
]);
|
|
||||||
|
|
||||||
const saveActiveMember = useCallback(async (): Promise<void> => {
|
|
||||||
if (!member) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
await update({
|
|
||||||
userId: member.id,
|
|
||||||
displayName,
|
|
||||||
role: selectedRole,
|
|
||||||
});
|
|
||||||
toast.success('Member details updated successfully', { richColors: true });
|
|
||||||
onComplete();
|
|
||||||
onClose();
|
|
||||||
}, [member, displayName, selectedRole, onComplete, onClose]);
|
|
||||||
|
|
||||||
const handleSave = useCallback(async (): Promise<void> => {
|
|
||||||
if (!member || !isDirty) {
|
if (!member || !isDirty) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
setIsSaving(true);
|
updateUser({
|
||||||
try {
|
pathParams: { id: member.id },
|
||||||
if (isInvited && inviteId) {
|
data: { id: member.id, displayName, role: selectedRole },
|
||||||
await saveInvitedMember();
|
});
|
||||||
} else {
|
}, [member, isDirty, displayName, selectedRole, updateUser]);
|
||||||
await saveActiveMember();
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
toast.error(
|
|
||||||
isInvited ? 'Failed to update invite' : 'Failed to update member details',
|
|
||||||
{ richColors: true },
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
setIsSaving(false);
|
|
||||||
}
|
|
||||||
}, [
|
|
||||||
member,
|
|
||||||
isDirty,
|
|
||||||
isInvited,
|
|
||||||
inviteId,
|
|
||||||
saveInvitedMember,
|
|
||||||
saveActiveMember,
|
|
||||||
]);
|
|
||||||
|
|
||||||
const handleDelete = useCallback(async (): Promise<void> => {
|
const handleDelete = useCallback((): void => {
|
||||||
if (!member) {
|
if (!member) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
setIsDeleting(true);
|
deleteUser({
|
||||||
try {
|
pathParams: { id: member.id },
|
||||||
if (isInvited && inviteId) {
|
});
|
||||||
await cancelInvite({ id: inviteId });
|
}, [member, deleteUser]);
|
||||||
toast.success('Invitation cancelled successfully', { richColors: true });
|
|
||||||
} else {
|
|
||||||
await deleteUser({ userId: member.id });
|
|
||||||
toast.success('Member deleted successfully', { richColors: true });
|
|
||||||
}
|
|
||||||
setShowDeleteConfirm(false);
|
|
||||||
onComplete();
|
|
||||||
onClose();
|
|
||||||
} catch {
|
|
||||||
toast.error(
|
|
||||||
isInvited ? 'Failed to cancel invitation' : 'Failed to delete member',
|
|
||||||
{ richColors: true },
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
setIsDeleting(false);
|
|
||||||
}
|
|
||||||
}, [member, isInvited, inviteId, onComplete, onClose]);
|
|
||||||
|
|
||||||
const handleGenerateResetLink = useCallback(async (): Promise<void> => {
|
const handleGenerateResetLink = useCallback(async (): Promise<void> => {
|
||||||
if (!member) {
|
if (!member) {
|
||||||
@@ -196,11 +153,12 @@ function EditMemberDrawer({
|
|||||||
}
|
}
|
||||||
setIsGeneratingLink(true);
|
setIsGeneratingLink(true);
|
||||||
try {
|
try {
|
||||||
const response = await getResetPasswordToken({ userId: member.id });
|
const response = await getResetPasswordToken({ id: member.id });
|
||||||
if (response?.data?.token) {
|
if (response?.data?.token) {
|
||||||
const link = `${window.location.origin}/password-reset?token=${response.data.token}`;
|
const link = `${window.location.origin}/password-reset?token=${response.data.token}`;
|
||||||
setResetLink(link);
|
setResetLink(link);
|
||||||
setHasCopiedResetLink(false);
|
setHasCopiedResetLink(false);
|
||||||
|
setLinkType(isInvited ? 'invite' : 'reset');
|
||||||
setShowResetLinkDialog(true);
|
setShowResetLinkDialog(true);
|
||||||
onClose();
|
onClose();
|
||||||
} else {
|
} else {
|
||||||
@@ -217,7 +175,7 @@ function EditMemberDrawer({
|
|||||||
} finally {
|
} finally {
|
||||||
setIsGeneratingLink(false);
|
setIsGeneratingLink(false);
|
||||||
}
|
}
|
||||||
}, [member, onClose]);
|
}, [member, isInvited, setLinkType, onClose]);
|
||||||
|
|
||||||
const handleCopyResetLink = useCallback(async (): Promise<void> => {
|
const handleCopyResetLink = useCallback(async (): Promise<void> => {
|
||||||
if (!resetLink) {
|
if (!resetLink) {
|
||||||
@@ -227,36 +185,18 @@ function EditMemberDrawer({
|
|||||||
await navigator.clipboard.writeText(resetLink);
|
await navigator.clipboard.writeText(resetLink);
|
||||||
setHasCopiedResetLink(true);
|
setHasCopiedResetLink(true);
|
||||||
setTimeout(() => setHasCopiedResetLink(false), 2000);
|
setTimeout(() => setHasCopiedResetLink(false), 2000);
|
||||||
toast.success('Reset link copied to clipboard', { richColors: true });
|
toast.success(
|
||||||
|
linkType === 'invite'
|
||||||
|
? 'Invite link copied to clipboard'
|
||||||
|
: 'Reset link copied to clipboard',
|
||||||
|
{ richColors: true },
|
||||||
|
);
|
||||||
} catch {
|
} catch {
|
||||||
toast.error('Failed to copy link', {
|
toast.error('Failed to copy link', {
|
||||||
richColors: true,
|
richColors: true,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}, [resetLink]);
|
}, [resetLink, linkType]);
|
||||||
|
|
||||||
const handleCopyInviteLink = useCallback(async (): Promise<void> => {
|
|
||||||
if (!member?.token) {
|
|
||||||
toast.error('Invite link is not available', {
|
|
||||||
richColors: true,
|
|
||||||
position: 'top-right',
|
|
||||||
});
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const inviteLink = `${window.location.origin}${ROUTES.SIGN_UP}?token=${member.token}`;
|
|
||||||
try {
|
|
||||||
await navigator.clipboard.writeText(inviteLink);
|
|
||||||
toast.success('Invite link copied to clipboard', {
|
|
||||||
richColors: true,
|
|
||||||
position: 'top-right',
|
|
||||||
});
|
|
||||||
} catch {
|
|
||||||
toast.error('Failed to copy invite link', {
|
|
||||||
richColors: true,
|
|
||||||
position: 'top-right',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}, [member]);
|
|
||||||
|
|
||||||
const handleClose = useCallback((): void => {
|
const handleClose = useCallback((): void => {
|
||||||
setShowDeleteConfirm(false);
|
setShowDeleteConfirm(false);
|
||||||
@@ -303,10 +243,7 @@ function EditMemberDrawer({
|
|||||||
onChange={(role): void => setSelectedRole(role as ROLES)}
|
onChange={(role): void => setSelectedRole(role as ROLES)}
|
||||||
className="edit-member-drawer__role-select"
|
className="edit-member-drawer__role-select"
|
||||||
suffixIcon={<ChevronDown size={14} />}
|
suffixIcon={<ChevronDown size={14} />}
|
||||||
getPopupContainer={(triggerNode): HTMLElement =>
|
getPopupContainer={popupContainer}
|
||||||
(triggerNode?.closest('.edit-member-drawer') as HTMLElement) ||
|
|
||||||
document.body
|
|
||||||
}
|
|
||||||
>
|
>
|
||||||
<Select.Option value="ADMIN">{capitalize('ADMIN')}</Select.Option>
|
<Select.Option value="ADMIN">{capitalize('ADMIN')}</Select.Option>
|
||||||
<Select.Option value="EDITOR">{capitalize('EDITOR')}</Select.Option>
|
<Select.Option value="EDITOR">{capitalize('EDITOR')}</Select.Option>
|
||||||
@@ -348,30 +285,22 @@ function EditMemberDrawer({
|
|||||||
onClick={(): void => setShowDeleteConfirm(true)}
|
onClick={(): void => setShowDeleteConfirm(true)}
|
||||||
>
|
>
|
||||||
<Trash2 size={12} />
|
<Trash2 size={12} />
|
||||||
{isInvited ? 'Cancel Invite' : 'Delete Member'}
|
{isInvited ? 'Revoke Invite' : 'Delete Member'}
|
||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
<div className="edit-member-drawer__footer-divider" />
|
<div className="edit-member-drawer__footer-divider" />
|
||||||
|
<Button
|
||||||
{isInvited ? (
|
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--warning"
|
||||||
<Button
|
onClick={handleGenerateResetLink}
|
||||||
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--warning"
|
disabled={isGeneratingLink}
|
||||||
onClick={handleCopyInviteLink}
|
>
|
||||||
disabled={!member?.token}
|
<RefreshCw size={12} />
|
||||||
>
|
{isGeneratingLink
|
||||||
<Link size={12} />
|
? 'Generating...'
|
||||||
Copy Invite Link
|
: isInvited
|
||||||
</Button>
|
? 'Copy Invite Link'
|
||||||
) : (
|
: 'Generate Password Reset Link'}
|
||||||
<Button
|
</Button>
|
||||||
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--warning"
|
|
||||||
onClick={handleGenerateResetLink}
|
|
||||||
disabled={isGeneratingLink}
|
|
||||||
>
|
|
||||||
<RefreshCw size={12} />
|
|
||||||
{isGeneratingLink ? 'Generating...' : 'Generate Password Reset Link'}
|
|
||||||
</Button>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="edit-member-drawer__footer-right">
|
<div className="edit-member-drawer__footer-right">
|
||||||
@@ -394,21 +323,21 @@ function EditMemberDrawer({
|
|||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
|
|
||||||
const deleteDialogTitle = isInvited ? 'Cancel Invitation' : 'Delete Member';
|
const deleteDialogTitle = isInvited ? 'Revoke Invite' : 'Delete Member';
|
||||||
const deleteDialogBody = isInvited ? (
|
const deleteDialogBody = isInvited ? (
|
||||||
<>
|
<>
|
||||||
Are you sure you want to cancel the invitation for{' '}
|
Are you sure you want to revoke the invite for{' '}
|
||||||
<strong>{member?.email}</strong>? They will no longer be able to join the
|
<strong>{member?.email}</strong>? They will no longer be able to join the
|
||||||
workspace using this invite.
|
workspace using this invite.
|
||||||
</>
|
</>
|
||||||
) : (
|
) : (
|
||||||
<>
|
<>
|
||||||
Are you sure you want to delete{' '}
|
Are you sure you want to delete{' '}
|
||||||
<strong>{member?.name || member?.email}</strong>? This will permanently
|
<strong>{member?.name || member?.email}</strong>? This will remove their
|
||||||
remove their access to the workspace.
|
access to the workspace.
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
const deleteConfirmLabel = isInvited ? 'Cancel Invite' : 'Delete Member';
|
const deleteConfirmLabel = isInvited ? 'Revoke Invite' : 'Delete Member';
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
@@ -434,17 +363,19 @@ function EditMemberDrawer({
|
|||||||
onOpenChange={(isOpen): void => {
|
onOpenChange={(isOpen): void => {
|
||||||
if (!isOpen) {
|
if (!isOpen) {
|
||||||
setShowResetLinkDialog(false);
|
setShowResetLinkDialog(false);
|
||||||
|
setLinkType(null);
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
title="Password Reset Link"
|
title={linkType === 'invite' ? 'Invite Link' : 'Password Reset Link'}
|
||||||
showCloseButton
|
showCloseButton
|
||||||
width="base"
|
width="base"
|
||||||
className="reset-link-dialog"
|
className="reset-link-dialog"
|
||||||
>
|
>
|
||||||
<div className="reset-link-dialog__content">
|
<div className="reset-link-dialog__content">
|
||||||
<p className="reset-link-dialog__description">
|
<p className="reset-link-dialog__description">
|
||||||
This creates a one-time link the team member can use to set a new password
|
{linkType === 'invite'
|
||||||
for their SigNoz account.
|
? 'Share this one-time link with the team member to complete their account setup.'
|
||||||
|
: 'This creates a one-time link the team member can use to set a new password for their SigNoz account.'}
|
||||||
</p>
|
</p>
|
||||||
<div className="reset-link-dialog__link-row">
|
<div className="reset-link-dialog__link-row">
|
||||||
<div className="reset-link-dialog__link-text-wrap">
|
<div className="reset-link-dialog__link-text-wrap">
|
||||||
|
|||||||
@@ -1,9 +1,11 @@
|
|||||||
import type { ReactNode } from 'react';
|
import type { ReactNode } from 'react';
|
||||||
import { toast } from '@signozhq/sonner';
|
import { toast } from '@signozhq/sonner';
|
||||||
import getResetPasswordToken from 'api/v1/factor_password/getResetPasswordToken';
|
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||||
import cancelInvite from 'api/v1/invite/id/delete';
|
import {
|
||||||
import deleteUser from 'api/v1/user/id/delete';
|
getResetPasswordToken,
|
||||||
import update from 'api/v1/user/id/update';
|
useDeleteUser,
|
||||||
|
useUpdateUser,
|
||||||
|
} from 'api/generated/services/users';
|
||||||
import { MemberStatus } from 'container/MembersSettings/utils';
|
import { MemberStatus } from 'container/MembersSettings/utils';
|
||||||
import {
|
import {
|
||||||
fireEvent,
|
fireEvent,
|
||||||
@@ -46,11 +48,16 @@ jest.mock('@signozhq/dialog', () => ({
|
|||||||
),
|
),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
jest.mock('api/v1/user/id/update');
|
jest.mock('api/generated/services/users', () => ({
|
||||||
jest.mock('api/v1/user/id/delete');
|
useDeleteUser: jest.fn(),
|
||||||
jest.mock('api/v1/invite/id/delete');
|
useUpdateUser: jest.fn(),
|
||||||
jest.mock('api/v1/invite/create');
|
getResetPasswordToken: jest.fn(),
|
||||||
jest.mock('api/v1/factor_password/getResetPasswordToken');
|
}));
|
||||||
|
|
||||||
|
jest.mock('api/ErrorResponseHandlerForGeneratedAPIs', () => ({
|
||||||
|
convertToApiError: jest.fn(),
|
||||||
|
}));
|
||||||
|
|
||||||
jest.mock('@signozhq/sonner', () => ({
|
jest.mock('@signozhq/sonner', () => ({
|
||||||
toast: {
|
toast: {
|
||||||
success: jest.fn(),
|
success: jest.fn(),
|
||||||
@@ -58,9 +65,8 @@ jest.mock('@signozhq/sonner', () => ({
|
|||||||
},
|
},
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const mockUpdate = jest.mocked(update);
|
const mockUpdateMutate = jest.fn();
|
||||||
const mockDeleteUser = jest.mocked(deleteUser);
|
const mockDeleteMutate = jest.fn();
|
||||||
const mockCancelInvite = jest.mocked(cancelInvite);
|
|
||||||
const mockGetResetPasswordToken = jest.mocked(getResetPasswordToken);
|
const mockGetResetPasswordToken = jest.mocked(getResetPasswordToken);
|
||||||
|
|
||||||
const activeMember = {
|
const activeMember = {
|
||||||
@@ -74,13 +80,12 @@ const activeMember = {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const invitedMember = {
|
const invitedMember = {
|
||||||
id: 'invite-abc123',
|
id: 'abc123',
|
||||||
name: '',
|
name: '',
|
||||||
email: 'bob@signoz.io',
|
email: 'bob@signoz.io',
|
||||||
role: 'VIEWER' as ROLES,
|
role: 'VIEWER' as ROLES,
|
||||||
status: MemberStatus.Invited,
|
status: MemberStatus.Invited,
|
||||||
joinedOn: '1700000000000',
|
joinedOn: '1700000000000',
|
||||||
token: 'tok-xyz',
|
|
||||||
};
|
};
|
||||||
|
|
||||||
function renderDrawer(
|
function renderDrawer(
|
||||||
@@ -100,9 +105,14 @@ function renderDrawer(
|
|||||||
describe('EditMemberDrawer', () => {
|
describe('EditMemberDrawer', () => {
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
jest.clearAllMocks();
|
jest.clearAllMocks();
|
||||||
mockUpdate.mockResolvedValue({ httpStatusCode: 200, data: null });
|
(useUpdateUser as jest.Mock).mockReturnValue({
|
||||||
mockDeleteUser.mockResolvedValue({ httpStatusCode: 200, data: null });
|
mutate: mockUpdateMutate,
|
||||||
mockCancelInvite.mockResolvedValue({ httpStatusCode: 200, data: null });
|
isLoading: false,
|
||||||
|
});
|
||||||
|
(useDeleteUser as jest.Mock).mockReturnValue({
|
||||||
|
mutate: mockDeleteMutate,
|
||||||
|
isLoading: false,
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('renders active member details and disables Save when form is not dirty', () => {
|
it('renders active member details and disables Save when form is not dirty', () => {
|
||||||
@@ -120,6 +130,13 @@ describe('EditMemberDrawer', () => {
|
|||||||
const onComplete = jest.fn();
|
const onComplete = jest.fn();
|
||||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
|
||||||
|
(useUpdateUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockUpdateMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onSuccess?.();
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
renderDrawer({ onComplete });
|
renderDrawer({ onComplete });
|
||||||
|
|
||||||
const nameInput = screen.getByDisplayValue('Alice Smith');
|
const nameInput = screen.getByDisplayValue('Alice Smith');
|
||||||
@@ -132,10 +149,10 @@ describe('EditMemberDrawer', () => {
|
|||||||
await user.click(saveBtn);
|
await user.click(saveBtn);
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(mockUpdate).toHaveBeenCalledWith(
|
expect(mockUpdateMutate).toHaveBeenCalledWith(
|
||||||
expect.objectContaining({
|
expect.objectContaining({
|
||||||
userId: 'user-1',
|
pathParams: { id: 'user-1' },
|
||||||
displayName: 'Alice Updated',
|
data: expect.objectContaining({ displayName: 'Alice Updated' }),
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
expect(onComplete).toHaveBeenCalled();
|
expect(onComplete).toHaveBeenCalled();
|
||||||
@@ -146,6 +163,13 @@ describe('EditMemberDrawer', () => {
|
|||||||
const onComplete = jest.fn();
|
const onComplete = jest.fn();
|
||||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
|
||||||
|
(useDeleteUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockDeleteMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onSuccess?.();
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
renderDrawer({ onComplete });
|
renderDrawer({ onComplete });
|
||||||
|
|
||||||
await user.click(screen.getByRole('button', { name: /delete member/i }));
|
await user.click(screen.getByRole('button', { name: /delete member/i }));
|
||||||
@@ -158,45 +182,184 @@ describe('EditMemberDrawer', () => {
|
|||||||
await user.click(confirmBtns[confirmBtns.length - 1]);
|
await user.click(confirmBtns[confirmBtns.length - 1]);
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(mockDeleteUser).toHaveBeenCalledWith({ userId: 'user-1' });
|
expect(mockDeleteMutate).toHaveBeenCalledWith({
|
||||||
|
pathParams: { id: 'user-1' },
|
||||||
|
});
|
||||||
expect(onComplete).toHaveBeenCalled();
|
expect(onComplete).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows Cancel Invite and Copy Invite Link for invited members; hides Last Modified', () => {
|
it('shows revoke invite and copy invite link for invited members; hides Last Modified', () => {
|
||||||
renderDrawer({ member: invitedMember });
|
renderDrawer({ member: invitedMember });
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
screen.getByRole('button', { name: /cancel invite/i }),
|
screen.getByRole('button', { name: /revoke invite/i }),
|
||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
expect(
|
expect(
|
||||||
screen.getByRole('button', { name: /copy invite link/i }),
|
screen.getByRole('button', { name: /copy invite link/i }),
|
||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
|
expect(
|
||||||
|
screen.queryByRole('button', { name: /generate password reset link/i }),
|
||||||
|
).not.toBeInTheDocument();
|
||||||
expect(screen.getByText('Invited On')).toBeInTheDocument();
|
expect(screen.getByText('Invited On')).toBeInTheDocument();
|
||||||
expect(screen.queryByText('Last Modified')).not.toBeInTheDocument();
|
expect(screen.queryByText('Last Modified')).not.toBeInTheDocument();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('calls cancelInvite after confirming Cancel Invite for invited members', async () => {
|
it('calls deleteUser after confirming revoke invite for invited members', async () => {
|
||||||
const onComplete = jest.fn();
|
const onComplete = jest.fn();
|
||||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
|
||||||
|
(useDeleteUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockDeleteMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onSuccess?.();
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
renderDrawer({ member: invitedMember, onComplete });
|
renderDrawer({ member: invitedMember, onComplete });
|
||||||
|
|
||||||
await user.click(screen.getByRole('button', { name: /cancel invite/i }));
|
await user.click(screen.getByRole('button', { name: /revoke invite/i }));
|
||||||
|
|
||||||
expect(
|
expect(
|
||||||
await screen.findByText(/are you sure you want to cancel the invitation/i),
|
await screen.findByText(/Are you sure you want to revoke the invite/i),
|
||||||
).toBeInTheDocument();
|
).toBeInTheDocument();
|
||||||
|
|
||||||
const confirmBtns = screen.getAllByRole('button', { name: /cancel invite/i });
|
const confirmBtns = screen.getAllByRole('button', { name: /revoke invite/i });
|
||||||
await user.click(confirmBtns[confirmBtns.length - 1]);
|
await user.click(confirmBtns[confirmBtns.length - 1]);
|
||||||
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(mockCancelInvite).toHaveBeenCalledWith({ id: 'abc123' });
|
expect(mockDeleteMutate).toHaveBeenCalledWith({
|
||||||
|
pathParams: { id: 'abc123' },
|
||||||
|
});
|
||||||
expect(onComplete).toHaveBeenCalled();
|
expect(onComplete).toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('calls update API when saving changes for an invited member', async () => {
|
||||||
|
const onComplete = jest.fn();
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
|
||||||
|
(useUpdateUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockUpdateMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onSuccess?.();
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
|
renderDrawer({ member: { ...invitedMember, name: 'Bob' }, onComplete });
|
||||||
|
|
||||||
|
const nameInput = screen.getByDisplayValue('Bob');
|
||||||
|
await user.clear(nameInput);
|
||||||
|
await user.type(nameInput, 'Bob Updated');
|
||||||
|
|
||||||
|
const saveBtn = screen.getByRole('button', { name: /save member details/i });
|
||||||
|
await waitFor(() => expect(saveBtn).not.toBeDisabled());
|
||||||
|
await user.click(saveBtn);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockUpdateMutate).toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({
|
||||||
|
pathParams: { id: 'abc123' },
|
||||||
|
data: expect.objectContaining({ displayName: 'Bob Updated' }),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
expect(onComplete).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('error handling', () => {
|
||||||
|
const mockConvertToApiError = jest.mocked(convertToApiError);
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mockConvertToApiError.mockReturnValue({
|
||||||
|
getErrorMessage: (): string => 'Something went wrong on server',
|
||||||
|
} as ReturnType<typeof convertToApiError>);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows API error message when updateUser fails', async () => {
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
const mockToast = jest.mocked(toast);
|
||||||
|
|
||||||
|
(useUpdateUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockUpdateMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onError?.({});
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
|
renderDrawer();
|
||||||
|
|
||||||
|
const nameInput = screen.getByDisplayValue('Alice Smith');
|
||||||
|
await user.clear(nameInput);
|
||||||
|
await user.type(nameInput, 'Alice Updated');
|
||||||
|
|
||||||
|
const saveBtn = screen.getByRole('button', { name: /save member details/i });
|
||||||
|
await waitFor(() => expect(saveBtn).not.toBeDisabled());
|
||||||
|
await user.click(saveBtn);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockToast.error).toHaveBeenCalledWith(
|
||||||
|
'Failed to update member details: Something went wrong on server',
|
||||||
|
expect.anything(),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows API error message when deleteUser fails for active member', async () => {
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
const mockToast = jest.mocked(toast);
|
||||||
|
|
||||||
|
(useDeleteUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockDeleteMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onError?.({});
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
|
renderDrawer();
|
||||||
|
|
||||||
|
await user.click(screen.getByRole('button', { name: /delete member/i }));
|
||||||
|
const confirmBtns = screen.getAllByRole('button', {
|
||||||
|
name: /delete member/i,
|
||||||
|
});
|
||||||
|
await user.click(confirmBtns[confirmBtns.length - 1]);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockToast.error).toHaveBeenCalledWith(
|
||||||
|
'Failed to delete member: Something went wrong on server',
|
||||||
|
expect.anything(),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows API error message when deleteUser fails for invited member', async () => {
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
const mockToast = jest.mocked(toast);
|
||||||
|
|
||||||
|
(useDeleteUser as jest.Mock).mockImplementation((options) => ({
|
||||||
|
mutate: mockDeleteMutate.mockImplementation(() => {
|
||||||
|
options?.mutation?.onError?.({});
|
||||||
|
}),
|
||||||
|
isLoading: false,
|
||||||
|
}));
|
||||||
|
|
||||||
|
renderDrawer({ member: invitedMember });
|
||||||
|
|
||||||
|
await user.click(screen.getByRole('button', { name: /revoke invite/i }));
|
||||||
|
const confirmBtns = screen.getAllByRole('button', {
|
||||||
|
name: /revoke invite/i,
|
||||||
|
});
|
||||||
|
await user.click(confirmBtns[confirmBtns.length - 1]);
|
||||||
|
|
||||||
|
await waitFor(() => {
|
||||||
|
expect(mockToast.error).toHaveBeenCalledWith(
|
||||||
|
'Failed to revoke invite: Something went wrong on server',
|
||||||
|
expect.anything(),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
describe('Generate Password Reset Link', () => {
|
describe('Generate Password Reset Link', () => {
|
||||||
const mockWriteText = jest.fn().mockResolvedValue(undefined);
|
const mockWriteText = jest.fn().mockResolvedValue(undefined);
|
||||||
let clipboardSpy: jest.SpyInstance | undefined;
|
let clipboardSpy: jest.SpyInstance | undefined;
|
||||||
@@ -215,8 +378,8 @@ describe('EditMemberDrawer', () => {
|
|||||||
.spyOn(navigator.clipboard, 'writeText')
|
.spyOn(navigator.clipboard, 'writeText')
|
||||||
.mockImplementation(mockWriteText);
|
.mockImplementation(mockWriteText);
|
||||||
mockGetResetPasswordToken.mockResolvedValue({
|
mockGetResetPasswordToken.mockResolvedValue({
|
||||||
httpStatusCode: 200,
|
status: 'success',
|
||||||
data: { token: 'reset-tok-abc', userId: 'user-1' },
|
data: { token: 'reset-tok-abc', id: 'user-1' },
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -237,7 +400,7 @@ describe('EditMemberDrawer', () => {
|
|||||||
name: /password reset link/i,
|
name: /password reset link/i,
|
||||||
});
|
});
|
||||||
expect(mockGetResetPasswordToken).toHaveBeenCalledWith({
|
expect(mockGetResetPasswordToken).toHaveBeenCalledWith({
|
||||||
userId: 'user-1',
|
id: 'user-1',
|
||||||
});
|
});
|
||||||
expect(dialog).toBeInTheDocument();
|
expect(dialog).toBeInTheDocument();
|
||||||
expect(dialog).toHaveTextContent('reset-tok-abc');
|
expect(dialog).toHaveTextContent('reset-tok-abc');
|
||||||
@@ -260,7 +423,6 @@ describe('EditMemberDrawer', () => {
|
|||||||
|
|
||||||
fireEvent.click(screen.getByRole('button', { name: /^copy$/i }));
|
fireEvent.click(screen.getByRole('button', { name: /^copy$/i }));
|
||||||
|
|
||||||
// Verify success path: writeText called with the correct link
|
|
||||||
await waitFor(() => {
|
await waitFor(() => {
|
||||||
expect(mockToast.success).toHaveBeenCalledWith(
|
expect(mockToast.success).toHaveBeenCalledWith(
|
||||||
'Reset link copied to clipboard',
|
'Reset link copied to clipboard',
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import { cloneDeep, debounce } from 'lodash-es';
|
|||||||
import APIError from 'types/api/error';
|
import APIError from 'types/api/error';
|
||||||
import { ROLES } from 'types/roles';
|
import { ROLES } from 'types/roles';
|
||||||
import { EMAIL_REGEX } from 'utils/app';
|
import { EMAIL_REGEX } from 'utils/app';
|
||||||
|
import { popupContainer } from 'utils/selectPopupContainer';
|
||||||
import { v4 as uuid } from 'uuid';
|
import { v4 as uuid } from 'uuid';
|
||||||
|
|
||||||
import './InviteMembersModal.styles.scss';
|
import './InviteMembersModal.styles.scss';
|
||||||
@@ -254,6 +255,8 @@ function InviteMembersModal({
|
|||||||
value={row.email}
|
value={row.email}
|
||||||
onChange={(e): void => updateEmail(row.id, e.target.value)}
|
onChange={(e): void => updateEmail(row.id, e.target.value)}
|
||||||
className="team-member-email-input"
|
className="team-member-email-input"
|
||||||
|
name={`invite-email-${row.id}`}
|
||||||
|
autoComplete="email"
|
||||||
/>
|
/>
|
||||||
{emailValidity[row.id] === false && row.email.trim() !== '' && (
|
{emailValidity[row.id] === false && row.email.trim() !== '' && (
|
||||||
<span className="email-error-message">Invalid email address</span>
|
<span className="email-error-message">Invalid email address</span>
|
||||||
@@ -266,10 +269,7 @@ function InviteMembersModal({
|
|||||||
className="team-member-role-select"
|
className="team-member-role-select"
|
||||||
placeholder="Select roles"
|
placeholder="Select roles"
|
||||||
suffixIcon={<ChevronDown size={14} />}
|
suffixIcon={<ChevronDown size={14} />}
|
||||||
getPopupContainer={(triggerNode): HTMLElement =>
|
getPopupContainer={popupContainer}
|
||||||
(triggerNode?.closest('.invite-members-modal') as HTMLElement) ||
|
|
||||||
document.body
|
|
||||||
}
|
|
||||||
>
|
>
|
||||||
<Select.Option value="VIEWER">Viewer</Select.Option>
|
<Select.Option value="VIEWER">Viewer</Select.Option>
|
||||||
<Select.Option value="EDITOR">Editor</Select.Option>
|
<Select.Option value="EDITOR">Editor</Select.Option>
|
||||||
|
|||||||
@@ -1,341 +0,0 @@
|
|||||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
|
||||||
import { message } from 'antd';
|
|
||||||
import { ENVIRONMENT } from 'constants/env';
|
|
||||||
import { server } from 'mocks-server/server';
|
|
||||||
import { rest } from 'msw';
|
|
||||||
import { TelemetryFieldKey } from 'types/api/v5/queryRange';
|
|
||||||
|
|
||||||
import '@testing-library/jest-dom';
|
|
||||||
|
|
||||||
import { DownloadFormats, DownloadRowCounts } from './constants';
|
|
||||||
import LogsDownloadOptionsMenu from './LogsDownloadOptionsMenu';
|
|
||||||
|
|
||||||
// Mock antd message
|
|
||||||
jest.mock('antd', () => {
|
|
||||||
const actual = jest.requireActual('antd');
|
|
||||||
return {
|
|
||||||
...actual,
|
|
||||||
message: {
|
|
||||||
success: jest.fn(),
|
|
||||||
error: jest.fn(),
|
|
||||||
},
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
const TEST_IDS = {
|
|
||||||
DOWNLOAD_BUTTON: 'periscope-btn-download-options',
|
|
||||||
} as const;
|
|
||||||
|
|
||||||
interface TestProps {
|
|
||||||
startTime: number;
|
|
||||||
endTime: number;
|
|
||||||
filter: string;
|
|
||||||
columns: TelemetryFieldKey[];
|
|
||||||
orderBy: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
const createTestProps = (): TestProps => ({
|
|
||||||
startTime: 1631234567890,
|
|
||||||
endTime: 1631234567999,
|
|
||||||
filter: 'status = 200',
|
|
||||||
columns: [
|
|
||||||
{
|
|
||||||
name: 'http.status',
|
|
||||||
fieldContext: 'attribute',
|
|
||||||
fieldDataType: 'int64',
|
|
||||||
} as TelemetryFieldKey,
|
|
||||||
],
|
|
||||||
orderBy: 'timestamp:desc',
|
|
||||||
});
|
|
||||||
|
|
||||||
const testRenderContent = (props: TestProps): void => {
|
|
||||||
render(
|
|
||||||
<LogsDownloadOptionsMenu
|
|
||||||
startTime={props.startTime}
|
|
||||||
endTime={props.endTime}
|
|
||||||
filter={props.filter}
|
|
||||||
columns={props.columns}
|
|
||||||
orderBy={props.orderBy}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
const testSuccessResponse = (res: any, ctx: any): any =>
|
|
||||||
res(
|
|
||||||
ctx.status(200),
|
|
||||||
ctx.set('Content-Type', 'application/octet-stream'),
|
|
||||||
ctx.set('Content-Disposition', 'attachment; filename="export.csv"'),
|
|
||||||
ctx.body('id,value\n1,2\n'),
|
|
||||||
);
|
|
||||||
|
|
||||||
describe('LogsDownloadOptionsMenu', () => {
|
|
||||||
const BASE_URL = ENVIRONMENT.baseURL;
|
|
||||||
const EXPORT_URL = `${BASE_URL}/api/v1/export_raw_data`;
|
|
||||||
let requestSpy: jest.Mock<any, any>;
|
|
||||||
const setupDefaultServer = (): void => {
|
|
||||||
server.use(
|
|
||||||
rest.get(EXPORT_URL, (req, res, ctx) => {
|
|
||||||
const params = req.url.searchParams;
|
|
||||||
const payload = {
|
|
||||||
start: Number(params.get('start')),
|
|
||||||
end: Number(params.get('end')),
|
|
||||||
filter: params.get('filter'),
|
|
||||||
columns: params.getAll('columns'),
|
|
||||||
order_by: params.get('order_by'),
|
|
||||||
limit: Number(params.get('limit')),
|
|
||||||
format: params.get('format'),
|
|
||||||
};
|
|
||||||
requestSpy(payload);
|
|
||||||
return testSuccessResponse(res, ctx);
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Mock URL.createObjectURL used by download logic
|
|
||||||
const originalCreateObjectURL = URL.createObjectURL;
|
|
||||||
const originalRevokeObjectURL = URL.revokeObjectURL;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
requestSpy = jest.fn();
|
|
||||||
setupDefaultServer();
|
|
||||||
(message.success as jest.Mock).mockReset();
|
|
||||||
(message.error as jest.Mock).mockReset();
|
|
||||||
// jsdom doesn't implement it by default
|
|
||||||
((URL as unknown) as {
|
|
||||||
createObjectURL: (b: Blob) => string;
|
|
||||||
}).createObjectURL = jest.fn(() => 'blob:mock');
|
|
||||||
((URL as unknown) as {
|
|
||||||
revokeObjectURL: (u: string) => void;
|
|
||||||
}).revokeObjectURL = jest.fn();
|
|
||||||
});
|
|
||||||
|
|
||||||
beforeAll(() => {
|
|
||||||
server.listen();
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
server.resetHandlers();
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
server.close();
|
|
||||||
// restore
|
|
||||||
URL.createObjectURL = originalCreateObjectURL;
|
|
||||||
URL.revokeObjectURL = originalRevokeObjectURL;
|
|
||||||
});
|
|
||||||
|
|
||||||
it('renders download button', () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
const button = screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON);
|
|
||||||
expect(button).toBeInTheDocument();
|
|
||||||
expect(button).toHaveClass('periscope-btn', 'ghost');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows popover with export options when download button is clicked', () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
render(
|
|
||||||
<LogsDownloadOptionsMenu
|
|
||||||
startTime={props.startTime}
|
|
||||||
endTime={props.endTime}
|
|
||||||
filter={props.filter}
|
|
||||||
columns={props.columns}
|
|
||||||
orderBy={props.orderBy}
|
|
||||||
/>,
|
|
||||||
);
|
|
||||||
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
|
|
||||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('FORMAT')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('Number of Rows')).toBeInTheDocument();
|
|
||||||
expect(screen.getByText('Columns')).toBeInTheDocument();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('allows changing export format', () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
|
|
||||||
const csvRadio = screen.getByRole('radio', { name: 'csv' });
|
|
||||||
const jsonlRadio = screen.getByRole('radio', { name: 'jsonl' });
|
|
||||||
|
|
||||||
expect(csvRadio).toBeChecked();
|
|
||||||
fireEvent.click(jsonlRadio);
|
|
||||||
expect(jsonlRadio).toBeChecked();
|
|
||||||
expect(csvRadio).not.toBeChecked();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('allows changing row limit', () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
|
|
||||||
const tenKRadio = screen.getByRole('radio', { name: '10k' });
|
|
||||||
const fiftyKRadio = screen.getByRole('radio', { name: '50k' });
|
|
||||||
|
|
||||||
expect(tenKRadio).toBeChecked();
|
|
||||||
fireEvent.click(fiftyKRadio);
|
|
||||||
expect(fiftyKRadio).toBeChecked();
|
|
||||||
expect(tenKRadio).not.toBeChecked();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('allows changing columns scope', () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
|
|
||||||
const allColumnsRadio = screen.getByRole('radio', { name: 'All' });
|
|
||||||
const selectedColumnsRadio = screen.getByRole('radio', { name: 'Selected' });
|
|
||||||
|
|
||||||
expect(allColumnsRadio).toBeChecked();
|
|
||||||
fireEvent.click(selectedColumnsRadio);
|
|
||||||
expect(selectedColumnsRadio).toBeChecked();
|
|
||||||
expect(allColumnsRadio).not.toBeChecked();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('calls downloadExportData with correct parameters when export button is clicked (Selected columns)', async () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
fireEvent.click(screen.getByRole('radio', { name: 'Selected' }));
|
|
||||||
fireEvent.click(screen.getByText('Export'));
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(requestSpy).toHaveBeenCalledWith(
|
|
||||||
expect.objectContaining({
|
|
||||||
start: props.startTime,
|
|
||||||
end: props.endTime,
|
|
||||||
columns: ['attribute.http.status:int64'],
|
|
||||||
filter: props.filter,
|
|
||||||
order_by: props.orderBy,
|
|
||||||
format: DownloadFormats.CSV,
|
|
||||||
limit: DownloadRowCounts.TEN_K,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('calls downloadExportData with correct parameters when export button is clicked', async () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
fireEvent.click(screen.getByRole('radio', { name: 'All' }));
|
|
||||||
fireEvent.click(screen.getByText('Export'));
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(requestSpy).toHaveBeenCalledWith(
|
|
||||||
expect.objectContaining({
|
|
||||||
start: props.startTime,
|
|
||||||
end: props.endTime,
|
|
||||||
columns: [],
|
|
||||||
filter: props.filter,
|
|
||||||
order_by: props.orderBy,
|
|
||||||
format: DownloadFormats.CSV,
|
|
||||||
limit: DownloadRowCounts.TEN_K,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles successful export with success message', async () => {
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
fireEvent.click(screen.getByText('Export'));
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(message.success).toHaveBeenCalledWith(
|
|
||||||
'Export completed successfully',
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles export failure with error message', async () => {
|
|
||||||
// Override handler to return 500 for this test
|
|
||||||
server.use(rest.get(EXPORT_URL, (_req, res, ctx) => res(ctx.status(500))));
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
fireEvent.click(screen.getByText('Export'));
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(message.error).toHaveBeenCalledWith(
|
|
||||||
'Failed to export logs. Please try again.',
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles UI state correctly during export process', async () => {
|
|
||||||
server.use(
|
|
||||||
rest.get(EXPORT_URL, (_req, res, ctx) => testSuccessResponse(res, ctx)),
|
|
||||||
);
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
// Open popover
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
expect(screen.getByRole('dialog')).toBeInTheDocument();
|
|
||||||
|
|
||||||
// Start export
|
|
||||||
fireEvent.click(screen.getByText('Export'));
|
|
||||||
|
|
||||||
// Check button is disabled during export
|
|
||||||
expect(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON)).toBeDisabled();
|
|
||||||
|
|
||||||
// Check popover is closed immediately after export starts
|
|
||||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
|
|
||||||
|
|
||||||
// Wait for export to complete and verify button is enabled again
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON)).not.toBeDisabled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses filename from Content-Disposition and triggers download click', async () => {
|
|
||||||
server.use(
|
|
||||||
rest.get(EXPORT_URL, (_req, res, ctx) =>
|
|
||||||
res(
|
|
||||||
ctx.status(200),
|
|
||||||
ctx.set('Content-Type', 'application/octet-stream'),
|
|
||||||
ctx.set('Content-Disposition', 'attachment; filename="report.jsonl"'),
|
|
||||||
ctx.body('row\n'),
|
|
||||||
),
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
const originalCreateElement = document.createElement.bind(document);
|
|
||||||
const anchorEl = originalCreateElement('a') as HTMLAnchorElement;
|
|
||||||
const setAttrSpy = jest.spyOn(anchorEl, 'setAttribute');
|
|
||||||
const clickSpy = jest.spyOn(anchorEl, 'click');
|
|
||||||
const removeSpy = jest.spyOn(anchorEl, 'remove');
|
|
||||||
const createElSpy = jest
|
|
||||||
.spyOn(document, 'createElement')
|
|
||||||
.mockImplementation((tagName: any): any =>
|
|
||||||
tagName === 'a' ? anchorEl : originalCreateElement(tagName),
|
|
||||||
);
|
|
||||||
const appendSpy = jest.spyOn(document.body, 'appendChild');
|
|
||||||
|
|
||||||
const props = createTestProps();
|
|
||||||
testRenderContent(props);
|
|
||||||
|
|
||||||
fireEvent.click(screen.getByTestId(TEST_IDS.DOWNLOAD_BUTTON));
|
|
||||||
fireEvent.click(screen.getByText('Export'));
|
|
||||||
|
|
||||||
await waitFor(() => {
|
|
||||||
expect(appendSpy).toHaveBeenCalledWith(anchorEl);
|
|
||||||
expect(setAttrSpy).toHaveBeenCalledWith('download', 'report.jsonl');
|
|
||||||
expect(clickSpy).toHaveBeenCalled();
|
|
||||||
expect(removeSpy).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
expect(anchorEl.getAttribute('download')).toBe('report.jsonl');
|
|
||||||
|
|
||||||
createElSpy.mockRestore();
|
|
||||||
appendSpy.mockRestore();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -162,7 +162,7 @@
|
|||||||
font-weight: var(--paragraph-base-400-font-weight);
|
font-weight: var(--paragraph-base-400-font-weight);
|
||||||
color: var(--foreground);
|
color: var(--foreground);
|
||||||
margin: 0;
|
margin: 0;
|
||||||
line-height: var(--paragraph-base-400-font-height);
|
line-height: var(--paragraph-base-400-line-height);
|
||||||
|
|
||||||
strong {
|
strong {
|
||||||
font-weight: var(--font-weight-medium);
|
font-weight: var(--font-weight-medium);
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import type React from 'react';
|
import type React from 'react';
|
||||||
import { Badge } from '@signozhq/badge';
|
import { Badge } from '@signozhq/badge';
|
||||||
import { Pagination, Table, Tooltip } from 'antd';
|
import { Table, Tooltip } from 'antd';
|
||||||
import type { ColumnsType, SorterResult } from 'antd/es/table/interface';
|
import type { ColumnsType, SorterResult } from 'antd/es/table/interface';
|
||||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||||
import { MemberStatus } from 'container/MembersSettings/utils';
|
import { MemberStatus } from 'container/MembersSettings/utils';
|
||||||
@@ -18,7 +18,6 @@ export interface MemberRow {
|
|||||||
status: MemberStatus;
|
status: MemberStatus;
|
||||||
joinedOn: string | null;
|
joinedOn: string | null;
|
||||||
updatedAt?: string | null;
|
updatedAt?: string | null;
|
||||||
token?: string | null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
interface MembersTableProps {
|
interface MembersTableProps {
|
||||||
@@ -64,11 +63,23 @@ function StatusBadge({ status }: { status: MemberRow['status'] }): JSX.Element {
|
|||||||
</Badge>
|
</Badge>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
return (
|
if (status === MemberStatus.Deleted) {
|
||||||
<Badge color="amber" variant="outline">
|
return (
|
||||||
INVITED
|
<Badge color="cherry" variant="outline">
|
||||||
</Badge>
|
DELETED
|
||||||
);
|
</Badge>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (status === MemberStatus.Invited) {
|
||||||
|
return (
|
||||||
|
<Badge color="amber" variant="outline">
|
||||||
|
INVITED
|
||||||
|
</Badge>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return <Badge color="vanilla">⎯</Badge>;
|
||||||
}
|
}
|
||||||
|
|
||||||
function MembersEmptyState({
|
function MembersEmptyState({
|
||||||
@@ -199,14 +210,30 @@ function MembersTable({
|
|||||||
dataSource={data}
|
dataSource={data}
|
||||||
rowKey="id"
|
rowKey="id"
|
||||||
loading={loading}
|
loading={loading}
|
||||||
pagination={false}
|
pagination={{
|
||||||
|
current: currentPage,
|
||||||
|
pageSize,
|
||||||
|
total,
|
||||||
|
showTotal: showPaginationTotal,
|
||||||
|
showSizeChanger: false,
|
||||||
|
onChange: onPageChange,
|
||||||
|
className: 'members-table-pagination',
|
||||||
|
hideOnSinglePage: true,
|
||||||
|
}}
|
||||||
rowClassName={(_, index): string =>
|
rowClassName={(_, index): string =>
|
||||||
index % 2 === 0 ? 'members-table-row--tinted' : ''
|
index % 2 === 0 ? 'members-table-row--tinted' : ''
|
||||||
}
|
}
|
||||||
onRow={(record): React.HTMLAttributes<HTMLElement> => ({
|
onRow={(record): React.HTMLAttributes<HTMLElement> => {
|
||||||
onClick: (): void => onRowClick?.(record),
|
const isClickable = onRowClick && record.status !== MemberStatus.Deleted;
|
||||||
style: onRowClick ? { cursor: 'pointer' } : undefined,
|
return {
|
||||||
})}
|
onClick: (): void => {
|
||||||
|
if (isClickable) {
|
||||||
|
onRowClick(record);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
style: isClickable ? { cursor: 'pointer' } : undefined,
|
||||||
|
};
|
||||||
|
}}
|
||||||
onChange={(_, __, sorter): void => {
|
onChange={(_, __, sorter): void => {
|
||||||
if (onSortChange) {
|
if (onSortChange) {
|
||||||
onSortChange(
|
onSortChange(
|
||||||
@@ -220,17 +247,6 @@ function MembersTable({
|
|||||||
}}
|
}}
|
||||||
className="members-table"
|
className="members-table"
|
||||||
/>
|
/>
|
||||||
{total > pageSize && (
|
|
||||||
<Pagination
|
|
||||||
current={currentPage}
|
|
||||||
pageSize={pageSize}
|
|
||||||
total={total}
|
|
||||||
showTotal={showPaginationTotal}
|
|
||||||
showSizeChanger={false}
|
|
||||||
onChange={onPageChange}
|
|
||||||
className="members-table-pagination"
|
|
||||||
/>
|
|
||||||
)}
|
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -24,13 +24,12 @@ const mockActiveMembers: MemberRow[] = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
const mockInvitedMember: MemberRow = {
|
const mockInvitedMember: MemberRow = {
|
||||||
id: 'invite-abc',
|
id: 'inv-abc',
|
||||||
name: '',
|
name: '',
|
||||||
email: 'charlie@signoz.io',
|
email: 'charlie@signoz.io',
|
||||||
role: 'EDITOR' as ROLES,
|
role: 'EDITOR' as ROLES,
|
||||||
status: MemberStatus.Invited,
|
status: MemberStatus.Invited,
|
||||||
joinedOn: null,
|
joinedOn: null,
|
||||||
token: 'tok-123',
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const defaultProps = {
|
const defaultProps = {
|
||||||
@@ -93,6 +92,34 @@ describe('MembersTable', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('renders DELETED badge and does not call onRowClick when a deleted member row is clicked', async () => {
|
||||||
|
const onRowClick = jest.fn();
|
||||||
|
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||||
|
const deletedMember: MemberRow = {
|
||||||
|
id: 'user-del',
|
||||||
|
name: 'Dave Deleted',
|
||||||
|
email: 'dave@signoz.io',
|
||||||
|
role: 'VIEWER' as ROLES,
|
||||||
|
status: MemberStatus.Deleted,
|
||||||
|
joinedOn: null,
|
||||||
|
};
|
||||||
|
|
||||||
|
render(
|
||||||
|
<MembersTable
|
||||||
|
{...defaultProps}
|
||||||
|
data={[...mockActiveMembers, deletedMember]}
|
||||||
|
total={3}
|
||||||
|
onRowClick={onRowClick}
|
||||||
|
/>,
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(screen.getByText('DELETED')).toBeInTheDocument();
|
||||||
|
await user.click(screen.getByText('Dave Deleted'));
|
||||||
|
expect(onRowClick).not.toHaveBeenCalledWith(
|
||||||
|
expect.objectContaining({ id: 'user-del' }),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
it('shows "No members found" empty state when no data and no search query', () => {
|
it('shows "No members found" empty state when no data and no search query', () => {
|
||||||
render(<MembersTable {...defaultProps} data={[]} total={0} searchQuery="" />);
|
render(<MembersTable {...defaultProps} data={[]} total={0} searchQuery="" />);
|
||||||
|
|
||||||
|
|||||||
90
frontend/src/components/RolesSelect/RolesSelect.styles.scss
Normal file
90
frontend/src/components/RolesSelect/RolesSelect.styles.scss
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
.roles-select {
|
||||||
|
width: 100%;
|
||||||
|
|
||||||
|
// todo: styles should easeup once upgrade to select from periscope
|
||||||
|
.ant-select-selector {
|
||||||
|
min-height: 32px;
|
||||||
|
background-color: var(--l2-background) !important;
|
||||||
|
border: 1px solid var(--border) !important;
|
||||||
|
border-radius: 2px;
|
||||||
|
padding: 2px var(--padding-2) !important;
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-select-selection-overflow {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--spacing-1);
|
||||||
|
padding: 2px 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-select-selection-overflow-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-select-selection-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
height: 22px;
|
||||||
|
font-size: var(--font-size-sm);
|
||||||
|
color: var(--l1-foreground);
|
||||||
|
background: var(--l3-background);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 2px;
|
||||||
|
padding: 0 var(--padding-1) 0 6px;
|
||||||
|
line-height: var(--line-height-20);
|
||||||
|
letter-spacing: -0.07px;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-select-selection-item-remove {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
color: var(--foreground);
|
||||||
|
margin-left: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-select-selection-placeholder {
|
||||||
|
font-size: var(--font-size-sm);
|
||||||
|
color: var(--l3-foreground);
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-select-arrow {
|
||||||
|
color: var(--foreground);
|
||||||
|
}
|
||||||
|
|
||||||
|
&.ant-select-focused .ant-select-selector,
|
||||||
|
&:not(.ant-select-disabled):hover .ant-select-selector {
|
||||||
|
border-color: var(--primary) !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.roles-select-error {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: space-between;
|
||||||
|
gap: var(--spacing-3);
|
||||||
|
padding: var(--padding-1) var(--padding-2);
|
||||||
|
color: var(--destructive);
|
||||||
|
font-size: var(--font-size-xs);
|
||||||
|
|
||||||
|
&__msg {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--spacing-3);
|
||||||
|
}
|
||||||
|
|
||||||
|
&__retry-btn {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
background: none;
|
||||||
|
border: none;
|
||||||
|
cursor: pointer;
|
||||||
|
padding: 2px;
|
||||||
|
color: var(--destructive);
|
||||||
|
}
|
||||||
|
}
|
||||||
173
frontend/src/components/RolesSelect/RolesSelect.tsx
Normal file
173
frontend/src/components/RolesSelect/RolesSelect.tsx
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
import { CircleAlert, RefreshCw } from '@signozhq/icons';
|
||||||
|
import { Checkbox, Select } from 'antd';
|
||||||
|
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||||
|
import { useListRoles } from 'api/generated/services/role';
|
||||||
|
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import cx from 'classnames';
|
||||||
|
import APIError from 'types/api/error';
|
||||||
|
import { popupContainer } from 'utils/selectPopupContainer';
|
||||||
|
|
||||||
|
import './RolesSelect.styles.scss';
|
||||||
|
|
||||||
|
export interface RoleOption {
|
||||||
|
label: string;
|
||||||
|
value: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function useRoles(): {
|
||||||
|
roles: AuthtypesRoleDTO[];
|
||||||
|
isLoading: boolean;
|
||||||
|
isError: boolean;
|
||||||
|
error: APIError | undefined;
|
||||||
|
refetch: () => void;
|
||||||
|
} {
|
||||||
|
const { data, isLoading, isError, error, refetch } = useListRoles();
|
||||||
|
return {
|
||||||
|
roles: data?.data ?? [],
|
||||||
|
isLoading,
|
||||||
|
isError,
|
||||||
|
error: convertToApiError(error),
|
||||||
|
refetch,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getRoleOptions(roles: AuthtypesRoleDTO[]): RoleOption[] {
|
||||||
|
return roles.map((role) => ({
|
||||||
|
label: role.name ?? '',
|
||||||
|
value: role.name ?? '',
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
function ErrorContent({
|
||||||
|
error,
|
||||||
|
onRefetch,
|
||||||
|
}: {
|
||||||
|
error?: APIError;
|
||||||
|
onRefetch?: () => void;
|
||||||
|
}): JSX.Element {
|
||||||
|
const errorMessage = error?.message || 'Failed to load roles';
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="roles-select-error">
|
||||||
|
<span className="roles-select-error__msg">
|
||||||
|
<CircleAlert size={12} />
|
||||||
|
{errorMessage}
|
||||||
|
</span>
|
||||||
|
{onRefetch && (
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={(e): void => {
|
||||||
|
e.stopPropagation();
|
||||||
|
onRefetch();
|
||||||
|
}}
|
||||||
|
className="roles-select-error__retry-btn"
|
||||||
|
title="Retry"
|
||||||
|
>
|
||||||
|
<RefreshCw size={12} />
|
||||||
|
</button>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
interface BaseProps {
|
||||||
|
id?: string;
|
||||||
|
placeholder?: string;
|
||||||
|
className?: string;
|
||||||
|
getPopupContainer?: (trigger: HTMLElement) => HTMLElement;
|
||||||
|
roles?: AuthtypesRoleDTO[];
|
||||||
|
loading?: boolean;
|
||||||
|
isError?: boolean;
|
||||||
|
error?: APIError;
|
||||||
|
onRefetch?: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SingleProps extends BaseProps {
|
||||||
|
mode?: 'single';
|
||||||
|
value?: string;
|
||||||
|
onChange?: (role: string) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface MultipleProps extends BaseProps {
|
||||||
|
mode: 'multiple';
|
||||||
|
value?: string[];
|
||||||
|
onChange?: (roles: string[]) => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type RolesSelectProps = SingleProps | MultipleProps;
|
||||||
|
|
||||||
|
function RolesSelect(props: RolesSelectProps): JSX.Element {
|
||||||
|
const externalRoles = props.roles;
|
||||||
|
|
||||||
|
const {
|
||||||
|
data,
|
||||||
|
isLoading: internalLoading,
|
||||||
|
isError: internalError,
|
||||||
|
error: internalErrorObj,
|
||||||
|
refetch: internalRefetch,
|
||||||
|
} = useListRoles({
|
||||||
|
query: { enabled: externalRoles === undefined },
|
||||||
|
});
|
||||||
|
|
||||||
|
const roles = externalRoles ?? data?.data ?? [];
|
||||||
|
const options = getRoleOptions(roles);
|
||||||
|
|
||||||
|
const {
|
||||||
|
mode,
|
||||||
|
id,
|
||||||
|
placeholder = 'Select role',
|
||||||
|
className,
|
||||||
|
getPopupContainer = popupContainer,
|
||||||
|
loading = internalLoading,
|
||||||
|
isError = internalError,
|
||||||
|
error = convertToApiError(internalErrorObj),
|
||||||
|
onRefetch = externalRoles === undefined ? internalRefetch : undefined,
|
||||||
|
} = props;
|
||||||
|
|
||||||
|
const notFoundContent = isError ? (
|
||||||
|
<ErrorContent error={error} onRefetch={onRefetch} />
|
||||||
|
) : undefined;
|
||||||
|
|
||||||
|
if (mode === 'multiple') {
|
||||||
|
const { value = [], onChange } = props as MultipleProps;
|
||||||
|
return (
|
||||||
|
<Select
|
||||||
|
id={id}
|
||||||
|
mode="multiple"
|
||||||
|
value={value}
|
||||||
|
onChange={onChange}
|
||||||
|
placeholder={placeholder}
|
||||||
|
className={cx('roles-select', className)}
|
||||||
|
loading={loading}
|
||||||
|
notFoundContent={notFoundContent}
|
||||||
|
options={options}
|
||||||
|
optionRender={(option): JSX.Element => (
|
||||||
|
<Checkbox
|
||||||
|
checked={value.includes(option.value as string)}
|
||||||
|
style={{ pointerEvents: 'none' }}
|
||||||
|
>
|
||||||
|
{option.label}
|
||||||
|
</Checkbox>
|
||||||
|
)}
|
||||||
|
getPopupContainer={getPopupContainer}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const { value, onChange } = props as SingleProps;
|
||||||
|
return (
|
||||||
|
<Select
|
||||||
|
id={id}
|
||||||
|
value={value}
|
||||||
|
onChange={onChange}
|
||||||
|
placeholder={placeholder}
|
||||||
|
className={cx('roles-select', className)}
|
||||||
|
loading={loading}
|
||||||
|
notFoundContent={notFoundContent}
|
||||||
|
options={options}
|
||||||
|
getPopupContainer={getPopupContainer}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default RolesSelect;
|
||||||
2
frontend/src/components/RolesSelect/index.ts
Normal file
2
frontend/src/components/RolesSelect/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export type { RoleOption, RolesSelectProps } from './RolesSelect';
|
||||||
|
export { default, getRoleOptions, useRoles } from './RolesSelect';
|
||||||
@@ -0,0 +1,179 @@
|
|||||||
|
.add-key-modal {
|
||||||
|
[data-slot='dialog-description'] {
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__form {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--spacing-8);
|
||||||
|
padding: var(--padding-4);
|
||||||
|
}
|
||||||
|
|
||||||
|
&__field {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--spacing-4);
|
||||||
|
}
|
||||||
|
|
||||||
|
&__label {
|
||||||
|
font-size: var(--font-size-sm);
|
||||||
|
font-weight: var(--font-weight-normal);
|
||||||
|
color: var(--foreground);
|
||||||
|
line-height: var(--line-height-20);
|
||||||
|
letter-spacing: -0.07px;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__input {
|
||||||
|
height: 32px;
|
||||||
|
background: var(--l2-background);
|
||||||
|
border-color: var(--border);
|
||||||
|
color: var(--l1-foreground);
|
||||||
|
box-shadow: none;
|
||||||
|
|
||||||
|
&::placeholder {
|
||||||
|
color: var(--l3-foreground);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__expiry-toggle {
|
||||||
|
width: 60%;
|
||||||
|
display: flex;
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 2px;
|
||||||
|
overflow: hidden;
|
||||||
|
padding: 0;
|
||||||
|
gap: 0;
|
||||||
|
|
||||||
|
[data-slot='toggle-group'] {
|
||||||
|
width: 100%;
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
&-btn {
|
||||||
|
flex: 1;
|
||||||
|
height: 32px;
|
||||||
|
border-radius: 0;
|
||||||
|
font-size: var(--label-small-400-font-size);
|
||||||
|
font-weight: var(--label-small-400-font-weight);
|
||||||
|
line-height: var(--label-small-400-line-height);
|
||||||
|
justify-content: center;
|
||||||
|
background: transparent;
|
||||||
|
border: none;
|
||||||
|
border-right: 1px solid var(--border);
|
||||||
|
color: var(--foreground);
|
||||||
|
|
||||||
|
&:last-child {
|
||||||
|
border-right: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
&[data-state='on'] {
|
||||||
|
background: var(--l2-background);
|
||||||
|
color: var(--l1-foreground);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__datepicker {
|
||||||
|
width: 100%;
|
||||||
|
height: 32px;
|
||||||
|
|
||||||
|
.ant-picker {
|
||||||
|
background: var(--l2-background);
|
||||||
|
border-color: var(--border);
|
||||||
|
border-radius: 2px;
|
||||||
|
width: 100%;
|
||||||
|
height: 32px;
|
||||||
|
|
||||||
|
input {
|
||||||
|
color: var(--l1-foreground);
|
||||||
|
font-size: var(--font-size-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.ant-picker-suffix {
|
||||||
|
color: var(--foreground);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.add-key-modal-datepicker-popup {
|
||||||
|
border-radius: 4px;
|
||||||
|
border: 1px solid var(--secondary);
|
||||||
|
background: var(--popover);
|
||||||
|
box-shadow: 0px -4px 16px 2px rgba(0, 0, 0, 0.2);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
&__key-display {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
height: 32px;
|
||||||
|
background: var(--l2-background);
|
||||||
|
border: 1px solid var(--border);
|
||||||
|
border-radius: 2px;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__key-text {
|
||||||
|
flex: 1;
|
||||||
|
min-width: 0;
|
||||||
|
padding: 0 var(--padding-2);
|
||||||
|
font-size: var(--font-size-sm);
|
||||||
|
color: var(--l1-foreground);
|
||||||
|
white-space: nowrap;
|
||||||
|
overflow: hidden;
|
||||||
|
text-overflow: ellipsis;
|
||||||
|
font-family: monospace;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__copy-btn {
|
||||||
|
flex-shrink: 0;
|
||||||
|
height: 32px;
|
||||||
|
border-radius: 0 2px 2px 0;
|
||||||
|
border-top: none;
|
||||||
|
border-right: none;
|
||||||
|
border-bottom: none;
|
||||||
|
border-left: 1px solid var(--border);
|
||||||
|
min-width: 40px;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__expiry-meta {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
gap: var(--spacing-2);
|
||||||
|
}
|
||||||
|
|
||||||
|
&__expiry-label {
|
||||||
|
font-size: var(--font-size-xs);
|
||||||
|
font-weight: var(--font-weight-medium);
|
||||||
|
color: var(--foreground);
|
||||||
|
letter-spacing: 0.48px;
|
||||||
|
text-transform: uppercase;
|
||||||
|
}
|
||||||
|
|
||||||
|
&__footer {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: flex-end;
|
||||||
|
padding: var(--padding-4);
|
||||||
|
border-top: 1px solid var(--secondary);
|
||||||
|
}
|
||||||
|
|
||||||
|
&__footer-right {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--spacing-4);
|
||||||
|
}
|
||||||
|
|
||||||
|
&__learn-more {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: var(--spacing-1);
|
||||||
|
color: var(--primary);
|
||||||
|
font-size: var(--font-size-sm);
|
||||||
|
text-decoration: none;
|
||||||
|
|
||||||
|
&:hover {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,52 @@
|
|||||||
|
import { Badge } from '@signozhq/badge';
|
||||||
|
import { Button } from '@signozhq/button';
|
||||||
|
import { Callout } from '@signozhq/callout';
|
||||||
|
import { Check, Copy } from '@signozhq/icons';
|
||||||
|
import type { ServiceaccounttypesGettableFactorAPIKeyWithKeyDTO } from 'api/generated/services/sigNoz.schemas';
|
||||||
|
|
||||||
|
export interface KeyCreatedPhaseProps {
|
||||||
|
createdKey: ServiceaccounttypesGettableFactorAPIKeyWithKeyDTO;
|
||||||
|
hasCopied: boolean;
|
||||||
|
expiryLabel: string;
|
||||||
|
onCopy: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
function KeyCreatedPhase({
|
||||||
|
createdKey,
|
||||||
|
hasCopied,
|
||||||
|
expiryLabel,
|
||||||
|
onCopy,
|
||||||
|
}: KeyCreatedPhaseProps): JSX.Element {
|
||||||
|
return (
|
||||||
|
<div className="add-key-modal__form">
|
||||||
|
<div className="add-key-modal__field">
|
||||||
|
<span className="add-key-modal__label">Key</span>
|
||||||
|
<div className="add-key-modal__key-display">
|
||||||
|
<span className="add-key-modal__key-text">{createdKey.key}</span>
|
||||||
|
<Button
|
||||||
|
variant="outlined"
|
||||||
|
color="secondary"
|
||||||
|
size="sm"
|
||||||
|
onClick={onCopy}
|
||||||
|
className="add-key-modal__copy-btn"
|
||||||
|
>
|
||||||
|
{hasCopied ? <Check size={12} /> : <Copy size={12} />}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="add-key-modal__expiry-meta">
|
||||||
|
<span className="add-key-modal__expiry-label">Expiration</span>
|
||||||
|
<Badge color="vanilla">{expiryLabel}</Badge>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Callout
|
||||||
|
type="info"
|
||||||
|
showIcon
|
||||||
|
message="Store the key securely. This is the only time it will be displayed."
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default KeyCreatedPhase;
|
||||||
@@ -0,0 +1,130 @@
|
|||||||
|
import type { Control, UseFormRegister } from 'react-hook-form';
|
||||||
|
import { Controller } from 'react-hook-form';
|
||||||
|
import { Button } from '@signozhq/button';
|
||||||
|
import { Input } from '@signozhq/input';
|
||||||
|
import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
|
||||||
|
import { DatePicker } from 'antd';
|
||||||
|
import { popupContainer } from 'utils/selectPopupContainer';
|
||||||
|
|
||||||
|
import { disabledDate } from '../utils';
|
||||||
|
import type { FormValues } from './types';
|
||||||
|
import { ExpiryMode, FORM_ID } from './types';
|
||||||
|
|
||||||
|
export interface KeyFormPhaseProps {
|
||||||
|
register: UseFormRegister<FormValues>;
|
||||||
|
control: Control<FormValues>;
|
||||||
|
expiryMode: ExpiryMode;
|
||||||
|
isSubmitting: boolean;
|
||||||
|
isValid: boolean;
|
||||||
|
onSubmit: () => void;
|
||||||
|
onClose: () => void;
|
||||||
|
}
|
||||||
|
|
||||||
|
function KeyFormPhase({
|
||||||
|
register,
|
||||||
|
control,
|
||||||
|
expiryMode,
|
||||||
|
isSubmitting,
|
||||||
|
isValid,
|
||||||
|
onSubmit,
|
||||||
|
onClose,
|
||||||
|
}: KeyFormPhaseProps): JSX.Element {
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<form id={FORM_ID} className="add-key-modal__form" onSubmit={onSubmit}>
|
||||||
|
<div className="add-key-modal__field">
|
||||||
|
<label className="add-key-modal__label" htmlFor="key-name">
|
||||||
|
Name <span style={{ color: 'var(--destructive)' }}>*</span>
|
||||||
|
</label>
|
||||||
|
<Input
|
||||||
|
id="key-name"
|
||||||
|
placeholder="Enter key name e.g.: Service Owner"
|
||||||
|
className="add-key-modal__input"
|
||||||
|
{...register('keyName', {
|
||||||
|
required: true,
|
||||||
|
validate: (v) => !!v.trim(),
|
||||||
|
})}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="add-key-modal__field">
|
||||||
|
<span className="add-key-modal__label">Expiration</span>
|
||||||
|
<Controller
|
||||||
|
name="expiryMode"
|
||||||
|
control={control}
|
||||||
|
render={({ field }): JSX.Element => (
|
||||||
|
<ToggleGroup
|
||||||
|
type="single"
|
||||||
|
value={field.value}
|
||||||
|
onValueChange={(val): void => {
|
||||||
|
if (val) {
|
||||||
|
field.onChange(val);
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
className="add-key-modal__expiry-toggle"
|
||||||
|
>
|
||||||
|
<ToggleGroupItem
|
||||||
|
value={ExpiryMode.NONE}
|
||||||
|
className="add-key-modal__expiry-toggle-btn"
|
||||||
|
>
|
||||||
|
No Expiration
|
||||||
|
</ToggleGroupItem>
|
||||||
|
<ToggleGroupItem
|
||||||
|
value={ExpiryMode.DATE}
|
||||||
|
className="add-key-modal__expiry-toggle-btn"
|
||||||
|
>
|
||||||
|
Set Expiration Date
|
||||||
|
</ToggleGroupItem>
|
||||||
|
</ToggleGroup>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{expiryMode === ExpiryMode.DATE && (
|
||||||
|
<div className="add-key-modal__field">
|
||||||
|
<label className="add-key-modal__label" htmlFor="expiry-date">
|
||||||
|
Expiration Date
|
||||||
|
</label>
|
||||||
|
<div className="add-key-modal__datepicker">
|
||||||
|
<Controller
|
||||||
|
name="expiryDate"
|
||||||
|
control={control}
|
||||||
|
render={({ field }): JSX.Element => (
|
||||||
|
<DatePicker
|
||||||
|
id="expiry-date"
|
||||||
|
value={field.value}
|
||||||
|
onChange={field.onChange}
|
||||||
|
popupClassName="add-key-modal-datepicker-popup"
|
||||||
|
getPopupContainer={popupContainer}
|
||||||
|
disabledDate={disabledDate}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</form>
|
||||||
|
|
||||||
|
<div className="add-key-modal__footer">
|
||||||
|
<div className="add-key-modal__footer-right">
|
||||||
|
<Button variant="solid" color="secondary" size="sm" onClick={onClose}>
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
type="submit"
|
||||||
|
form={FORM_ID}
|
||||||
|
variant="solid"
|
||||||
|
color="primary"
|
||||||
|
size="sm"
|
||||||
|
loading={isSubmitting}
|
||||||
|
disabled={!isValid}
|
||||||
|
>
|
||||||
|
Create Key
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default KeyFormPhase;
|
||||||
@@ -0,0 +1,175 @@
|
|||||||
|
import { useCallback, useEffect, useState } from 'react';
|
||||||
|
import { useForm } from 'react-hook-form';
|
||||||
|
import { useQueryClient } from 'react-query';
|
||||||
|
import { DialogWrapper } from '@signozhq/dialog';
|
||||||
|
import { toast } from '@signozhq/sonner';
|
||||||
|
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||||
|
import {
|
||||||
|
invalidateListServiceAccountKeys,
|
||||||
|
useCreateServiceAccountKey,
|
||||||
|
} from 'api/generated/services/serviceaccount';
|
||||||
|
import type {
|
||||||
|
RenderErrorResponseDTO,
|
||||||
|
ServiceaccounttypesGettableFactorAPIKeyWithKeyDTO,
|
||||||
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import { AxiosError } from 'axios';
|
||||||
|
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||||
|
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
|
||||||
|
import { parseAsBoolean, useQueryState } from 'nuqs';
|
||||||
|
|
||||||
|
import KeyCreatedPhase from './KeyCreatedPhase';
|
||||||
|
import KeyFormPhase from './KeyFormPhase';
|
||||||
|
import type { FormValues } from './types';
|
||||||
|
import { DEFAULT_FORM_VALUES, ExpiryMode, Phase, PHASE_TITLES } from './types';
|
||||||
|
|
||||||
|
import './AddKeyModal.styles.scss';
|
||||||
|
|
||||||
|
function AddKeyModal(): JSX.Element {
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
const [accountId] = useQueryState(SA_QUERY_PARAMS.ACCOUNT);
|
||||||
|
const [isAddKeyOpen, setIsAddKeyOpen] = useQueryState(
|
||||||
|
SA_QUERY_PARAMS.ADD_KEY,
|
||||||
|
parseAsBoolean.withDefault(false),
|
||||||
|
);
|
||||||
|
const open = isAddKeyOpen && !!accountId;
|
||||||
|
|
||||||
|
const [phase, setPhase] = useState<Phase>(Phase.FORM);
|
||||||
|
const [
|
||||||
|
createdKey,
|
||||||
|
setCreatedKey,
|
||||||
|
] = useState<ServiceaccounttypesGettableFactorAPIKeyWithKeyDTO | null>(null);
|
||||||
|
const [hasCopied, setHasCopied] = useState(false);
|
||||||
|
|
||||||
|
const {
|
||||||
|
control,
|
||||||
|
register,
|
||||||
|
handleSubmit,
|
||||||
|
reset,
|
||||||
|
watch,
|
||||||
|
formState: { isValid },
|
||||||
|
} = useForm<FormValues>({
|
||||||
|
mode: 'onChange',
|
||||||
|
defaultValues: DEFAULT_FORM_VALUES,
|
||||||
|
});
|
||||||
|
|
||||||
|
const expiryMode = watch('expiryMode');
|
||||||
|
const expiryDate = watch('expiryDate');
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
if (open) {
|
||||||
|
setPhase(Phase.FORM);
|
||||||
|
setCreatedKey(null);
|
||||||
|
setHasCopied(false);
|
||||||
|
reset();
|
||||||
|
}
|
||||||
|
}, [open, reset]);
|
||||||
|
|
||||||
|
const {
|
||||||
|
mutate: createKey,
|
||||||
|
isLoading: isSubmitting,
|
||||||
|
} = useCreateServiceAccountKey({
|
||||||
|
mutation: {
|
||||||
|
onSuccess: async (response) => {
|
||||||
|
const keyData = response?.data;
|
||||||
|
if (keyData) {
|
||||||
|
setCreatedKey(keyData);
|
||||||
|
setPhase(Phase.CREATED);
|
||||||
|
if (accountId) {
|
||||||
|
await invalidateListServiceAccountKeys(queryClient, { id: accountId });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
const errMessage =
|
||||||
|
convertToApiError(
|
||||||
|
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
|
||||||
|
)?.getErrorMessage() || 'Failed to create key';
|
||||||
|
toast.error(errMessage, { richColors: true });
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
function handleCreate({
|
||||||
|
keyName,
|
||||||
|
expiryMode: mode,
|
||||||
|
expiryDate: date,
|
||||||
|
}: FormValues): void {
|
||||||
|
if (!accountId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const expiresAt =
|
||||||
|
mode === ExpiryMode.DATE && date ? date.endOf('day').unix() : 0;
|
||||||
|
createKey({
|
||||||
|
pathParams: { id: accountId },
|
||||||
|
data: { name: keyName.trim(), expiresAt },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const handleCopy = useCallback(async (): Promise<void> => {
|
||||||
|
if (!createdKey?.key) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await navigator.clipboard.writeText(createdKey.key);
|
||||||
|
setHasCopied(true);
|
||||||
|
setTimeout(() => setHasCopied(false), 2000);
|
||||||
|
toast.success('Key copied to clipboard', { richColors: true });
|
||||||
|
} catch {
|
||||||
|
toast.error('Failed to copy key', { richColors: true });
|
||||||
|
}
|
||||||
|
}, [createdKey]);
|
||||||
|
|
||||||
|
const handleClose = useCallback((): void => {
|
||||||
|
setIsAddKeyOpen(null);
|
||||||
|
}, [setIsAddKeyOpen]);
|
||||||
|
|
||||||
|
function getExpiryLabel(): string {
|
||||||
|
if (expiryMode === ExpiryMode.NONE || !expiryDate) {
|
||||||
|
return 'Never';
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return expiryDate.format(DATE_TIME_FORMATS.MONTH_DATE);
|
||||||
|
} catch {
|
||||||
|
return 'Never';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<DialogWrapper
|
||||||
|
open={open}
|
||||||
|
onOpenChange={(isOpen): void => {
|
||||||
|
if (!isOpen) {
|
||||||
|
handleClose();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
title={PHASE_TITLES[phase]}
|
||||||
|
width="base"
|
||||||
|
className="add-key-modal"
|
||||||
|
showCloseButton
|
||||||
|
disableOutsideClick={false}
|
||||||
|
>
|
||||||
|
{phase === Phase.FORM && (
|
||||||
|
<KeyFormPhase
|
||||||
|
register={register}
|
||||||
|
control={control}
|
||||||
|
expiryMode={expiryMode}
|
||||||
|
isSubmitting={isSubmitting}
|
||||||
|
isValid={isValid}
|
||||||
|
onSubmit={handleSubmit(handleCreate)}
|
||||||
|
onClose={handleClose}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{phase === Phase.CREATED && createdKey && (
|
||||||
|
<KeyCreatedPhase
|
||||||
|
createdKey={createdKey}
|
||||||
|
hasCopied={hasCopied}
|
||||||
|
expiryLabel={getExpiryLabel()}
|
||||||
|
onCopy={handleCopy}
|
||||||
|
/>
|
||||||
|
)}
|
||||||
|
</DialogWrapper>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default AddKeyModal;
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
import type { Dayjs } from 'dayjs';
|
||||||
|
|
||||||
|
export const enum Phase {
|
||||||
|
FORM = 'form',
|
||||||
|
CREATED = 'created',
|
||||||
|
}
|
||||||
|
|
||||||
|
export const enum ExpiryMode {
|
||||||
|
NONE = 'none',
|
||||||
|
DATE = 'date',
|
||||||
|
}
|
||||||
|
|
||||||
|
export const FORM_ID = 'add-key-form';
|
||||||
|
|
||||||
|
export const PHASE_TITLES: Record<Phase, string> = {
|
||||||
|
[Phase.FORM]: 'Add a New Key',
|
||||||
|
[Phase.CREATED]: 'Key Created Successfully',
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface FormValues {
|
||||||
|
keyName: string;
|
||||||
|
expiryMode: ExpiryMode;
|
||||||
|
expiryDate: Dayjs | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const DEFAULT_FORM_VALUES: FormValues = {
|
||||||
|
keyName: '',
|
||||||
|
expiryMode: ExpiryMode.NONE,
|
||||||
|
expiryDate: null,
|
||||||
|
};
|
||||||
@@ -0,0 +1,109 @@
|
|||||||
|
import { useQueryClient } from 'react-query';
|
||||||
|
import { Button } from '@signozhq/button';
|
||||||
|
import { DialogFooter, DialogWrapper } from '@signozhq/dialog';
|
||||||
|
import { PowerOff, X } from '@signozhq/icons';
|
||||||
|
import { toast } from '@signozhq/sonner';
|
||||||
|
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||||
|
import {
|
||||||
|
getGetServiceAccountQueryKey,
|
||||||
|
invalidateListServiceAccounts,
|
||||||
|
useUpdateServiceAccountStatus,
|
||||||
|
} from 'api/generated/services/serviceaccount';
|
||||||
|
import type {
|
||||||
|
RenderErrorResponseDTO,
|
||||||
|
ServiceaccounttypesServiceAccountDTO,
|
||||||
|
} from 'api/generated/services/sigNoz.schemas';
|
||||||
|
import { AxiosError } from 'axios';
|
||||||
|
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
|
||||||
|
import { parseAsBoolean, useQueryState } from 'nuqs';
|
||||||
|
|
||||||
|
function DisableAccountModal(): JSX.Element {
|
||||||
|
const queryClient = useQueryClient();
|
||||||
|
const [accountId, setAccountId] = useQueryState(SA_QUERY_PARAMS.ACCOUNT);
|
||||||
|
const [isDisableOpen, setIsDisableOpen] = useQueryState(
|
||||||
|
SA_QUERY_PARAMS.DISABLE_SA,
|
||||||
|
parseAsBoolean.withDefault(false),
|
||||||
|
);
|
||||||
|
const open = !!isDisableOpen && !!accountId;
|
||||||
|
|
||||||
|
const cachedAccount = accountId
|
||||||
|
? queryClient.getQueryData<{
|
||||||
|
data: ServiceaccounttypesServiceAccountDTO;
|
||||||
|
}>(getGetServiceAccountQueryKey({ id: accountId }))
|
||||||
|
: null;
|
||||||
|
const accountName = cachedAccount?.data?.name;
|
||||||
|
|
||||||
|
const {
|
||||||
|
mutate: updateStatus,
|
||||||
|
isLoading: isDisabling,
|
||||||
|
} = useUpdateServiceAccountStatus({
|
||||||
|
mutation: {
|
||||||
|
onSuccess: async () => {
|
||||||
|
toast.success('Service account disabled', { richColors: true });
|
||||||
|
await setIsDisableOpen(null);
|
||||||
|
await setAccountId(null);
|
||||||
|
await invalidateListServiceAccounts(queryClient);
|
||||||
|
},
|
||||||
|
onError: (error) => {
|
||||||
|
const errMessage =
|
||||||
|
convertToApiError(
|
||||||
|
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
|
||||||
|
)?.getErrorMessage() || 'Failed to disable service account';
|
||||||
|
toast.error(errMessage, { richColors: true });
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
function handleConfirm(): void {
|
||||||
|
if (!accountId) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
updateStatus({
|
||||||
|
pathParams: { id: accountId },
|
||||||
|
data: { status: 'DISABLED' },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function handleCancel(): void {
|
||||||
|
setIsDisableOpen(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<DialogWrapper
|
||||||
|
open={open}
|
||||||
|
onOpenChange={(isOpen): void => {
|
||||||
|
if (!isOpen) {
|
||||||
|
handleCancel();
|
||||||
|
}
|
||||||
|
}}
|
||||||
|
title={`Disable service account ${accountName ?? ''}?`}
|
||||||
|
width="narrow"
|
||||||
|
className="alert-dialog sa-disable-dialog"
|
||||||
|
showCloseButton={false}
|
||||||
|
disableOutsideClick={false}
|
||||||
|
>
|
||||||
|
<p className="sa-disable-dialog__body">
|
||||||
|
Disabling this service account will revoke access for all its keys. Any
|
||||||
|
systems using this account will lose access immediately.
|
||||||
|
</p>
|
||||||
|
<DialogFooter className="sa-disable-dialog__footer">
|
||||||
|
<Button variant="solid" color="secondary" size="sm" onClick={handleCancel}>
|
||||||
|
<X size={12} />
|
||||||
|
Cancel
|
||||||
|
</Button>
|
||||||
|
<Button
|
||||||
|
variant="solid"
|
||||||
|
color="destructive"
|
||||||
|
size="sm"
|
||||||
|
loading={isDisabling}
|
||||||
|
onClick={handleConfirm}
|
||||||
|
>
|
||||||
|
<PowerOff size={12} />
|
||||||
|
Disable
|
||||||
|
</Button>
|
||||||
|
</DialogFooter>
|
||||||
|
</DialogWrapper>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default DisableAccountModal;
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user