Mirror of https://github.com/SigNoz/signoz.git, synced 2026-02-20 15:52:41 +00:00

Compare commits: platform-p...fix/chart- (151 commits)

Commit SHA1s:

41f5f6e421, da2e050a23, 5a69f16410, 07afef5c5e, dcae722b53, 92b07d15ea,
a0dad1602e, 5cf5b70aca, db51b23e3d, 80c46b3414, 2b929421a1, 2792e20aa2,
473be1b174, 6d0c13f9a7, 5cc562ba35, 22099962be, 2559b52bb1, 7523596043,
9f4d9eef46, d9cddbfb42, 0828c13f7c, 2c558b9bf7, 48b588fe48, 5c86b80682,
75512a81c6, 6aaea79b73, 04643264ff, 3aa0d8a7fd, 7f1d350ffe, 1d3134959d,
b86bd24dd9, 4c49d45cbf, 9b3d3453b1, 9d981d8a13, 6de4520a95, f566909320,
aa39db8ac2, eb2c6b78c8, 2d2d0c3d9f, 8a4544cbac, 8898f02698, f277009ff8,
17c6b79d79, 76d6c23217, 82dffdda56, 8f38398863, eb39772d3c, df72c897f9,
4bbe5ead07, e36689ecba, 2c948ef9f6, 3c30114642, d042fad1e3, 235c606b44,
a49d7e1662, b3e41b5520, 83bb97cc58, 68ea28cf6b, 3726c0aac1, dae2d3239b,
0c660f8618, 97cffbc20a, 8317eb1735, b1789ea3f7, 3b41d0a731, a171f7122f,
4a20e93b20, d4dc709aa5, cd014652a1, 538351131f, a3bd72ad86, 786070d90b,
e699ad8122, a1cc05848c, 6d78df2275, df49484bea, 72b0f27494, e36b647bc7,
b491772eaa, 128497f27a, 9e466b56b2, 4ad0baa2a2, 24b588bfba, e5867cc2ad,
b420ca494e, e4693ce64c, ca9b3a910a, 9dc7d2389a, da5860297f, 54fca5ba44,
66f4c3d6ec, a0f407a848, 3562de8fbb, ef80fb39fd, 594d4dc737, 01415b58be,
f7728c9019, 3fffe6e198, 4594f4ffe3, b660dc8573, c7b9c0d36c, 4433d22c09,
9443c42334, 9387c96023, ff2555d8dd, 85953c9a4c, 0155cc38cb, c217cc96c3,
580cf32eb5, 6d3580cbfa, 6c5d36caa9, c4a6c7e277, c9cd974dca, 5b3f121431,
c79373314a, 858cd287fa, afdb674068, 30a6721472, 518dfcbe59, 424127c27c,
2dcb817de1, f6f8c78aaf, 3c99dfdfa5, 6ed72519b8, fe910aaa0f, 0d362b3ba8,
51e9ffb847, f497a154a2, 659fa361ef, 84e77182f6, 32619869e7, e9093126b2,
195f44802d, 948bdb7881, 7274d51236, 5b580f8e63, 5ce1f0b855, 291e6a2af8,
3a34fb4813, 189781748a, e7c812e07f, c0e2dad3b5, bb3ccfae3b, 08f261b0f0,
9b4fd46d2c, f348f47951, 9d700563c1, 54b80c3949, 1c815b130c, 9e9879fc8d,
af87c2e80b
```diff
@@ -42,7 +42,7 @@ services:
       timeout: 5s
       retries: 3
   schema-migrator-sync:
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.142.0
     container_name: schema-migrator-sync
     command:
       - sync
@@ -55,7 +55,7 @@ services:
       condition: service_healthy
     restart: on-failure
   schema-migrator-async:
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.142.0
     container_name: schema-migrator-async
     command:
       - async
```
```diff
@@ -4,7 +4,6 @@ services:
     container_name: signoz-otel-collector-dev
     command:
       - --config=/etc/otel-collector-config.yaml
-      - --feature-gates=-pkg.translator.prometheus.NormalizeName
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
     environment:
```
.github/CODEOWNERS (vendored): 13 changes

```diff
@@ -43,6 +43,12 @@
 /pkg/analytics/ @vikrantgupta25
 /pkg/statsreporter/ @vikrantgupta25

+# Emailing Owners
+
+/pkg/emailing/ @vikrantgupta25
+/pkg/types/emailtypes/ @vikrantgupta25
+/templates/email/ @vikrantgupta25
+
 # Querier Owners

 /pkg/querier/ @srikanthccv
@@ -110,6 +116,7 @@
 # Dashboard Owners

 /frontend/src/hooks/dashboard/ @SigNoz/pulse-frontend
 /frontend/src/providers/Dashboard/ @SigNoz/pulse-frontend

 ## Dashboard Types

@@ -131,3 +138,9 @@

 /frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
 /frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
+
+## Dashboard Libs + Components
+/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
+/frontend/src/lib/dashboard/ @SigNoz/pulse-frontend
+/frontend/src/lib/dashboardVariables/ @SigNoz/pulse-frontend
+/frontend/src/components/NewSelect/ @SigNoz/pulse-frontend
```
.github/workflows/build-enterprise.yaml (vendored): 1 change

```diff
@@ -70,6 +70,7 @@ jobs:
          echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
          echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
          echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
          echo 'DOCS_BASE_URL="https://signoz.io"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
```
.github/workflows/build-staging.yaml (vendored): 1 change

```diff
@@ -69,6 +69,7 @@ jobs:
          echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
          echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
          echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
          echo 'DOCS_BASE_URL="https://staging.signoz.io"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
```
.github/workflows/goci.yaml (vendored): 10 changes

```diff
@@ -93,3 +93,13 @@ jobs:
        run: |
          go run cmd/enterprise/*.go generate openapi
          git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
+      - name: node-install
+        uses: actions/setup-node@v5
+        with:
+          node-version: "22"
+      - name: install-frontend
+        run: cd frontend && yarn install
+      - name: generate-api-clients
+        run: |
+          cd frontend && yarn generate:api
+          git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in generated api clients. Run yarn generate:api in frontend/ locally and commit."; exit 1)
```
.github/workflows/gor-signoz-community.yaml (vendored): 12 changes

```diff
@@ -3,8 +3,8 @@ name: gor-signoz-community
 on:
   push:
     tags:
-      - 'v[0-9]+.[0-9]+.[0-9]+'
-      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
+      - "v[0-9]+.[0-9]+.[0-9]+"
+      - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"

 permissions:
   contents: write
@@ -21,6 +21,10 @@ jobs:
        shell: bash
        run: |
          echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
+      - name: node-setup
+        uses: actions/setup-node@v5
+        with:
+          node-version: "22"
      - name: build-frontend
        run: make js-build
      - name: upload-frontend-artifact
@@ -89,7 +93,7 @@ jobs:
        uses: goreleaser/goreleaser-action@v6
        with:
          distribution: goreleaser-pro
-          version: '~> v2'
+          version: "~> v2"
          args: release --config ${{ env.CONFIG_PATH }} --clean --split
          workdir: .
        env:
@@ -147,7 +151,7 @@ jobs:
        if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
        with:
          distribution: goreleaser-pro
-          version: '~> v2'
+          version: "~> v2"
          args: continue --merge
          workdir: .
        env:
```
.github/workflows/gor-signoz.yaml (vendored): 15 changes

```diff
@@ -3,8 +3,8 @@ name: gor-signoz
 on:
   push:
     tags:
-      - 'v[0-9]+.[0-9]+.[0-9]+'
-      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
+      - "v[0-9]+.[0-9]+.[0-9]+"
+      - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"

 permissions:
   contents: write
@@ -36,8 +36,13 @@ jobs:
          echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
          echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
+          echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
          echo 'DOCS_BASE_URL="https://signoz.io"' >> .env
+      - name: node-setup
+        uses: actions/setup-node@v5
+        with:
+          node-version: "22"
      - name: build-frontend
        run: make js-build
      - name: upload-frontend-artifact
        uses: actions/upload-artifact@v4
        with:
@@ -104,7 +109,7 @@ jobs:
        uses: goreleaser/goreleaser-action@v6
        with:
          distribution: goreleaser-pro
-          version: '~> v2'
+          version: "~> v2"
          args: release --config ${{ env.CONFIG_PATH }} --clean --split
          workdir: .
        env:
@@ -161,7 +166,7 @@ jobs:
        if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
        with:
          distribution: goreleaser-pro
-          version: '~> v2'
+          version: "~> v2"
          args: continue --merge
          workdir: .
        env:
```
.github/workflows/integrationci.yaml (vendored): 12 changes

```diff
@@ -42,18 +42,20 @@ jobs:
          - callbackauthn
          - cloudintegrations
          - dashboard
-          - querier
-          - ttl
-          - preference
          - logspipelines
+          - preference
+          - querier
+          - role
+          - ttl
+          - alerts
        sqlstore-provider:
          - postgres
          - sqlite
        clickhouse-version:
          - 25.5.6
-          - 25.10.1
+          - 25.10.5
        schema-migrator-version:
-          - v0.129.7
+          - v0.142.0
        postgres-version:
          - 15
      if: |
```
.gitignore (vendored): 8 changes

```diff
@@ -1,8 +1,11 @@

 node_modules

 # editor
 .vscode
 !.vscode/settings.json
 .zed
 .idea

 deploy/docker/environment_tiny/common_test
 frontend/node_modules
@@ -31,8 +34,6 @@ frontend/yarn-debug.log*
 frontend/yarn-error.log*
 frontend/src/constants/env.ts

 .idea

 **/build
 **/storage
 **/locust-scripts/__pycache__/
@@ -57,7 +58,6 @@ bin/
 .local/
 */query-service/queries.active
 ee/query-service/db

 # e2e

 e2e/node_modules/
@@ -230,5 +230,3 @@ cython_debug/
 pyrightconfig.json

 # cursor files
 frontend/.cursor/
```
.vscode/settings.json (vendored): 6 changes

```diff
@@ -11,5 +11,11 @@
   "[go]": {
     "editor.formatOnSave": true,
     "editor.defaultFormatter": "golang.go"
   },
+  "[sql]": {
+    "editor.defaultFormatter": "adpyke.vscode-sql-formatter"
+  },
+  "[html]": {
+    "editor.defaultFormatter": "vscode.html-language-features"
+  }
 }
```
```diff
@@ -176,25 +176,6 @@ Wir haben Benchmarks veröffentlicht, die Loki mit SigNoz vergleichen. Schauen S
 Wir ❤️ Beiträge zum Projekt, egal ob große oder kleine. Bitte lies dir zuerst die [CONTRIBUTING.md](CONTRIBUTING.md), durch, bevor du anfängst, Beiträge zu SigNoz zu machen.
 Du bist dir nicht sicher, wie du anfangen sollst? Schreib uns einfach auf dem #contributing Kanal in unserer [slack community](https://signoz.io/slack)

-### Unsere Projektbetreuer
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-
-#### Frontend
-
-- [Palash Gupta](https://github.com/palashgdev)
-- [Yunus M](https://github.com/YounixM)
-- [Rajat Dabade](https://github.com/Rajat-Dabade)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-
 <br /><br />

 ## Dokumentation
```
README.md: 39 changes

```diff
@@ -66,6 +66,17 @@ Read [more](https://signoz.io/metrics-and-dashboards/).

 ![metrics-dashboards-cover](https://github.com/user-attachments/assets/86bbd59d-0b56-4c76-a40f-42c6481e6c47)

+### LLM Observability
+
+Monitor and debug your LLM applications with comprehensive observability. Track LLM calls, analyze token usage, monitor performance, and gain insights into your AI application's behavior in production.
+
+SigNoz LLM observability helps you understand how your language models are performing, identify issues with prompts and responses, track token usage and costs, and optimize your AI applications for better performance and reliability.
+
+[Get started with LLM Observability →](https://signoz.io/docs/llm-observability/)
+
+![llm-observability-cover](https://github.com/user-attachments/assets/2fb7e837-e598-438a-a57f-8a2c3d0e5d96)
+
 ### Alerts

 Use alerts in SigNoz to get notified when anything unusual happens in your application. You can set alerts on any type of telemetry signal (logs, metrics, traces), create thresholds and set up a notification channel to get notified. Advanced features like alert history and anomaly detection can help you create smarter alerts.
@@ -210,34 +221,6 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING

 Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)

-### Project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
-- [Ekansh Gupta](https://github.com/eKuG)
-- [Aniket Agarwal](https://github.com/aniketio-ctrl)
-
-#### Frontend
-
-- [Yunus M](https://github.com/YounixM)
-- [Vikrant Gupta](https://github.com/vikrantgupta25)
-- [Sagar Rajput](https://github.com/SagarRajput-7)
-- [Shaheer Kochai](https://github.com/ahmadshaheer)
-- [Amlan Kumar Nandy](https://github.com/amlannandy)
-- [Sahil Khan](https://github.com/sawhil)
-- [Aditya Singh](https://github.com/aks07)
-- [Abhi Kumar](https://github.com/ahrefabhi)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-- [Vibhu Pandey](https://github.com/therealpandey)
-
 <br /><br />
```
```diff
@@ -187,25 +187,6 @@ Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metric

 如果你不知道如何开始? 只需要在 [slack 社区](https://signoz.io/slack) 通过 `#contributing` 频道联系我们。

-### 项目维护人员
-
-#### 后端
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-
-#### 前端
-
-- [Palash Gupta](https://github.com/palashgdev)
-- [Yunus M](https://github.com/YounixM)
-- [Rajat Dabade](https://github.com/Rajat-Dabade)
-
-#### 运维开发
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-
 <br /><br />

 ## 文档
```
```diff
@@ -18,7 +18,6 @@ import (
     "github.com/SigNoz/signoz/pkg/modules/dashboard"
     "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
     "github.com/SigNoz/signoz/pkg/modules/organization"
-    "github.com/SigNoz/signoz/pkg/modules/role"
     "github.com/SigNoz/signoz/pkg/querier"
     "github.com/SigNoz/signoz/pkg/query-service/app"
     "github.com/SigNoz/signoz/pkg/queryparser"
@@ -77,15 +76,18 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
         func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
             return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
         },
-        func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
+        func(ctx context.Context, sqlstore sqlstore.SQLStore, _ licensing.Licensing, _ dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
             return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
         },
-        func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Module, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
+        func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
             return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
         },
         func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
             return noopgateway.NewProviderFactory()
         },
+        func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
+            return querier.NewHandler(ps, q, a)
+        },
     )
     if err != nil {
         logger.ErrorContext(ctx, "failed to create signoz", "error", err)
```
```diff
@@ -9,6 +9,7 @@ import (
     "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
     "github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
     "github.com/SigNoz/signoz/ee/authz/openfgaauthz"
+    eequerier "github.com/SigNoz/signoz/ee/querier"
     "github.com/SigNoz/signoz/ee/authz/openfgaschema"
     "github.com/SigNoz/signoz/ee/gateway/httpgateway"
     enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
@@ -28,7 +29,6 @@ import (
     "github.com/SigNoz/signoz/pkg/modules/dashboard"
     pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
     "github.com/SigNoz/signoz/pkg/modules/organization"
-    "github.com/SigNoz/signoz/pkg/modules/role"
     "github.com/SigNoz/signoz/pkg/querier"
     "github.com/SigNoz/signoz/pkg/queryparser"
     "github.com/SigNoz/signoz/pkg/signoz"
@@ -116,16 +116,21 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

             return authNs, nil
         },
-        func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
-            return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
+        func(ctx context.Context, sqlstore sqlstore.SQLStore, licensing licensing.Licensing, dashboardModule dashboard.Module) factory.ProviderFactory[authz.AuthZ, authz.Config] {
+            return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), licensing, dashboardModule)
         },
-        func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
-            return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, role, queryParser, querier, licensing)
+        func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
+            return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
         },
         func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
             return httpgateway.NewProviderFactory(licensing)
         },
+        func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
+            communityHandler := querier.NewHandler(ps, q, a)
+            return eequerier.NewHandler(ps, q, communityHandler)
+        },
     )

     if err != nil {
         logger.ErrorContext(ctx, "failed to create signoz", "error", err)
         return err
```
```diff
@@ -193,6 +193,15 @@ emailing:
   templates:
     # The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
     directory: /opt/signoz/conf/templates/email
+  format:
+    header:
+      enabled: false
+      logo_url: ""
+    help:
+      enabled: false
+      email: ""
+    footer:
+      enabled: false
   smtp:
     # The SMTP server address.
     address: localhost:25
@@ -285,7 +294,6 @@ flagger:
   config:
     boolean:
       use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:
@@ -300,3 +308,14 @@ user:
    allow_self: true
    # The duration within which a user can reset their password.
    max_token_lifetime: 6h
+  root:
+    # Whether to enable the root user. When enabled, a root user is provisioned
+    # on startup using the email and password below. The root user cannot be
+    # deleted, updated, or have their password changed through the UI.
+    enabled: false
+    # The email address of the root user.
+    email: ""
+    # The password of the root user. Must meet password requirements.
+    password: ""
+    # The name of the organization to create or look up for the root user.
+    org_name: default
```
```diff
@@ -176,7 +176,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.108.0
+    image: signoz/signoz:v0.112.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -209,12 +209,11 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.12
+    image: signoz/signoz-otel-collector:v0.142.0
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
       - --copy-path=/var/tmp/collector-config.yaml
-      - --feature-gates=-pkg.translator.prometheus.NormalizeName
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
@@ -233,7 +232,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.142.0
     deploy:
       restart_policy:
         condition: on-failure
```
```diff
@@ -117,7 +117,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.108.0
+    image: signoz/signoz:v0.112.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -150,12 +150,11 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:v0.129.12
+    image: signoz/signoz-otel-collector:v0.142.0
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
       - --copy-path=/var/tmp/collector-config.yaml
-      - --feature-gates=-pkg.translator.prometheus.NormalizeName
     configs:
       - source: otel-collector-config
         target: /etc/otel-collector-config.yaml
@@ -176,7 +175,7 @@ services:
       - signoz
   schema-migrator:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:v0.129.12
+    image: signoz/signoz-schema-migrator:v0.142.0
     deploy:
       restart_policy:
         condition: on-failure
```
```diff
@@ -179,7 +179,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.108.0}
+    image: signoz/signoz:${VERSION:-v0.112.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -213,13 +213,12 @@ services:
   # TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
       - --copy-path=/var/tmp/collector-config.yaml
-      - --feature-gates=-pkg.translator.prometheus.NormalizeName
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
@@ -239,7 +238,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -250,7 +249,7 @@ services:
       condition: service_healthy
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
     container_name: schema-migrator-async
     command:
       - async
```
```diff
@@ -111,7 +111,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.108.0}
+    image: signoz/signoz:${VERSION:-v0.112.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -144,13 +144,12 @@ services:
       retries: 3
   otel-collector:
     !!merge <<: *db-depend
-    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.142.0}
     container_name: signoz-otel-collector
     command:
       - --config=/etc/otel-collector-config.yaml
       - --manager-config=/etc/manager-config.yaml
       - --copy-path=/var/tmp/collector-config.yaml
-      - --feature-gates=-pkg.translator.prometheus.NormalizeName
     volumes:
       - ./otel-collector-config.yaml:/etc/otel-collector-config.yaml
       - ../common/signoz/otel-collector-opamp-config.yaml:/etc/manager-config.yaml
@@ -166,7 +165,7 @@ services:
       condition: service_healthy
   schema-migrator-sync:
     !!merge <<: *common
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
     container_name: schema-migrator-sync
     command:
       - sync
@@ -178,7 +177,7 @@ services:
       restart: on-failure
   schema-migrator-async:
     !!merge <<: *db-depend
-    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.12}
+    image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.142.0}
     container_name: schema-migrator-async
     command:
       - async
```
docs/api/openapi.yml: 4441 changes (diff suppressed because it is too large)
@@ -155,6 +155,7 @@ The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAPIDef`:

- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **RequestExamples**: An array of `handler.OpenAPIExample` values that provide concrete request payloads in the generated spec. See [Adding request examples](#adding-request-examples) below.
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
  - `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.

@@ -172,8 +173,170 @@ See existing examples in:

- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)
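For orientation, a minimal registration for a plain JSON endpoint looks roughly like the sketch below. The route, handler, and request/response types (`provider.exampleHandler`, `exampletypes.PostableExample`, `exampletypes.GettableExample`) are placeholders, not actual SigNoz identifiers; only the `handler.New` / `OpenAPIDef` shape follows the conventions described above.

```go
// Hypothetical registration sketch. Every name that is not handler.New,
// handler.OpenAPIDef, or the OpenAPIDef fields documented above is a placeholder.
if err := router.Handle("/api/v1/example", handler.New(provider.authZ.ViewAccess(provider.exampleHandler.Create), handler.OpenAPIDef{
	ID:                  "CreateExample",
	Tags:                []string{"example"},
	Summary:             "Create example",
	Description:         "Create an example object.",
	Request:             new(exampletypes.PostableExample),
	RequestContentType:  "application/json",
	Response:            new(exampletypes.GettableExample),
	ResponseContentType: "application/json",
})).Methods(http.MethodPost).GetError(); err != nil {
	return err
}
```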
## OpenAPI schema details for request/response types

The OpenAPI spec is generated from the Go types you pass as `Request` and `Response` in `OpenAPIDef`. The following struct tags and interfaces control how those types appear in the generated schema.

### Adding request examples

Use the `RequestExamples` field in `OpenAPIDef` to provide concrete request payloads. Each example is a `handler.OpenAPIExample`:

```go
type OpenAPIExample struct {
	Name        string // unique key for the example (e.g. "traces_time_series")
	Summary     string // short description shown in docs (e.g. "Time series: count spans grouped by service")
	Description string // optional longer description
	Value       any    // the example payload, typically map[string]any
}
```

For reference, see `pkg/apiserver/signozapiserver/querier.go`, which defines examples inline for the `/api/v5/query_range` endpoint:

```go
if err := router.Handle("/api/v5/query_range", handler.New(provider.authZ.ViewAccess(provider.querierHandler.QueryRange), handler.OpenAPIDef{
	ID:                 "QueryRangeV5",
	Tags:               []string{"querier"},
	Summary:            "Query range",
	Description:        "Execute a composite query over a time range.",
	Request:            new(qbtypes.QueryRangeRequest),
	RequestContentType: "application/json",
	RequestExamples: []handler.OpenAPIExample{
		{
			Name:    "traces_time_series",
			Summary: "Time series: count spans grouped by service",
			Value: map[string]any{
				"schemaVersion": "v1",
				"start":         1640995200000,
				"end":           1640998800000,
				"requestType":   "time_series",
				"compositeQuery": map[string]any{
					"queries": []any{
						map[string]any{
							"type": "builder_query",
							"spec": map[string]any{
								"name":   "A",
								"signal": "traces",
								// ...
							},
						},
					},
				},
			},
		},
		// ... more examples
	},
	// ...
})).Methods(http.MethodPost).GetError(); err != nil {
	return err
}
```

### `required` tag

Use `required:"true"` on struct fields where the property is expected to be **present** in the JSON payload. This is different from the zero value: a field can have its zero value (e.g. `0`, `""`, `false`) and still be required. The `required` tag means the key itself must exist in the JSON object.

```go
type ListItem struct {
	...
}

type ListResponse struct {
	List  []ListItem `json:"list" required:"true" nullable:"true"`
	Total uint64     `json:"total" required:"true"`
}
```

In this example, a response like `{"list": null, "total": 0}` is valid. Both keys are present (satisfying `required`), `total` has its zero value, and `list` is null (allowed by `nullable`). But `{"total": 0}` would violate the schema because the `list` key is missing.

### `nullable` tag

Use `nullable:"true"` on struct fields that can be `null` in the JSON payload. This is especially important for **slice and map fields** because in Go, the zero value for these types is `nil`, which serializes to `null` in JSON (not `[]` or `{}`).

Be explicit about the distinction:

- **Nullable list** (`nullable:"true"`): the field can be `null`. Use this when the Go code may return `nil` for the slice.
- **Non-nullable list** (no `nullable` tag): the field is always an array, never `null`. Ensure the Go code initializes it to an empty slice (e.g. `make([]T, 0)`) before serializing.

```go
// Non-nullable: Go code must ensure this is always an initialized slice.
type NonNullableExample struct {
	Items []Item `json:"items" required:"true"`
}
```

When defining your types, ask yourself: "Can this field be `null` in the JSON response, or is it always an array/object?" If the Go code ever returns a `nil` slice or map, mark it `nullable:"true"`.
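For the nullable case, a minimal counterpart sketch (field and type names here are illustrative only):

```go
// Nullable: the Go code may legitimately return a nil slice here, so the
// schema must allow null. Field and type names are illustrative placeholders.
type NullableExample struct {
	Items []Item `json:"items" required:"true" nullable:"true"`
}
```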
### `Enum()` method

For types that have a fixed set of acceptable values, implement the `Enum() []any` method. This generates an `enum` constraint in the JSON schema so the OpenAPI spec accurately restricts the values.

```go
type Signal struct {
	valuer.String
}

var (
	SignalTraces  = Signal{valuer.NewString("traces")}
	SignalLogs    = Signal{valuer.NewString("logs")}
	SignalMetrics = Signal{valuer.NewString("metrics")}
)

func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}
```

This produces the following in the generated OpenAPI spec:

```yaml
Signal:
  enum:
    - traces
    - logs
    - metrics
  type: string
```

Every type with a known set of values **must** implement `Enum()`. Without it, the JSON schema will only show the base type (e.g. `string`) with no value constraints.

### `JSONSchema()` method (custom schema)

For types that need a completely custom JSON schema (for example, a field that accepts either a string or a number), implement the `jsonschema.Exposer` interface:

```go
var _ jsonschema.Exposer = Step{}

func (Step) JSONSchema() (jsonschema.Schema, error) {
	s := jsonschema.Schema{}
	s.WithDescription("Step interval. Accepts a duration string or seconds.")

	strSchema := jsonschema.Schema{}
	strSchema.WithType(jsonschema.String.Type())
	strSchema.WithExamples("60s", "5m", "1h")

	numSchema := jsonschema.Schema{}
	numSchema.WithType(jsonschema.Number.Type())
	numSchema.WithExamples(60, 300, 3600)

	s.OneOf = []jsonschema.SchemaOrBool{
		strSchema.ToSchemaOrBool(),
		numSchema.ToSchemaOrBool(),
	}
	return s, nil
}
```

## What should I remember?

- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
- **Add `required:"true"`** on fields where the key must be present in the JSON (this is about key presence, not about the zero value).
- **Add `nullable:"true"`** on fields that can be `null`. Pay special attention to slices and maps -- in Go these default to `nil` which serializes to `null`. If the field should always be an array, initialize it and do not mark it nullable.
- **Implement `Enum()`** on every type that has a fixed set of acceptable values so the JSON schema generates proper `enum` constraints.
- **Add request examples** via `RequestExamples` in `OpenAPIDef` for any non-trivial endpoint. See `pkg/apiserver/signozapiserver/querier.go` for reference.
```diff
@@ -24,7 +24,7 @@ The configuration file is a JSON array containing data source objects. Each obje
 | `label` | `string` | Display name shown to users (e.g., `"AWS EC2"`) |
 | `tags` | `string[]` | Array of category tags for grouping (e.g., `["AWS"]`, `["database"]`) |
 | `module` | `string` | Destination module after onboarding completion |
-| `imgUrl` | `string` | Path to the logo/icon (e.g., `"/Logos/ec2.svg"`) |
+| `imgUrl` | `string` | Path to the logo/icon **(SVG required)** (e.g., `"/Logos/ec2.svg"`) |

 ### Optional Keys
```
```diff
@@ -2,12 +2,18 @@ package openfgaauthz

 import (
     "context"
+    "slices"

+    "github.com/SigNoz/signoz/ee/authz/openfgaserver"
     "github.com/SigNoz/signoz/pkg/authz"
+    "github.com/SigNoz/signoz/pkg/authz/authzstore/sqlauthzstore"
     pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
+    "github.com/SigNoz/signoz/pkg/errors"
     "github.com/SigNoz/signoz/pkg/factory"
+    "github.com/SigNoz/signoz/pkg/licensing"
     "github.com/SigNoz/signoz/pkg/sqlstore"
     "github.com/SigNoz/signoz/pkg/types/authtypes"
+    "github.com/SigNoz/signoz/pkg/types/roletypes"
     "github.com/SigNoz/signoz/pkg/valuer"
     openfgav1 "github.com/openfga/api/proto/openfga/v1"
     openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
@@ -15,84 +21,320 @@ import (

 type provider struct {
     pkgAuthzService authz.AuthZ
     openfgaServer   *openfgaserver.Server
     licensing       licensing.Licensing
     store           roletypes.Store
     registry        []authz.RegisterTypeable
 }

-func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
+func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry ...authz.RegisterTypeable) factory.ProviderFactory[authz.AuthZ, authz.Config] {
     return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
-        return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
+        return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema, licensing, registry)
     })
 }

-func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
+func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile, licensing licensing.Licensing, registry []authz.RegisterTypeable) (authz.AuthZ, error) {
     pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
     pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
     if err != nil {
         return nil, err
     }

     openfgaServer, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthzService)
     if err != nil {
         return nil, err
     }

     return &provider{
         pkgAuthzService: pkgAuthzService,
         openfgaServer:   openfgaServer,
         licensing:       licensing,
         store:           sqlauthzstore.NewSqlAuthzStore(sqlstore),
         registry:        registry,
     }, nil
 }

 func (provider *provider) Start(ctx context.Context) error {
-    return provider.pkgAuthzService.Start(ctx)
+    return provider.openfgaServer.Start(ctx)
 }

 func (provider *provider) Stop(ctx context.Context) error {
-    return provider.pkgAuthzService.Stop(ctx)
+    return provider.openfgaServer.Stop(ctx)
 }

 func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
-    return provider.pkgAuthzService.Check(ctx, tuple)
+    return provider.openfgaServer.Check(ctx, tuple)
 }

-func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
-    subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
-    if err != nil {
-        return err
-    }
-
-    tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
-    if err != nil {
-        return err
-    }
-
-    err = provider.BatchCheck(ctx, tuples)
-    if err != nil {
-        return err
-    }
-
-    return nil
+func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
+    return provider.openfgaServer.CheckWithTupleCreation(ctx, claims, orgID, relation, typeable, selectors, roleSelectors)
 }

-func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, _ authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector) error {
-    subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
-    if err != nil {
-        return err
-    }
-
-    tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
-    if err != nil {
-        return err
-    }
-
-    err = provider.BatchCheck(ctx, tuples)
-    if err != nil {
-        return err
-    }
-
-    return nil
+func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, roleSelectors []authtypes.Selector) error {
+    return provider.openfgaServer.CheckWithTupleCreationWithoutClaims(ctx, orgID, relation, typeable, selectors, roleSelectors)
 }

 func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
-    return provider.pkgAuthzService.BatchCheck(ctx, tuples)
+    return provider.openfgaServer.BatchCheck(ctx, tuples)
 }

 func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
-    return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
+    return provider.openfgaServer.ListObjects(ctx, subject, relation, typeable)
 }

 func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
-    return provider.pkgAuthzService.Write(ctx, additions, deletions)
+    return provider.openfgaServer.Write(ctx, additions, deletions)
 }

 func (provider *provider) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*roletypes.Role, error) {
     return provider.pkgAuthzService.Get(ctx, orgID, id)
 }

 func (provider *provider) GetByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*roletypes.Role, error) {
     return provider.pkgAuthzService.GetByOrgIDAndName(ctx, orgID, name)
 }

 func (provider *provider) List(ctx context.Context, orgID valuer.UUID) ([]*roletypes.Role, error) {
     return provider.pkgAuthzService.List(ctx, orgID)
 }

 func (provider *provider) ListByOrgIDAndNames(ctx context.Context, orgID valuer.UUID, names []string) ([]*roletypes.Role, error) {
     return provider.pkgAuthzService.ListByOrgIDAndNames(ctx, orgID, names)
 }

 func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
     return provider.pkgAuthzService.Grant(ctx, orgID, name, subject)
 }

 func (provider *provider) ModifyGrant(ctx context.Context, orgID valuer.UUID, existingRoleName string, updatedRoleName string, subject string) error {
     return provider.pkgAuthzService.ModifyGrant(ctx, orgID, existingRoleName, updatedRoleName, subject)
 }

 func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, name string, subject string) error {
     return provider.pkgAuthzService.Revoke(ctx, orgID, name, subject)
 }

 func (provider *provider) CreateManagedRoles(ctx context.Context, orgID valuer.UUID, managedRoles []*roletypes.Role) error {
     return provider.pkgAuthzService.CreateManagedRoles(ctx, orgID, managedRoles)
 }

 func (provider *provider) CreateManagedUserRoleTransactions(ctx context.Context, orgID valuer.UUID, userID valuer.UUID) error {
     tuples := make([]*openfgav1.TupleKey, 0)

     grantTuples, err := provider.getManagedRoleGrantTuples(orgID, userID)
     if err != nil {
         return err
     }
     tuples = append(tuples, grantTuples...)

     managedRoleTuples, err := provider.getManagedRoleTransactionTuples(orgID)
     if err != nil {
         return err
     }
     tuples = append(tuples, managedRoleTuples...)

     return provider.Write(ctx, tuples, nil)
 }

 func (provider *provider) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
     _, err := provider.licensing.GetActive(ctx, orgID)
     if err != nil {
         return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
     }

     return provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
 }

 func (provider *provider) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
     _, err := provider.licensing.GetActive(ctx, orgID)
     if err != nil {
         return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
     }

     existingRole, err := provider.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
     if err != nil {
         if !errors.Ast(err, errors.TypeNotFound) {
             return nil, err
         }
     }

     if existingRole != nil {
         return roletypes.NewRoleFromStorableRole(existingRole), nil
     }

     err = provider.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
     if err != nil {
         return nil, err
     }

     return role, nil
 }

 func (provider *provider) GetResources(_ context.Context) []*authtypes.Resource {
     typeables := make([]authtypes.Typeable, 0)
     for _, register := range provider.registry {
         typeables = append(typeables, register.MustGetTypeables()...)
     }
     // role module cannot self register itself!
     typeables = append(typeables, provider.MustGetTypeables()...)

     resources := make([]*authtypes.Resource, 0)
     for _, typeable := range typeables {
         resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
     }

     return resources
 }

 func (provider *provider) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
     storableRole, err := provider.store.Get(ctx, orgID, id)
     if err != nil {
         return nil, err
     }

     objects := make([]*authtypes.Object, 0)
     for _, resource := range provider.GetResources(ctx) {
         if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
             resourceObjects, err := provider.
                 ListObjects(
                     ctx,
                     authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
                     relation,
                     authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
                 )
             if err != nil {
                 return nil, err
             }

             objects = append(objects, resourceObjects...)
         }
     }

     return objects, nil
 }

 func (provider *provider) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
     _, err := provider.licensing.GetActive(ctx, orgID)
     if err != nil {
         return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
     }

     return provider.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
 }

 func (provider *provider) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
     _, err := provider.licensing.GetActive(ctx, orgID)
     if err != nil {
         return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
     }

     additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
     if err != nil {
         return err
     }

     deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
     if err != nil {
         return err
     }

     err = provider.Write(ctx, additionTuples, deletionTuples)
     if err != nil {
         return err
     }

     return nil
 }

 func (provider *provider) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
     _, err := provider.licensing.GetActive(ctx, orgID)
     if err != nil {
         return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
     }

     storableRole, err := provider.store.Get(ctx, orgID, id)
     if err != nil {
         return err
     }

     role := roletypes.NewRoleFromStorableRole(storableRole)
     err = role.CanEditDelete()
     if err != nil {
         return err
     }

     return provider.store.Delete(ctx, orgID, id)
 }

 func (provider *provider) MustGetTypeables() []authtypes.Typeable {
     return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
 }

 func (provider *provider) getManagedRoleGrantTuples(orgID valuer.UUID, userID valuer.UUID) ([]*openfgav1.TupleKey, error) {
     tuples := []*openfgav1.TupleKey{}

     // Grant the admin role to the user
     adminSubject := authtypes.MustNewSubject(authtypes.TypeableUser, userID.String(), orgID, nil)
     adminTuple, err := authtypes.TypeableRole.Tuples(
         adminSubject,
         authtypes.RelationAssignee,
         []authtypes.Selector{
             authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAdminRoleName),
         },
         orgID,
     )
     if err != nil {
         return nil, err
     }
     tuples = append(tuples, adminTuple...)

     // Grant the admin role to the anonymous user
     anonymousSubject := authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
     anonymousTuple, err := authtypes.TypeableRole.Tuples(
         anonymousSubject,
         authtypes.RelationAssignee,
         []authtypes.Selector{
             authtypes.MustNewSelector(authtypes.TypeRole, roletypes.SigNozAnonymousRoleName),
         },
         orgID,
     )
     if err != nil {
         return nil, err
     }
     tuples = append(tuples, anonymousTuple...)

     return tuples, nil
 }

 func (provider *provider) getManagedRoleTransactionTuples(orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
     transactionsByRole := make(map[string][]*authtypes.Transaction)
     for _, register := range provider.registry {
         for roleName, txns := range register.MustGetManagedRoleTransactions() {
             transactionsByRole[roleName] = append(transactionsByRole[roleName], txns...)
         }
     }

     tuples := make([]*openfgav1.TupleKey, 0)
     for roleName, transactions := range transactionsByRole {
         for _, txn := range transactions {
             typeable := authtypes.MustNewTypeableFromType(txn.Object.Resource.Type, txn.Object.Resource.Name)
             txnTuples, err := typeable.Tuples(
                 authtypes.MustNewSubject(
                     authtypes.TypeableRole,
                     roleName,
                     orgID,
                     &authtypes.RelationAssignee,
                 ),
                 txn.Relation,
                 []authtypes.Selector{txn.Object.Selector},
                 orgID,
             )
             if err != nil {
                 return nil, err
             }
             tuples = append(tuples, txnTuples...)
         }
     }

     return tuples, nil
 }
```
83
ee/authz/openfgaserver/server.go
Normal file
83
ee/authz/openfgaserver/server.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package openfgaserver
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
)
|
||||
|
||||
type Server struct {
|
||||
pkgAuthzService authz.AuthZ
|
||||
}
|
||||
|
||||
func NewOpenfgaServer(ctx context.Context, pkgAuthzService authz.AuthZ) (*Server, error) {
|
||||
|
||||
return &Server{
|
||||
pkgAuthzService: pkgAuthzService,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (server *Server) Start(ctx context.Context) error {
|
||||
return server.pkgAuthzService.Start(ctx)
|
||||
}
|
||||
|
||||
func (server *Server) Stop(ctx context.Context) error {
|
||||
return server.pkgAuthzService.Stop(ctx)
|
||||
}
|
||||
|
||||
func (server *Server) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.Check(ctx, tuple)
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = server.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (server *Server) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.BatchCheck(ctx, tuples)
|
||||
}
|
||||
|
||||
func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
return server.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
return server.pkgAuthzService.Write(ctx, additions, deletions)
|
||||
}
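For context, a rough usage sketch of the new wrapper follows; the authz.AuthZ provider, its lifecycle, and the subject/object strings are assumptions for illustration and are not part of this diff.

```go
package example

import (
	"context"

	"github.com/SigNoz/signoz/ee/authz/openfgaserver"
	"github.com/SigNoz/signoz/pkg/authz"

	openfgav1 "github.com/openfga/api/proto/openfga/v1"
)

// checkDashboardRead is a hypothetical helper: pkgAuthz is an assumed, already-started
// authz.AuthZ provider, and the tuple fields below are placeholder identifiers.
func checkDashboardRead(ctx context.Context, pkgAuthz authz.AuthZ) error {
	server, err := openfgaserver.NewOpenfgaServer(ctx, pkgAuthz)
	if err != nil {
		return err
	}

	// Check delegates to the underlying pkg authz service.
	return server.Check(ctx, &openfgav1.TupleKey{
		User:     "user:00000000-0000-0000-0000-000000000000",      // placeholder subject
		Relation: "read",                                           // relation name assumed
		Object:   "dashboard:00000000-0000-0000-0000-000000000000", // placeholder object
	})
}
```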
|
||||
@@ -11,7 +11,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
@@ -26,12 +25,11 @@ type module struct {
|
||||
pkgDashboardModule dashboard.Module
|
||||
store dashboardtypes.Store
|
||||
settings factory.ScopedProviderSettings
|
||||
role role.Module
|
||||
querier querier.Querier
|
||||
licensing licensing.Licensing
|
||||
}
|
||||
|
||||
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, role role.Module, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
|
||||
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
|
||||
|
||||
@@ -39,7 +37,6 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
|
||||
pkgDashboardModule: pkgDashboardModule,
|
||||
store: store,
|
||||
settings: scopedProviderSettings,
|
||||
role: role,
|
||||
querier: querier,
|
||||
licensing: licensing,
|
||||
}
|
||||
@@ -59,29 +56,6 @@ func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publi
|
||||
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
|
||||
}
|
||||
|
||||
role, err := module.role.GetOrCreate(ctx, roletypes.NewRole(roletypes.AnonymousUserRoleName, roletypes.AnonymousUserRoleDescription, roletypes.RoleTypeManaged.StringValue(), orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.role.Assign(ctx, role.ID, orgID, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
additionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.role.PatchObjects(ctx, orgID, role.ID, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -126,6 +100,7 @@ func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id
|
||||
|
||||
return []authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, authtypes.WildCardSelectorString),
|
||||
}, storableDashboard.OrgID, nil
|
||||
}
|
||||
|
||||
@@ -188,29 +163,6 @@ func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashb
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role, err := module.role.GetOrCreate(ctx, roletypes.NewRole(roletypes.AnonymousUserRoleName, roletypes.AnonymousUserRoleDescription, roletypes.RoleTypeManaged.StringValue(), orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.role.PatchObjects(ctx, orgID, role.ID, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -248,10 +200,6 @@ func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, m
|
||||
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
|
||||
}
|
||||
|
||||
func (module *module) MustGetTypeables() []authtypes.Typeable {
|
||||
return module.pkgDashboardModule.MustGetTypeables()
|
||||
}
|
||||
|
||||
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.List(ctx, orgID)
|
||||
}
|
||||
@@ -264,34 +212,27 @@ func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valu
|
||||
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
|
||||
}
|
||||
|
||||
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
|
||||
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role, err := module.role.GetOrCreate(ctx, roletypes.NewRole(roletypes.AnonymousUserRoleName, roletypes.AnonymousUserRoleDescription, roletypes.RoleTypeManaged.StringValue(), orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.role.PatchObjects(ctx, orgID, role.ID, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
func (module *module) MustGetTypeables() []authtypes.Typeable {
|
||||
return module.pkgDashboardModule.MustGetTypeables()
|
||||
}
|
||||
|
||||
func (module *module) MustGetManagedRoleTransactions() map[string][]*authtypes.Transaction {
|
||||
return map[string][]*authtypes.Transaction{
|
||||
roletypes.SigNozAnonymousRoleName: {
|
||||
{
|
||||
Relation: authtypes.RelationRead,
|
||||
Object: *authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Type: authtypes.TypeMetaResource,
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, "*"),
|
||||
),
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (module *module) deletePublic(ctx context.Context, _ valuer.UUID, dashboardID valuer.UUID) error {
|
||||
return module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
}
|
||||
|
||||
178
ee/querier/handler.go
Normal file
@@ -0,0 +1,178 @@
|
||||
package querier
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type handler struct {
|
||||
set factory.ProviderSettings
|
||||
querier querier.Querier
|
||||
community querier.Handler
|
||||
}
|
||||
|
||||
func NewHandler(set factory.ProviderSettings, querier querier.Querier, community querier.Handler) querier.Handler {
|
||||
return &handler{
|
||||
set: set,
|
||||
querier: querier,
|
||||
community: community,
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
|
||||
bodyBytes, err := io.ReadAll(req.Body)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
|
||||
return
|
||||
}
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
|
||||
ctx := req.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
stackTrace := string(debug.Stack())
|
||||
|
||||
queryJSON, _ := json.Marshal(queryRangeRequest)
|
||||
|
||||
h.set.Logger.ErrorContext(ctx, "panic in QueryRange",
|
||||
"error", r,
|
||||
"user", claims.UserID,
|
||||
"payload", string(queryJSON),
|
||||
"stacktrace", stackTrace,
|
||||
)
|
||||
|
||||
render.Error(rw, errors.NewInternalf(
|
||||
errors.CodeInternal,
|
||||
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
|
||||
))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := queryRangeRequest.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
|
||||
anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
results := []any{}
|
||||
for _, item := range anomalies.Results {
|
||||
results = append(results, item)
|
||||
}
|
||||
|
||||
// Build step intervals from the anomaly query
|
||||
stepIntervals := make(map[string]uint64)
|
||||
if anomalyQuery.StepInterval.Duration > 0 {
|
||||
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
|
||||
}
|
||||
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: qbtypes.QueryData{
|
||||
Results: results,
|
||||
},
|
||||
Meta: qbtypes.ExecStats{
|
||||
StepIntervals: stepIntervals,
|
||||
},
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, finalResp)
|
||||
return
|
||||
}
|
||||
|
||||
// regular query range request, delegate to community handler
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
h.community.QueryRange(rw, req)
|
||||
}
|
||||
|
||||
func (h *handler) QueryRawStream(rw http.ResponseWriter, req *http.Request) {
|
||||
h.community.QueryRawStream(rw, req)
|
||||
}
|
||||
|
||||
func (h *handler) ReplaceVariables(rw http.ResponseWriter, req *http.Request) {
|
||||
h.community.ReplaceVariables(rw, req)
|
||||
}
|
||||
|
||||
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == qbtypes.FunctionNameAnomaly {
|
||||
for _, arg := range fn.Args {
|
||||
if arg.Name == "seasonality" {
|
||||
if seasonalityStr, ok := arg.Value.(string); ok {
|
||||
switch seasonalityStr {
|
||||
case "weekly":
|
||||
return anomalyV2.SeasonalityWeekly
|
||||
case "hourly":
|
||||
return anomalyV2.SeasonalityHourly
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return anomalyV2.SeasonalityDaily // default
|
||||
}
|
||||
|
||||
func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anomalyV2.Provider {
|
||||
switch seasonality {
|
||||
case anomalyV2.SeasonalityWeekly:
|
||||
return anomalyV2.NewWeeklyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](h.set.Logger),
|
||||
)
|
||||
case anomalyV2.SeasonalityHourly:
|
||||
return anomalyV2.NewHourlyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](h.set.Logger),
|
||||
)
|
||||
default:
|
||||
return anomalyV2.NewDailyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](h.querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.DailyProvider](h.set.Logger),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
|
||||
seasonality := extractSeasonality(anomalyQuery)
|
||||
provider := h.createAnomalyProvider(seasonality)
|
||||
|
||||
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
|
||||
}
|
||||
@@ -50,7 +50,7 @@ type GetAnomaliesResponse struct {
//
//                        ^                       ^
//                        |                       |
//   (rounded value for past peiod)     + (seasonal growth)
//   (rounded value for past period)    + (seasonal growth)
//
// score = abs(value - prediction) / stddev (current_season_query)
type anomalyQueryParams struct {
@@ -74,12 +74,12 @@ type anomalyQueryParams struct {
//        : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
//        : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
PastSeasonQuery *v3.QueryRangeParamsV3
// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
// Past2SeasonQuery is the query range params for past 2 seasonal periods to the current season
// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
//        : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
//        : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
Past2SeasonQuery *v3.QueryRangeParamsV3
// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
// Past3SeasonQuery is the query range params for past 3 seasonal periods to the current season
// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
//        : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
//        : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)
|
||||
|
||||
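For reference, the score formula documented above can be read as the minimal sketch below, assuming the prediction and the standard deviation of the current-season series are already computed; the function name and sample numbers are illustrative, not taken from the SigNoz code.

```go
package main

import (
	"fmt"
	"math"
)

// anomalyScore mirrors the documented formula:
// score = abs(value - prediction) / stddev(current season).
func anomalyScore(value, prediction, currentSeasonStdDev float64) float64 {
	if currentSeasonStdDev == 0 {
		// Guard for a flat season; the real implementation may handle this differently.
		return 0
	}
	return math.Abs(value-prediction) / currentSeasonStdDev
}

func main() {
	// Observed 120 req/s against a predicted 100 req/s with a current-season stddev of 8.
	fmt.Println(anomalyScore(120, 100, 8)) // prints 2.5
}
```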
@@ -2,17 +2,13 @@ package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
querierAPI "github.com/SigNoz/signoz/pkg/querier"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/cloudintegrations"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/integrations"
|
||||
@@ -33,7 +29,6 @@ type APIHandlerOptions struct {
|
||||
IntegrationsController *integrations.Controller
|
||||
CloudIntegrationsController *cloudintegrations.Controller
|
||||
LogsParsingPipelineController *logparsingpipeline.LogParsingPipelineController
|
||||
Gateway *httputil.ReverseProxy
|
||||
GatewayUrl string
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
@@ -46,7 +41,7 @@ type APIHandler struct {
|
||||
}
|
||||
|
||||
// NewAPIHandler returns an APIHandler
|
||||
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
|
||||
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
|
||||
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
|
||||
Reader: opts.DataConnector,
|
||||
RuleManager: opts.RulesManager,
|
||||
@@ -56,11 +51,9 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
|
||||
FluxInterval: opts.FluxInterval,
|
||||
AlertmanagerAPI: alertmanager.NewAPI(signoz.Alertmanager),
|
||||
LicensingAPI: httplicensing.NewLicensingAPI(signoz.Licensing),
|
||||
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
})
|
||||
}, config)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -81,10 +74,6 @@ func (ah *APIHandler) UM() *usage.Manager {
|
||||
return ah.opts.UsageManager
|
||||
}
|
||||
|
||||
func (ah *APIHandler) Gateway() *httputil.ReverseProxy {
|
||||
return ah.opts.Gateway
|
||||
}
|
||||
|
||||
// RegisterRoutes registers routes for this handler on the given router
|
||||
func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// note: add ee override methods first
|
||||
@@ -107,14 +96,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// v4
|
||||
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
|
||||
|
||||
// v5
|
||||
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
|
||||
|
||||
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
|
||||
|
||||
// Gateway
|
||||
router.PathPrefix(gateway.RoutePrefix).HandlerFunc(am.EditAccess(ah.ServeGatewayHTTP))
|
||||
|
||||
ah.APIHandler.RegisterRoutes(router, am)
|
||||
|
||||
}
|
||||
|
||||
@@ -1,58 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func (ah *APIHandler) ServeGatewayHTTP(rw http.ResponseWriter, req *http.Request) {
|
||||
ctx := req.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgId is invalid"))
|
||||
return
|
||||
}
|
||||
|
||||
validPath := false
|
||||
for _, allowedPrefix := range gateway.AllowedPrefix {
|
||||
if strings.HasPrefix(req.URL.Path, gateway.RoutePrefix+allowedPrefix) {
|
||||
validPath = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !validPath {
|
||||
rw.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
|
||||
license, err := ah.Signoz.Licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
//Create headers
|
||||
var licenseKey string
|
||||
if license != nil {
|
||||
licenseKey = license.Key
|
||||
}
|
||||
|
||||
req.Header.Set("X-Signoz-Cloud-Api-Key", licenseKey)
|
||||
req.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
|
||||
req.Header.Set("X-Consumer-Groups", "ns:default")
|
||||
|
||||
ah.Gateway().ServeHTTP(rw, req)
|
||||
}
|
||||
@@ -2,16 +2,11 @@ package api
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"runtime/debug"
|
||||
|
||||
anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
@@ -20,8 +15,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
)
|
||||
|
||||
func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
@@ -144,140 +137,3 @@ func (aH *APIHandler) queryRangeV4(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
func extractSeasonality(anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) anomalyV2.Seasonality {
|
||||
for _, fn := range anomalyQuery.Functions {
|
||||
if fn.Name == qbtypes.FunctionNameAnomaly {
|
||||
for _, arg := range fn.Args {
|
||||
if arg.Name == "seasonality" {
|
||||
if seasonalityStr, ok := arg.Value.(string); ok {
|
||||
switch seasonalityStr {
|
||||
case "weekly":
|
||||
return anomalyV2.SeasonalityWeekly
|
||||
case "hourly":
|
||||
return anomalyV2.SeasonalityHourly
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return anomalyV2.SeasonalityDaily // default
|
||||
}
|
||||
|
||||
func createAnomalyProvider(aH *APIHandler, seasonality anomalyV2.Seasonality) anomalyV2.Provider {
|
||||
switch seasonality {
|
||||
case anomalyV2.SeasonalityWeekly:
|
||||
return anomalyV2.NewWeeklyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
case anomalyV2.SeasonalityHourly:
|
||||
return anomalyV2.NewHourlyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
default:
|
||||
return anomalyV2.NewDailyProvider(
|
||||
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](aH.Signoz.Querier),
|
||||
anomalyV2.WithLogger[*anomalyV2.DailyProvider](aH.Signoz.Instrumentation.Logger()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func (aH *APIHandler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
|
||||
seasonality := extractSeasonality(anomalyQuery)
|
||||
provider := createAnomalyProvider(aH, seasonality)
|
||||
|
||||
return provider.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{Params: queryRangeRequest})
|
||||
}
|
||||
|
||||
func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
|
||||
|
||||
bodyBytes, err := io.ReadAll(req.Body)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to read request body: %v", err))
|
||||
return
|
||||
}
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
|
||||
ctx := req.Context()
|
||||
|
||||
claims, err := authtypes.ClaimsFromContext(ctx)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
var queryRangeRequest qbtypes.QueryRangeRequest
|
||||
if err := json.NewDecoder(req.Body).Decode(&queryRangeRequest); err != nil {
|
||||
render.Error(rw, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to decode request body: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
stackTrace := string(debug.Stack())
|
||||
|
||||
queryJSON, _ := json.Marshal(queryRangeRequest)
|
||||
|
||||
aH.Signoz.Instrumentation.Logger().ErrorContext(ctx, "panic in QueryRange",
|
||||
"error", r,
|
||||
"user", claims.UserID,
|
||||
"payload", string(queryJSON),
|
||||
"stacktrace", stackTrace,
|
||||
)
|
||||
|
||||
render.Error(rw, errors.NewInternalf(
|
||||
errors.CodeInternal,
|
||||
"Something went wrong on our end. It's not you, it's us. Our team is notified about it. Reach out to support if issue persists.",
|
||||
))
|
||||
}
|
||||
}()
|
||||
|
||||
if err := queryRangeRequest.Validate(); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
|
||||
anomalies, err := aH.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
|
||||
if err != nil {
|
||||
render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
results := []any{}
|
||||
for _, item := range anomalies.Results {
|
||||
results = append(results, item)
|
||||
}
|
||||
|
||||
// Build step intervals from the anomaly query
|
||||
stepIntervals := make(map[string]uint64)
|
||||
if anomalyQuery.StepInterval.Duration > 0 {
|
||||
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
|
||||
}
|
||||
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: qbtypes.QueryData{
|
||||
Results: results,
|
||||
},
|
||||
Meta: qbtypes.ExecStats{
|
||||
StepIntervals: stepIntervals,
|
||||
},
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, finalResp)
|
||||
return
|
||||
} else {
|
||||
// regular query range request, let the querier handle it
|
||||
req.Body = io.NopCloser(bytes.NewBuffer(bodyBytes))
|
||||
aH.QuerierAPI.QueryRange(rw, req)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,7 +19,6 @@ import (
|
||||
"github.com/gorilla/handlers"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/app/api"
|
||||
"github.com/SigNoz/signoz/ee/query-service/integrations/gateway"
|
||||
"github.com/SigNoz/signoz/ee/query-service/rules"
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
@@ -72,11 +71,6 @@ type Server struct {
|
||||
|
||||
// NewServer creates and initializes Server
|
||||
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
|
||||
Provider: "memory",
|
||||
Memory: cache.Memory{
|
||||
@@ -170,12 +164,11 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
CloudIntegrationsController: cloudIntegrationsController,
|
||||
LogsParsingPipelineController: logParsingPipelineController,
|
||||
FluxInterval: config.Querier.FluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: config.Gateway.URL.String(),
|
||||
GlobalConfig: config.Global,
|
||||
}
|
||||
|
||||
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
|
||||
apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -237,11 +230,9 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.RegisterLogsRoutes(r, am)
|
||||
apiHandler.RegisterIntegrationRoutes(r, am)
|
||||
apiHandler.RegisterCloudIntegrationsRoutes(r, am)
|
||||
apiHandler.RegisterFieldsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV3Routes(r, am)
|
||||
apiHandler.RegisterInfraMetricsRoutes(r, am)
|
||||
apiHandler.RegisterQueryRangeV4Routes(r, am)
|
||||
apiHandler.RegisterQueryRangeV5Routes(r, am)
|
||||
apiHandler.RegisterWebSocketPaths(r, am)
|
||||
apiHandler.RegisterMessagingQueuesRoutes(r, am)
|
||||
apiHandler.RegisterThirdPartyApiRoutes(r, am)
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"net/http/httputil"
|
||||
)
|
||||
|
||||
func NewNoopProxy() (*httputil.ReverseProxy, error) {
|
||||
return &httputil.ReverseProxy{}, nil
|
||||
}
|
||||
@@ -1,66 +0,0 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"net/url"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
RoutePrefix string = "/api/gateway"
|
||||
AllowedPrefix []string = []string{"/v1/workspaces/me", "/v2/profiles/me", "/v2/deployments/me"}
|
||||
)
|
||||
|
||||
type proxy struct {
|
||||
url *url.URL
|
||||
stripPath string
|
||||
}
|
||||
|
||||
func NewProxy(u string, stripPath string) (*httputil.ReverseProxy, error) {
|
||||
url, err := url.Parse(u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
proxy := &proxy{url: url, stripPath: stripPath}
|
||||
|
||||
return &httputil.ReverseProxy{
|
||||
Rewrite: proxy.rewrite,
|
||||
ModifyResponse: proxy.modifyResponse,
|
||||
ErrorHandler: proxy.errorHandler,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p *proxy) rewrite(pr *httputil.ProxyRequest) {
|
||||
pr.SetURL(p.url)
|
||||
pr.SetXForwarded()
|
||||
pr.Out.URL.Path = cleanPath(strings.ReplaceAll(pr.Out.URL.Path, p.stripPath, ""))
|
||||
}
|
||||
|
||||
func (p *proxy) modifyResponse(res *http.Response) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *proxy) errorHandler(rw http.ResponseWriter, req *http.Request, err error) {
|
||||
rw.WriteHeader(http.StatusBadGateway)
|
||||
}
|
||||
|
||||
func cleanPath(p string) string {
|
||||
if p == "" {
|
||||
return "/"
|
||||
}
|
||||
if p[0] != '/' {
|
||||
p = "/" + p
|
||||
}
|
||||
np := path.Clean(p)
|
||||
if p[len(p)-1] == '/' && np != "/" {
|
||||
if len(p) == len(np)+1 && strings.HasPrefix(p, np) {
|
||||
np = p
|
||||
} else {
|
||||
np += "/"
|
||||
}
|
||||
}
|
||||
return np
|
||||
}
|
||||
@@ -1,61 +0,0 @@
|
||||
package gateway
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"net/url"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestProxyRewrite(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
url *url.URL
|
||||
stripPath string
|
||||
in *url.URL
|
||||
expected *url.URL
|
||||
}{
|
||||
{
|
||||
name: "SamePathAdded",
|
||||
url: &url.URL{Scheme: "http", Host: "backend", Path: "/path1"},
|
||||
stripPath: "/strip",
|
||||
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
|
||||
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/path1/path1"},
|
||||
},
|
||||
{
|
||||
name: "NoStripPathInput",
|
||||
url: &url.URL{Scheme: "http", Host: "backend"},
|
||||
stripPath: "",
|
||||
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
|
||||
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
|
||||
},
|
||||
{
|
||||
name: "NoStripPathPresentInReq",
|
||||
url: &url.URL{Scheme: "http", Host: "backend"},
|
||||
stripPath: "/not-found",
|
||||
in: &url.URL{Scheme: "http", Host: "localhost", Path: "/strip/path1"},
|
||||
expected: &url.URL{Scheme: "http", Host: "backend", Path: "/strip/path1"},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
proxy, err := NewProxy(tc.url.String(), tc.stripPath)
|
||||
require.NoError(t, err)
|
||||
inReq, err := http.NewRequest(http.MethodGet, tc.in.String(), nil)
|
||||
require.NoError(t, err)
|
||||
proxyReq := &httputil.ProxyRequest{
|
||||
In: inReq,
|
||||
Out: inReq.Clone(context.Background()),
|
||||
}
|
||||
proxy.Rewrite(proxyReq)
|
||||
|
||||
assert.Equal(t, tc.expected.Host, proxyReq.Out.URL.Host)
|
||||
assert.Equal(t, tc.expected.Scheme, proxyReq.Out.URL.Scheme)
|
||||
assert.Equal(t, tc.expected.Path, proxyReq.Out.URL.Path)
|
||||
assert.Equal(t, tc.expected.Query(), proxyReq.Out.URL.Query())
|
||||
}
|
||||
}
|
||||
@@ -15,7 +15,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/common"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
"github.com/SigNoz/signoz/pkg/transition"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
@@ -63,6 +63,8 @@ type AnomalyRule struct {
|
||||
seasonality anomaly.Seasonality
|
||||
}
|
||||
|
||||
var _ baserules.Rule = (*AnomalyRule)(nil)
|
||||
|
||||
func NewAnomalyRule(
|
||||
id string,
|
||||
orgID valuer.UUID,
|
||||
@@ -234,6 +236,11 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
|
||||
}
|
||||
}
|
||||
|
||||
hasData := len(queryResult.AnomalyScores) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
|
||||
@@ -285,6 +292,11 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
|
||||
|
||||
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)
|
||||
|
||||
hasData := len(queryResult.AnomalyScores) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
|
||||
@@ -480,7 +492,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
||||
continue
|
||||
}
|
||||
|
||||
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
|
||||
if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
|
||||
a.State = model.StateFiring
|
||||
a.FiredAt = ts
|
||||
state := model.StateFiring
|
||||
@@ -543,7 +555,7 @@ func (r *AnomalyRule) String() string {
|
||||
ar := ruletypes.PostableRule{
|
||||
AlertName: r.Name(),
|
||||
RuleCondition: r.Condition(),
|
||||
EvalWindow: ruletypes.Duration(r.EvalWindow()),
|
||||
EvalWindow: r.EvalWindow(),
|
||||
Labels: r.Labels().Map(),
|
||||
Annotations: r.Annotations().Map(),
|
||||
PreferredChannels: r.PreferredChannels(),
|
||||
|
||||
268
ee/query-service/rules/anomaly_test.go
Normal file
@@ -0,0 +1,268 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
|
||||
// We need this because the anomaly provider makes 6 different queries for various
|
||||
// time periods (current, past period, current season, past season, past 2 seasons,
|
||||
// past 3 seasons), making it cumbersome to create mock data.
|
||||
type mockAnomalyProvider struct {
|
||||
responses []*anomaly.GetAnomaliesResponse
|
||||
callCount int
|
||||
}
|
||||
|
||||
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
|
||||
if m.callCount >= len(m.responses) {
|
||||
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
|
||||
}
|
||||
resp := m.responses[m.callCount]
|
||||
m.callCount++
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
|
||||
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
|
||||
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalWindow := valuer.MustParseTextDuration("5m")
|
||||
evalTime := baseTime.Add(5 * time.Minute)
|
||||
|
||||
target := 500.0
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test anomaly no data",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: RuleTypeAnomaly,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: evalWindow,
|
||||
Frequency: valuer.MustParseTextDuration("1m"),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
Target: &target,
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"A": {
|
||||
QueryName: "A",
|
||||
Expression: "A",
|
||||
DataSource: v3.DataSourceMetrics,
|
||||
Temporality: v3.Unspecified,
|
||||
},
|
||||
},
|
||||
},
|
||||
SelectedQuery: "A",
|
||||
Seasonality: "daily",
|
||||
Thresholds: &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{{
|
||||
Name: "Test anomaly no data",
|
||||
TargetValue: &target,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
responseNoData := &anomaly.GetAnomaliesResponse{
|
||||
Results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
AnomalyScores: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
description string
|
||||
alertOnAbsent bool
|
||||
expectAlerts int
|
||||
}{
|
||||
{
|
||||
description: "AlertOnAbsent=false",
|
||||
alertOnAbsent: false,
|
||||
expectAlerts: 0,
|
||||
},
|
||||
{
|
||||
description: "AlertOnAbsent=true",
|
||||
alertOnAbsent: true,
|
||||
expectAlerts: 1,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.description, func(t *testing.T) {
|
||||
postableRule.RuleCondition.AlertOnAbsent = c.alertOnAbsent
|
||||
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
||||
|
||||
rule, err := NewAnomalyRule(
|
||||
"test-anomaly-rule",
|
||||
valuer.GenerateUUID(),
|
||||
&postableRule,
|
||||
reader,
|
||||
nil,
|
||||
logger,
|
||||
nil,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule.provider = &mockAnomalyProvider{
|
||||
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
|
||||
}
|
||||
|
||||
alertsFound, err := rule.Eval(context.Background(), evalTime)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, c.expectAlerts, alertsFound)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
|
||||
// Test missing data alert with AbsentFor grace period
|
||||
// 1. Call Eval with data at time t1, to populate lastTimestampWithDatapoints
|
||||
// 2. Call Eval without data at time t2
|
||||
// 3. Alert fires only if t2 - t1 > AbsentFor
|
||||
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalWindow := valuer.MustParseTextDuration("5m")
|
||||
|
||||
// Set target higher than test data so regular threshold alerts don't fire
|
||||
target := 500.0
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test anomaly no data with AbsentFor",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: RuleTypeAnomaly,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: evalWindow,
|
||||
Frequency: valuer.MustParseTextDuration("1m"),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
AlertOnAbsent: true,
|
||||
Target: &target,
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"A": {
|
||||
QueryName: "A",
|
||||
Expression: "A",
|
||||
DataSource: v3.DataSourceMetrics,
|
||||
Temporality: v3.Unspecified,
|
||||
},
|
||||
},
|
||||
},
|
||||
SelectedQuery: "A",
|
||||
Seasonality: "daily",
|
||||
Thresholds: &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{{
|
||||
Name: "Test anomaly no data with AbsentFor",
|
||||
TargetValue: &target,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
responseNoData := &anomaly.GetAnomaliesResponse{
|
||||
Results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
AnomalyScores: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
description string
|
||||
absentFor uint64
|
||||
timeBetweenEvals time.Duration
|
||||
expectAlertOnEval2 int
|
||||
}{
|
||||
{
|
||||
description: "WithinGracePeriod",
|
||||
absentFor: 5,
|
||||
timeBetweenEvals: 4 * time.Minute,
|
||||
expectAlertOnEval2: 0,
|
||||
},
|
||||
{
|
||||
description: "AfterGracePeriod",
|
||||
absentFor: 5,
|
||||
timeBetweenEvals: 6 * time.Minute,
|
||||
expectAlertOnEval2: 1,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.description, func(t *testing.T) {
|
||||
postableRule.RuleCondition.AbsentFor = c.absentFor
|
||||
|
||||
t1 := baseTime.Add(5 * time.Minute)
|
||||
t2 := t1.Add(c.timeBetweenEvals)
|
||||
|
||||
responseWithData := &anomaly.GetAnomaliesResponse{
|
||||
Results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
AnomalyScores: []*v3.Series{
|
||||
{
|
||||
Labels: map[string]string{"test": "label"},
|
||||
Points: []v3.Point{
|
||||
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
|
||||
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
||||
|
||||
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule.provider = &mockAnomalyProvider{
|
||||
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
|
||||
}
|
||||
|
||||
alertsFound1, err := rule.Eval(context.Background(), t1)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 0, alertsFound1, "First eval with data should not alert")
|
||||
|
||||
alertsFound2, err := rule.Eval(context.Background(), t2)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, c.expectAlertOnEval2, alertsFound2)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
rules = append(rules, tr)
|
||||
|
||||
// create ch rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
|
||||
|
||||
@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
rules = append(rules, pr)
|
||||
|
||||
// create promql rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
|
||||
// create anomaly rule
|
||||
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
rules = append(rules, ar)
|
||||
|
||||
// create anomaly rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else {
|
||||
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
|
||||
@@ -213,8 +213,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
return alertsFound, nil
|
||||
}
|
||||
|
||||
// newTask returns an appropriate group for
|
||||
// rule type
|
||||
// newTask returns an appropriate group for the rule type
|
||||
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
|
||||
if taskType == baserules.TaskTypeCh {
|
||||
return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)
|
||||
|
||||
@@ -3,6 +3,7 @@ package httpzeus
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
@@ -10,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/types/zeustypes"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
@@ -119,8 +121,13 @@ func (provider *Provider) PutMeters(ctx context.Context, key string, data []byte
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *Provider) PutProfile(ctx context.Context, key string, body []byte) error {
|
||||
_, err := provider.do(
|
||||
func (provider *Provider) PutProfile(ctx context.Context, key string, profile *zeustypes.PostableProfile) error {
|
||||
body, err := json.Marshal(profile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(
|
||||
ctx,
|
||||
provider.config.URL.JoinPath("/v2/profiles/me"),
|
||||
http.MethodPut,
|
||||
@@ -131,10 +138,15 @@ func (provider *Provider) PutProfile(ctx context.Context, key string, body []byt
|
||||
return err
|
||||
}
|
||||
|
||||
func (provider *Provider) PutHost(ctx context.Context, key string, body []byte) error {
|
||||
_, err := provider.do(
|
||||
func (provider *Provider) PutHost(ctx context.Context, key string, host *zeustypes.PostableHost) error {
|
||||
body, err := json.Marshal(host)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(
|
||||
ctx,
|
||||
provider.config.URL.JoinPath("/v2/deployments/me/hosts"),
|
||||
provider.config.URL.JoinPath("/v2/deployments/me/host"),
|
||||
http.MethodPut,
|
||||
key,
|
||||
body,
|
||||
@@ -169,21 +181,28 @@ func (provider *Provider) do(ctx context.Context, url *url.URL, method string, k
|
||||
return body, nil
|
||||
}
|
||||
|
||||
return nil, provider.errFromStatusCode(response.StatusCode)
|
||||
errorMessage := gjson.GetBytes(body, "error").String()
|
||||
if errorMessage == "" {
|
||||
errorMessage = "an unknown error occurred"
|
||||
}
|
||||
|
||||
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
|
||||
}
|
||||
|
||||
// This can be taken down to the client package
|
||||
func (provider *Provider) errFromStatusCode(statusCode int) error {
|
||||
func (provider *Provider) errFromStatusCode(statusCode int, errorMessage string) error {
|
||||
switch statusCode {
|
||||
case http.StatusBadRequest:
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "bad request")
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
|
||||
case http.StatusUnauthorized:
|
||||
return errors.Newf(errors.TypeUnauthenticated, errors.CodeUnauthenticated, "unauthenticated")
|
||||
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
|
||||
case http.StatusForbidden:
|
||||
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "forbidden")
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
|
||||
case http.StatusNotFound:
|
||||
return errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "not found")
|
||||
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
|
||||
case http.StatusConflict:
|
||||
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
|
||||
}
|
||||
|
||||
return errors.Newf(errors.TypeInternal, errors.CodeInternal, "internal")
|
||||
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
|
||||
}
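A standalone sketch of the new error-message handling above: the `{"error": "..."}` body shape is implied by the gjson path the provider now reads, and the sample payload here is illustrative only.

```go
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	// Hypothetical upstream error body; only the "error" field is assumed.
	body := []byte(`{"error": "license key has expired"}`)

	errorMessage := gjson.GetBytes(body, "error").String()
	if errorMessage == "" {
		errorMessage = "an unknown error occurred"
	}

	fmt.Println(errorMessage) // license key has expired
}
```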
|
||||
|
||||
74
frontend/.cursor/rules/testing-conventions.mdc
Normal file
@@ -0,0 +1,74 @@
|
||||
---
|
||||
description: Core testing conventions - imports, rendering, MSW, interactions, queries
|
||||
globs: **/*.test.{ts,tsx}
|
||||
alwaysApply: false
|
||||
---
|
||||
|
||||
# Testing Conventions
|
||||
|
||||
## Imports
|
||||
|
||||
Always import from the test harness, never directly from `@testing-library/react`:
|
||||
|
||||
```ts
|
||||
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
|
||||
import { server, rest } from 'mocks-server/server';
|
||||
```
|
||||
|
||||
## Router
|
||||
|
||||
Use the built-in router in `render`:
|
||||
|
||||
```ts
|
||||
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
|
||||
```
|
||||
|
||||
Only mock `useLocation` / `useParams` if the test depends on their values.
|
||||
|
||||
## MSW
|
||||
|
||||
Global MSW server runs automatically. Override per-test:
|
||||
|
||||
```ts
|
||||
server.use(
|
||||
rest.get('*/api/v1/foo', (_req, res, ctx) =>
|
||||
res(ctx.status(200), ctx.json({ ok: true })))
|
||||
);
|
||||
```
|
||||
|
||||
Keep large response fixtures in `mocks-server/__mockdata_`.
|
||||
|
||||
## Interactions
|
||||
|
||||
- Prefer `userEvent` for real user interactions (click, type, select, tab).
|
||||
- Use `fireEvent` only for low-level events not covered by `userEvent` (e.g., scroll, resize). Wrap in `act(...)` if needed.
|
||||
- Always `await` interactions:
|
||||
|
||||
```ts
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
await user.click(screen.getByRole('button', { name: /save/i }));
|
||||
```
|
||||
|
||||
## Timers
|
||||
|
||||
No global fake timers. Per-test only, for debounce/throttle:
|
||||
|
||||
```ts
|
||||
jest.useFakeTimers();
|
||||
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
|
||||
await user.type(screen.getByRole('textbox'), 'query');
|
||||
jest.advanceTimersByTime(400);
|
||||
jest.useRealTimers();
|
||||
```
|
||||
|
||||
## Queries
|
||||
|
||||
Prefer accessible queries: `getByRole` > `findByRole` > `getByLabelText` > visible text > `data-testid` (last resort).
|
||||
|
||||
## Anti-patterns
|
||||
|
||||
- Never import from `@testing-library/react` directly
|
||||
- Never use global fake timers
|
||||
- Never wrap `render` in `act(...)`
|
||||
- Never mock infra dependencies locally (router, react-query)
|
||||
- Limit to 3-5 focused tests per file
|
||||
54
frontend/.cursor/rules/testing-mock-strategy.mdc
Normal file
@@ -0,0 +1,54 @@
---
description: When to use global vs local mocks in tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---

# Mock Strategy

## Use Global Mocks For

High-frequency dependencies (20+ test files):
- Core infrastructure: react-router-dom, react-query, antd
- Browser APIs: ResizeObserver, matchMedia, localStorage
- Utility libraries: date-fns, lodash

Available global mock files (from jest.config.ts):
- `uplot` -> `__mocks__/uplotMock.ts`

## Use Local Mocks For

- Business logic dependencies (API endpoints, custom hooks, domain components)
- Test-specific behavior (different data per test, error scenarios, loading states)

## Decision Tree

```
Used in 20+ test files?
  YES -> Global mock
  NO -> Business logic or test-specific?
    YES -> Local mock
    NO -> Consider global if usage grows
```

## Correct Usage

```ts
// Global mocks are already available - just import
import { useLocation } from 'react-router-dom';

// Local mocks for business logic
jest.mock('../api/tracesService', () => ({
  getTraces: jest.fn(() => mockTracesData),
}));
```

## Anti-patterns

```ts
// Never re-mock globally mocked dependencies locally
jest.mock('react-router-dom', () => ({ ... }));

// Never put test-specific data in global mocks
jest.mock('../api/tracesService', () => ({ getTraces: jest.fn(() => specificTestData) }));
```
54
frontend/.cursor/rules/testing-type-safety.mdc
Normal file
@@ -0,0 +1,54 @@
---
description: TypeScript type safety requirements for Jest tests
globs: **/*.test.{ts,tsx}
alwaysApply: false
---

# TypeScript Type Safety in Tests

All Jest tests must be fully type-safe. Never use `any`.

## Mock Function Typing

```ts
// Use jest.mocked for module mocks
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);

// Use jest.MockedFunction for standalone mocks
const mockFetch = jest.fn() as jest.MockedFunction<(id: number) => Promise<User>>;
```

## Mock Data

Define interfaces for all mock data:

```ts
const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };

const mockProps: ComponentProps = {
	title: 'Test',
	data: [mockUser],
	onSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
};
```

## Hook Mocking Pattern

```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */);
```

Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.
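The helpers are referenced here but not defined in this file; a minimal sketch of what a typed `rqSuccess` could look like, assuming it wraps a React Query `UseQueryResult` (the repo's real helpers may carry a richer shape):

```ts
import type { UseQueryResult } from 'react-query';

// Hypothetical helper: builds a "success" query result around the given data.
export function rqSuccess<TData>(data: TData): Partial<UseQueryResult<TData, Error>> {
	return {
		data,
		isSuccess: true,
		isLoading: false,
		isError: false,
		error: null,
	};
}

// Usage in a test (cast only because the sketch returns a Partial):
// mockUseFoo.mockReturnValue(rqSuccess({ id: 1 }) as UseQueryResult<Foo, Error>);
```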
## Checklist

- All mock functions use `jest.MockedFunction<T>` or `jest.mocked()`
- All mock data has proper interfaces
- No `any` types in test files
- Component props are typed
- API response types are defined
@@ -1,484 +0,0 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.

# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.

# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.

**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid `any` type – use proper typing instead

# Unit Testing Focus
- Focus on critical functionality (business logic, utility functions, component behavior)
- Mock dependencies (API calls, external modules) before imports
- Test multiple data scenarios (valid inputs, invalid inputs, edge cases)
- Write maintainable tests with descriptive names grouped in describe blocks (a minimal skeleton follows below)
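A minimal skeleton of that structure; the subject under test is a placeholder, not code from this repo:

```ts
// Placeholder subject under test; replace with the real unit.
const add = (a: number, b: number): number => a + b;

describe('add', () => {
	it('handles valid inputs', () => {
		expect(add(2, 3)).toBe(5);
	});

	it('handles edge cases', () => {
		expect(add(Number.MAX_SAFE_INTEGER, 0)).toBe(Number.MAX_SAFE_INTEGER);
	});
});
```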
# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)

**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases

**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`

# Repo-specific Testing Conventions

## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.

## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.

## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */ as any);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results.

## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
  rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.

## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```

```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```

## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```

## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
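A small illustration of that preference order; the rendered markup is assumed for the example:

```ts
// Best: accessible role plus name
await screen.findByRole('button', { name: /save/i });

// Fallback: visible text
screen.getByText(/no data/i);

// Last resort: a test id
screen.getByTestId('traces-table');
```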
# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';

describe('MyComponent', () => {
  it('renders and interacts', async () => {
    const user = userEvent.setup({ pointerEventsCheck: 0 });

    server.use(
      rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
    );

    render(<MyComponent />, undefined, { initialRoute: '/foo' });

    expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
    await user.click(screen.getByRole('button', { name: /refresh/i }));
    await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
  });
});
```

# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates

# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3–5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# TypeScript Type Safety Examples

## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
  id: number;
  name: string;
  email: string;
}

interface ApiResponse<T> {
  data: T;
  status: number;
  message: string;
}

// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;

// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
  data: { id: 1, name: 'John Doe', email: 'john@example.com' },
  status: 200,
  message: 'Success'
});

// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```

## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
  title: string;
  data: User[];
  onUserSelect: (user: User) => void;
  isLoading?: boolean;
}

const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
  // Component implementation
};

describe('TestComponent', () => {
  it('should render with proper props', () => {
    // Arrange - Type the props properly
    const mockProps: ComponentProps = {
      title: 'Test Title',
      data: [{ id: 1, name: 'John', email: 'john@example.com' }],
      onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
      isLoading: false
    };

    // Act
    render(<TestComponent {...mockProps} />);

    // Assert
    expect(screen.getByText('Test Title')).toBeInTheDocument();
  });
});
```

## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
  user: User | null;
  loading: boolean;
  error: string | null;
  refetch: () => void;
}

const useUserData = (id: number): UseUserDataReturn => {
  // Hook implementation
};

describe('useUserData', () => {
  it('should return user data with proper typing', () => {
    // Arrange
    const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
    mockFetchUser.mockResolvedValue({
      data: mockUser,
      status: 200,
      message: 'Success'
    });

    // Act
    const { result } = renderHook(() => useUserData(1));

    // Assert
    expect(result.current.user).toEqual(mockUser);
    expect(result.current.loading).toBe(false);
    expect(result.current.error).toBeNull();
  });
});
```

## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
  pathname: '/traces',
  search: '',
  hash: '',
  state: null,
  key: 'test-key',
  ...overrides,
});

// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```

# TypeScript Configuration for Jest

## Required Jest Configuration
```json
// jest.config.ts
{
  "preset": "ts-jest/presets/js-with-ts-esm",
  "globals": {
    "ts-jest": {
      "useESM": true,
      "isolatedModules": true,
      "tsconfig": "<rootDir>/tsconfig.jest.json"
    }
  },
  "extensionsToTreatAsEsm": [".ts", ".tsx"],
  "moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```

## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "types": ["jest", "@testing-library/jest-dom"],
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
    "moduleResolution": "node"
  },
  "include": [
    "src/**/*",
    "**/*.test.ts",
    "**/*.test.tsx",
    "__mocks__/**/*"
  ]
}
```

## Common Type Safety Patterns

### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;

// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```

### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
  data: T;
  status: number;
}

const mockFetchData = jest.fn() as jest.MockedFunction<
  <T>(endpoint: string) => Promise<MockApiResponse<T>>
>;

// Usage
mockFetchData<User>('/users').mockResolvedValue({
  data: { id: 1, name: 'John' },
  status: 200
});
```

### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';

type TestComponentProps = ComponentProps<typeof TestComponent>;

const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
  const defaultProps: TestComponentProps = {
    title: 'Test',
    data: [],
    onSelect: jest.fn(),
    ...props
  };

  return render(<TestComponent {...defaultProps} />);
};
```

### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
  message: string;
  code: number;
  details?: Record<string, unknown>;
}

const mockApiError: ApiError = {
  message: 'API Error',
  code: 500,
  details: { endpoint: '/users' }
};

mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```

## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed

# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│   ├─ react-router-dom
│   ├─ react-query
│   ├─ antd components
│   └─ browser APIs
│
└─ NO → Is it business logic?
    ├─ YES → Use Local Mock
    │   ├─ API endpoints
    │   ├─ Custom hooks
    │   └─ Domain components
    │
    └─ NO → Is it test-specific?
        ├─ YES → Use Local Mock
        │   ├─ Error scenarios
        │   ├─ Loading states
        │   └─ Specific data
        │
        └─ NO → Consider Global Mock
            └─ If it becomes frequently used
```

# Common Anti-Patterns to Avoid

❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```

❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
  getTraces: jest.fn(() => specificTestData)
}));
```

✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```

✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
  getTraces: jest.fn(() => mockTracesData)
}));
```
@@ -61,6 +61,8 @@ module.exports = {
curly: 'error', // Requires curly braces for all control statements
eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error
// TODO: Change this to error in May 2026
'max-params': ['warn', 3], // a function can have max 3 params after which it should become an object

// TypeScript rules
'@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions
@@ -116,23 +118,22 @@ module.exports = {
},
],
'import/no-extraneous-dependencies': ['error', { devDependencies: true }], // Prevents importing packages not in package.json
// 'import/no-cycle': 'warn', // TODO: Enable later to detect circular dependencies
'import/no-cycle': 'warn', // Warns about circular dependencies

// TODO: Enable in separate PR with auto fixes
// // Import sorting rules
// 'simple-import-sort/imports': [
//   'error',
//   {
//     groups: [
//       ['^react', '^@?\\w'], // React first, then external packages
//       ['^@/'], // Absolute imports with @ alias
//       ['^\\u0000'], // Side effect imports (import './file')
//       ['^\\.'], // Relative imports
//       ['^.+\\.s?css$'], // Style imports
//     ],
//   },
// ],
// 'simple-import-sort/exports': 'error', // Auto-sorts exports
// Import sorting rules
'simple-import-sort/imports': [
  'error',
  {
    groups: [
      ['^react', '^@?\\w'], // React first, then external packages
      ['^@/'], // Absolute imports with @ alias
      ['^\\u0000'], // Side effect imports (import './file')
      ['^\\.'], // Relative imports
      ['^.+\\.s?css$'], // Style imports
    ],
  },
],
'simple-import-sort/exports': 'error', // Auto-sorts exports

// Prettier - code formatting
'prettier/prettier': [
@@ -147,6 +148,19 @@ module.exports = {
'sonarjs/no-duplicate-string': 'off', // Disabled - can be noisy (enable periodically to check)
},
overrides: [
{
  files: [
    '**/*.test.{js,jsx,ts,tsx}',
    '**/*.spec.{js,jsx,ts,tsx}',
    '**/__tests__/**/*.{js,jsx,ts,tsx}',
  ],
  rules: {
    // Tests often have intentional duplication and complexity - disable SonarJS rules
    'sonarjs/cognitive-complexity': 'off', // Tests can be complex
    'sonarjs/no-identical-functions': 'off', // Similar test patterns are OK
    'sonarjs/no-small-switch': 'off', // Small switches are OK in tests
  },
},
{
  files: ['src/api/generated/**/*.ts'],
  rules: {
@@ -154,7 +168,6 @@ module.exports = {
'@typescript-eslint/explicit-module-boundary-types': 'off',
'no-nested-ternary': 'off',
'@typescript-eslint/no-unused-vars': 'warn',
'sonarjs/no-duplicate-string': 'off',
},
},
],
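The `simple-import-sort/imports` groups enabled above imply a concrete ordering. A sketch of an import block that satisfies them; the file names are illustrative, not taken from this diff:

```ts
// 1. react first, then other external packages
import { useMemo } from 'react';
import { Button } from 'antd';

// 2. absolute imports using the @ alias
import { PANEL_TYPES } from '@/constants/queryBuilder';

// 3. side-effect imports
import './wdyr';

// 4. relative imports
import { formatValue } from './utils';

// 5. style imports
import './Widget.styles.scss';
```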
@@ -2,6 +2,11 @@

Embrace the spirit of collaboration and contribute to the success of our open-source project by adhering to these frontend development guidelines with precision and passion.

### Export Style

- **React components** (`src/components/`, `src/container/`, `src/pages/`): Prefer **default exports** for the main component in each file
- **Utilities, hooks, APIs, types, constants** (`src/utils/`, `src/hooks/`, `src/api/`, `src/lib/`, `src/types/`, `src/constants/`): Prefer **named exports** for better tree-shaking and explicit imports (see the sketch just below)
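A short sketch of the two styles side by side; the file names and components are illustrative:

```ts
// src/components/StatusBadge/StatusBadge.tsx - component: default export
function StatusBadge({ label }: { label: string }): JSX.Element {
	return <span className="status-badge">{label}</span>;
}

export default StatusBadge;

// src/utils/formatDuration.ts - utility: named export
export function formatDuration(ms: number): string {
	return ms >= 1000 ? `${(ms / 1000).toFixed(1)}s` : `${ms}ms`;
}
```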
### React and Components

- Strive to create small and modular components, ensuring they are divided into individual pieces for improved maintainability and reusability.
@@ -12,6 +12,8 @@ export interface MockUPlotInstance {
export interface MockUPlotPaths {
  spline: jest.Mock;
  bars: jest.Mock;
  linear: jest.Mock;
  stepped: jest.Mock;
}

// Create mock instance methods
@@ -23,10 +25,23 @@ const createMockUPlotInstance = (): MockUPlotInstance => ({
  setSeries: jest.fn(),
});

// Create mock paths
const mockPaths: MockUPlotPaths = {
  spline: jest.fn(),
  bars: jest.fn(),
// Path builder: (self, seriesIdx, idx0, idx1) => paths or null
const createMockPathBuilder = (name: string): jest.Mock =>
  jest.fn(() => ({
    name, // To test if the correct pathBuilder is used
    stroke: jest.fn(),
    fill: jest.fn(),
    clip: jest.fn(),
  }));

// Create mock paths - linear, spline, stepped needed by UPlotSeriesBuilder.getPathBuilder
const mockPaths = {
  spline: jest.fn(() => createMockPathBuilder('spline')),
  bars: jest.fn(() => createMockPathBuilder('bars')),
  linear: jest.fn(() => createMockPathBuilder('linear')),
  stepped: jest.fn((opts?: { align?: number }) =>
    createMockPathBuilder(`stepped-(${opts?.align ?? 0})`),
  ),
};

// Mock static methods
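Following the hunk above, a hedged sketch of how a test might consume these path-builder mocks; the `UPlotSeriesBuilder.getPathBuilder` caller mentioned in the comment is assumed, not shown in this diff:

```ts
// Each factory returns a jest.fn() path builder whose result carries a `name`,
// so a test can assert which builder was selected.
const splineBuilder = mockPaths.spline();
expect(splineBuilder().name).toBe('spline');

const steppedBuilder = mockPaths.stepped({ align: 1 });
expect(steppedBuilder().name).toBe('stepped-(1)');
```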
@@ -17,6 +17,8 @@ const config: Config.InitialOptions = {
  '^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
  '^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
  '^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
  '^@signozhq/icons$':
    '<rootDir>/node_modules/@signozhq/icons/dist/index.esm.js',
},
globals: {
extensionsToTreatAsEsm: ['.ts'],
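Those mapper entries mean any import path ending in `useSafeNavigate` resolves to the shared mock during tests. A rough sketch of the effect; the mock's actual export shape is an assumption here:

```ts
// In a test file this resolves to USE_SAFE_NAVIGATE_MOCK_PATH, not the real hook.
import { useSafeNavigate } from 'hooks/useSafeNavigate';

// Assuming the mock exposes a jest.fn()-backed navigate, tests can assert
// navigation without re-mocking the hook locally.
const { safeNavigate } = useSafeNavigate();
expect(jest.isMockFunction(safeNavigate)).toBe(true);
```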
@@ -19,7 +19,7 @@
"commitlint": "commitlint --edit $1",
"test": "jest",
"test:changedsince": "jest --changedSince=main --coverage --silent",
"generate:api": "orval --config ./orval.config.ts && sh scripts/post-types-generation.sh && prettier --write src/api/generated && (eslint --fix src/api/generated || true)"
"generate:api": "orval --config ./orval.config.ts && sh scripts/post-types-generation.sh"
},
"engines": {
"node": ">=16.15.0"
@@ -51,11 +51,14 @@
"@signozhq/checkbox": "0.0.2",
"@signozhq/combobox": "0.0.2",
"@signozhq/command": "0.0.0",
"@signozhq/design-tokens": "1.1.4",
"@signozhq/design-tokens": "2.1.1",
"@signozhq/icons": "0.1.0",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
"@signozhq/radio-group": "0.0.2",
"@signozhq/resizable": "0.0.0",
"@signozhq/sonner": "0.1.0",
"@signozhq/switch": "0.0.2",
"@signozhq/table": "0.3.7",
"@signozhq/tooltip": "0.0.2",
"@tanstack/react-table": "8.20.6",
@@ -105,6 +108,7 @@
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2",
"immer": "11.1.3",
"jest": "^27.5.1",
"js-base64": "^3.7.2",
"less": "^4.1.2",
frontend/public/Logos/convex-logo.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="184" height="188" fill="none" viewBox="0 0 184 188"><path fill="#f3b01c" d="M108.092 130.021c18.166-2.018 35.293-11.698 44.723-27.854-4.466 39.961-48.162 65.218-83.83 49.711-3.286-1.425-6.115-3.796-8.056-6.844-8.016-12.586-10.65-28.601-6.865-43.135 10.817 18.668 32.81 30.111 54.028 28.122"/><path fill="#8d2676" d="M53.401 90.174c-7.364 17.017-7.682 36.94 1.345 53.336-31.77-23.902-31.423-75.052-.388-98.715 2.87-2.187 6.282-3.485 9.86-3.683 14.713-.776 29.662 4.91 40.146 15.507-21.3.212-42.046 13.857-50.963 33.555"/><path fill="#ee342f" d="M114.637 61.855C103.89 46.87 87.069 36.668 68.639 36.358c35.625-16.17 79.446 10.047 84.217 48.807.444 3.598-.139 7.267-1.734 10.512-6.656 13.518-18.998 24.002-33.42 27.882 10.567-19.599 9.263-43.544-3.065-61.704"/></svg>
After: Size 811 B

frontend/public/Logos/google-adk.svg (new file, 3 lines)
File diff suppressed because one or more lines are too long (After: Size 11 KiB)

frontend/public/Logos/groq.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg fill="currentColor" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Groq</title><path d="M12.036 2c-3.853-.035-7 3-7.036 6.781-.035 3.782 3.055 6.872 6.908 6.907h2.42v-2.566h-2.292c-2.407.028-4.38-1.866-4.408-4.23-.029-2.362 1.901-4.298 4.308-4.326h.1c2.407 0 4.358 1.915 4.365 4.278v6.305c0 2.342-1.944 4.25-4.323 4.279a4.375 4.375 0 01-3.033-1.252l-1.851 1.818A7 7 0 0012.029 22h.092c3.803-.056 6.858-3.083 6.879-6.816v-6.5C18.907 4.963 15.817 2 12.036 2z"></path></svg>
After: Size 568 B

frontend/public/Logos/langtrace.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="200" height="200" viewBox="0 0 200 200" preserveAspectRatio="xMidYMid meet">
<image width="200" height="200" href="data:image/webp;base64,UklGRuINAABXRUJQVlA4WAoAAAAQAAAAxwAAxwAAQUxQSEMFAAABoLz+/6M4+7ogM2lSYJ6SJrj4L3h1+x5SBO+Snph+38I8vfiU9N6jsEdTPG16Ipue7Jree0QYnirID2f5HmxTfr/v7xoREwBc9nwVjaf0/bnzd599NiqVaaxUyqVnty/l9uupeFQDGqs9wzkDbTbO6P0qaQKDQ2NTJjrULGZHBgMkCS47Y6Lj2ZllGi3U2PeFKrq0WvghplAhvMtAlxvbIgSYuy5fQw7W8hvmii1yzERuskxYWEpyvIpcZfmkIqKQ/h45/DndKRptG0NOs21+kcxP/40cL6dDoujYWEHO/73VK4T+Igpwspd/czIoyIyfb8qaVyjMF2sUfnmWPEahPkl4eHUYhbubT91TKOCHvRxKmChkM84b3+8MBc3SPq7Mvo4Cv65ypPcjCr00wI1FDAXPFnFi1ETx6zzwnkASZr2uU68hES+pLvOfQzKeC7pKe4OEfKO5SJtAUk5orgm+QWK+8btEPYfkPKe6wnsJCXrN64YskvS4C3Qk6qjjFiFVzUUOG2BkQdbjKLWEhP0420G+60ja6z7npJG4vzsmzqjDEg7pMZG8ZrczHiKBp5zg2Y0kPuyxL4FEXmyb8pRKjxW7ViOZV9vkf0GnV3PsySChM7b0Iqn7bPBO0qrYYd1WJPZGy0J/U6sy36o0kvtXi/xlev2lWbMNCb7Nkk5GMRayIo0k1y1QPtPsvdJeEomebC9PtfG2woxq1Ug7GST7sTbmMrqZc1vbgIRf11qecvmWIjXK1cKtbEPS72pBMWhnqM1iSPxYsx+o912zAvUKTbQq9arBRsuQ/Msanabf6QYBRj8zUDeIEjhYNywDQ3VZGRirK8rAJACopgyYKkAfSmEPgC4HwwCn5SAHYMiB4dFQEr+KykI0LgvxlCykdFnQ98nC/pws5C7JwvnbsnD3mSw8K8nCp7IsGBVZqMgDyuL0tCxgRRYq8mDIQvmTLJSeycKzu7Jw+7wsXMrJQm6/LOzTZUFPyUIqLgvxqCxEv5IFzWPIgQGQk4PTAMNyoAP0yEEfgGrKgKkCwKQMFAEAxmQgWzckA8N1gzIwWBcw6ccCdXCafnlouIx+yxoFq9Srao2gQL0CNP2Oej80i1Ev1kw1aGcozWAX7bZBi+Ea5WqRViBPuTy0vI5yG1qba9KNzW0NjtEtA21GqlRj4XZgnGp5aDtJtWR7ynuafVbaA51mabAwxCjGOq2AbRTbBpZqf9Gr7LcGfqVXGiyeX6HW3yGrYCO1toLlHUVaTXqtgz5a9YKdGUplwNY5r+j0wm8PrKbTarBZeUylp4pdsJhKCbDdc5hGuz32AUxR6CE4stukj9njDEgw6rA4OPV36qTBsb7rtLnucw7M/kiZkgpO7mF0YQPg7EUmWRaB00epooPzj9MkCy70XqPIJa8bQD1Hj3MquNP/hhpvguBWbYIWExq4V3tDiTcauDl4jg7n/OBu9RIVrqngdm+WBie8wEGdAOYo8HEREx1bBLwcKIntYy/wU70usuuzgae+NBMV+90HnI2bYjITwN/ehyKa6gYu7xbPYeC0J/FELI+XeHgFoKx5IY5XaxTguj8jiswc4H7vpAiK/SBC79a/eVfZ2AGCDKXLPPs7PR8E6t/GeMW2aSDYzvRnHr3XQyBgJZlnfKmOJxUQdTjD+GEei4DQ527I13hQy6+bC+KPbDPcZuwKAxGV2A+Fqluqhe9jKpBSW5ZnzjPPLAsCRQODI9mi6RRzamxoMACUVfv1M4ZdRm64RwUaa9F4St+fu3T7WalcqeB0pWJ8fnb3fG6/nopHv/IAlwEAVlA4IHgIAACwOACdASrIAMgAPm0yk0YkIyGhL7cowIANiWlu4ME0Z2dc36Rf1TwF/nn82/JL256O/rLnEfsT+C/sHnB3g+7b+q9QL8a/on6Tb4Tj/+M9ALu//uu6A9B/rT/mfcA/Tv/W+g/8e8Ab6l/afYA/iX9A/6n+T9RP/H/vvnE/Mv7r/0P8N8CP8e/m3/O/s/an9Bz9JzCMO+Myw4dZdESvjaIP6nMyViAm4nbqrdpXzuF+/VTRlZPwI47fmfvDl5hbzvvZesvGUKjN7ws4T3LThWfoRQ4AoAA8e9lYL1nVnC85qq4mvj9Iph8YxivL/rrIH+nx3GNtE7c2oXhy5gEx47Z8sNva+odGL0UOFre7a/8pcCVRR+VAqOA+CSTUwrA/FB3RytCzvsm/UJ3htyMkuiFBPkIQkgeyZRBek+/i/YVS2uzhmupStTtUD66oGcS5+EVAH8fWxWkDLcyezIzzEhXFpHDKBmiU8U3CGZPeURs5O40NHmZ1yuL/LSebx3inBTn4fScXY5NtGQmTuvJRgK1ZkPKfur71V57K9aGxoHcFJWEWoT+wdJqn/+CEXgxOXAsN7CY9yIhMXtIKmxfysDoOvJcu/r+kfQ+KAF12SAAA/tznX6ah3T+0kRzuFzn2fQ8fFQ3Jwhf+PAnqgJuUaDbeHheuyuveOy2cK7s60D6dQJ/6PJXvNmeGbDhD3I1NjZn9oc4JUzlHaJP8whuvwQH/+01Q425kRC4hJihTqmiFP5nRhpc7iojBlxQWo4EVUGzU3wuFln27vn07cdcSf8WjpT+zeWz4PVcN8pkVGsiKq7cBNzA3mqWriMtC2CSTtTYTdjMvF7Ijau6AcNe7AIgk6dia7G1bv5y7LBzTXH6/6CeEBeH+MU/ERnSMlwi7VF5hFXzLzRHBV/v1swHdmHAOmY60klxbO1sY26jVQdRXsf4BTv4aJ1LVbXI3ZFDBhVXuY/q0r3zY1ve75gXWrzLOdUpmWVI+ohESBQUZ+rU+U6osFlcstDVHqCNBEOiT1jawA4oQoqrxGTonFFIGpeu2Xj8UTnLeFE2Md3o2FwYCri8tnQ5YN/XViP9pfXkt2qOy/yEtKZZNhoBmIHfHbQrYCz5Kh2bdgqFw5O1uy/Hou6pfwPWz3f2JocCmu2hqd32lMlF7X6xR+B7KcNvMTXv9GkELd8Xu/Kfpoax7QPHsq9RNLcdLDme5+5J8r5I31b/bsb2Dr3veZ5TYKOfV6S+zqm3Pm3su3t6C2kn7SOtPaXrXGBgTZapBKzj5rvfobSFdc/F15bMv/SRizlLib5FfiLOiZ6k/j6LGHT37iY4b1+TQFbOGGn+tY5WRq6SisawiHhcRzE9LURYQtib7q0jxkwQBuBCXPTBOJAkaAG7Vp5dU+qxrV88x9ASUpuVyMu1qzoKu7SKuhpoqbqdcdNWiBV+lVb41JHBEJ7oeHuTt/PyKeXXDdZ5ohET/Y75lxzxI6uzQ5r4IGRvLWul1kvJbP1nnNsAoQhO0tt0L60Pp7S5cTCavKf2FyYs/nxKXq6DKJtk8uRIycOOtWfyvZHYkjulFNNFR/IjND9uujdq66p3qRvf1Nm8SZOIwpni9IrNC9uOzRXnq9BhaDPbRNwLK2DHcYuH1nwzm4c
USUBu7gk5JC5UGE1CfP0TLXwWWr+6JUra/cyPfLM1oeynDbzAToAPUuWA2L9545vaFlmshP8Nj7r96uNDW6ngAyjF0RPM4pH5pGHzui7GI9KmamJujUcZHQLh3/w1hSn4IJdpYUoZMQHC3iKNFeOvi94ZueafCvFyASXifsLgpzzwNt9z5juYKX+CJES84eVuQ5+BncnQF6BZqo2ezHyrmL/ctjxvWpRnj33RIWMu1kccgXDdGBgevRfNIHxouiYJs6eE/Gt593jZcGq/L55a1l9OO/jzhuDz4lDbhFfmbpK/sEfDFeY3XHjDMj+TkeVBLAbJ5EVVKCYeWVurm5iroOfGnBuUSzeP2mKC2ENA3HaMy2jTzGV7FMun64t97WY1zOM8R5FG7QyAeZuNQos1KH15jO1Dymt2/DO6bHgFWloeRbpv+sovdYJSx2/gjgvYHEO2VwRFao5Bny2/MA31GzjN4eTyWvA16rhRu0NMu477gNZEaCRgHvcTVDchTfVckI4Cx4gd4YhUAgjKD5wvCFwX8upRTi2Rdy3FeXDhmw3xEntdU9rGdmncE7jFw+s+vAR2v+V2PjV7jAf9YvwKTdX8v/49RppBE21OCxhVheNMphMDyhtGRopafi54pihoCFgfyYzNzbyFH5voOmGEIIE+cSgHxkMFNbiR3/7BbwcP5SBwy6u/vmP02xcKsRdFj1UW7ZLxdk8dzs8oZEioxMnOR2rxrQ+N9z5nc6U7u40DDVEdwbJuyyQHU/LiE4wmO1N3bxC33X10I8gHmjukAh/Axq6s/eRqK1hwiHkR98coaA6CjFu3hAWv3mwnvK2Y+j/MzAuzOaxEiEMKOJp8JB+dsF7YyNwRM9+pFGSmF95KwrPdEM+/ZUBRDawKe5Kh0NkfZPSesQvMnn3ZgX+3P8ebe1SHo/4p00CyCfNiZ+efcxFg/mw1jqPsN7MOOv+lOglbWG3QEqlDs3OqCKIzANydcibObrwLNDGusfzk42/iksBPVgCgkil6O65u7r5O+q6NhFJRIaVVtGEYSUXNrHnsnBz30tuyBsc1bP9wF3KI7iNCPcDEV+4lT5OMJp5vKeg6p4BGfY76Aap/wY5Pdu/JHo3yI47AqoWq3Z5/P+O6mBnP3a6N/BdS62DLwp0WWwLAn/k1aiJr2oMje765btFUIiGn0MJ3o4nk4ANHxLRR9HrZndMvIAcAOBq5offJZ+HpBeAQ5WEqiPOd5kpWNg0wkdJCMpHwJ4AAAAA==" />
</svg>
After: Size 4.8 KiB

frontend/public/Logos/openlit.svg (new file, 3 lines)
File diff suppressed because one or more lines are too long (After: Size 14 KiB)

frontend/public/Logos/traceloop.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="256" height="256" viewBox="0 0 256 256" preserveAspectRatio="xMidYMid meet">
<image width="256" height="256" href="data:image/webp;base64,UklGRjoOAABXRUJQVlA4WAoAAAAQAAAA/wAA/wAAQUxQSLsGAAAB8IZtE+q22baNJFsOrKIpqC01lLw1jOWayszMTGFmZo6s0nWVmQzXFeb4KoUjwwVO1BguV3HBDIL5YZqZNda/bIqICYCw/8P+DxvYmulyt2AX/ce+2UkyIm5/v8iHWra4XZnWDt1ciF178NP+Au7/H2pbeHN7did2/dV3cfX4HDV22tuw52AoGHicp8c+1DnH3sqJoaH/OpavUWsnANyEoeKvPRmeQL1vAmthyIBT6ezlihVaMzB0rLCRPYKap2eFEHg12beqZblDiclkv6nmbg4l3qEyUPVmDCW/pOqnGwZDifepLtAt6AklFlBBnWqer0OJu8j2qPb1UyFEw7lkk1V7yqgKHf4G5PGNilUZMClkaEygg+WKTQKI/DlUmACMRrFaP0cCQL/S0OADCwdcekap0n7QekBRKOC0AW+KR6WiAdC2scHf1VU+COwXvK+Pf4MBHUxaW9qFBX58tTtITMmqVKV0bRJ01nHDXV3z7aPOBbkJaXdpeYMDwv4/C6GR+uJYLV/OPF+MdeCjY8W/ekusnJQHxtK/mGp0JiPXh5oGdtxlkeBY5UVz5j8aISFmYSny+nIzOtJnM+p7IJnNNr0RzXtsMN9LNShwc592RnhR49oMph7/RFM3PcBk/whleke0MbAadW66jiVyG5o8cBeL5TOUWj0QAIwS1Pq3OI7laPraJI7xKLfEAFiOen/IMNBvPtzG0L9BEC6H6AbFgol0X6GGY+g2ouSG6NdQ80VkF/pUeJfM/qcofC1PtYNkD6CKZWTXouy8MtVaIqgW64AxVG8KK0Pd46neV2Iw1VJh2juoPtJtjbR63XpQOZVIppohrP6gauVA/YYOTRFUDwo7uEa1T8kG6bAbqHsJWzNKtTvJoEiFF8hgj6xRkK/Yf6x0L2pwpjvdnaLyAUYE9LoF6COOKPAMMG4XFBgBANPU2gCcl1ab7gvg7OuVMw1ab1LqcxsLpNabbEd3Fhh6RsomaHuCT6HgQhswDykxlcsOzJccF+GbAO1fnq3O7pHA32P+X6b5MRX47ZOq+LIvhw4nTMx2e7Qsypt5Ocjs+dDffjzlkV68dd4wkBl1tyv/pIfcnT0xAcL+P9twwsRst4f8ZL7rnighQ+duLvaI//FvjxhCLpuZW+TR0p09MaFjl2cje9Vku4AbfkSz/rWop4DhO1Hb7Ms7MMGHEo9fwmXfiGY+NYzLuiCI+vomtLMJhf4+nCdqG5q7IZXH9gnqvKmNaSjW24/lUzR7bQrLWtR6GgCMCMjBnRxPovmPRzDciGoHRgDko+S76Lr/XwF8ic5aqBfmwygUfYDuOdTwv3S3o+aj1sjCvmTbVcBhZB+ptuagsIepbI06jCX7VbWD9cLmUF2EOrqouqPq9Sh8LdVgJT6n6qOb+JVUVyjxAVW0cmXCxlFdqMQyKmuzamV5wq6ngtM6PEQFP6uW95qs6iiyt1TwxZLNU+216AZRm4B8pArfAHmCX7GGaFguqdFBB/9QIDCIDv6m2HIAo0TQRGBMrDXfCmCMrVSrxACAgdVivrRwwB0Bs+2I5IBrGpSqHgitR3iFfGoH3vuazLWlB/CmV6vkHQFt99ksoeY1C3APPmKipuk24E48oNDmPtDBjFwfU9nCWBBoe/iASbxrHCDQctf2gCq+3AzopJH64lj6B1MsIDX2llfHin9soBWknpfx8lgtX0w1IOz/sxA6briL/LbhhhzrxZl3aXltX0GxV90l/gZHp5LWliKrf++zdhGj3/8DNT251CEiZnYBmrJ0bVJHjA1+5D91M1+fb1HdlsVRbLZJdWha/wajnQFFKHO+hWmMFzU+GMd07lY0ddGANvqVotQVPCMbUOeiGJYe/0KTl/YDgMifUe6jHLEVqPU2C8ff0fQ/RwJMQsF/RDN8gHo/zZCJCk4Co0oSLqG7LKjY6Qi6nzSoMp5C0ZU2spWo+a1kQ1DFp76WhaPJilVzkc3U4WuPsNepzkHVD5J9r4MnKGw5VYpulWSHdQii8LVUg3X7i+y4DtgsbAFVsm6nyX7Wodkt7EmqSL9qu8k+1sGdJSyJCvJVW0D2kg5ZGbKOAfk41QaT9Q6okG4tFPUU3fm1iu0B+s81KLTCTZKORdDBJL2CoxgSGhW4CQCcchquAEbbHrUWAOcL5nMCANhzpLTcCqzRJ5T6yMICS82WY28FdqeMymuB+fzNKi22APM4n6mcdmj35kK+lndjgN3yfIU6B68B/kF7zFN4M3TUmulytzB4t0/pCyLtD3z8X78ajUc2XAMyhy8/WCevxe3KtELY/2H/hw0OAFZQOCBYBwAAcDIAnQEqAAEAAT5tMpJFpCOhmApkyEAGxLE3cLO/I/8A/AD9APeogBDf+wH8n/gH4AfoB/D8D/bUGJv3b8rvGLBD2z8kv6L+13LBGO/UZ+8+1n5jf2T+j+wDzCP0i/03UP8wn6r/83/Ye+Z/jv7B7I/13/ID5AP6p/eP/Z7WHqRft57AH8g/2XpTfsZ/3fkf/a/9xvgd/XX/4ewB6AHVf9AP4B+AH6V0/6X/37XCnfIaNJCv3UeIbyMN5GG7FGGXSbcdew8aH+Bc8jPujAWD5wlI0vGluc15OXPagLZ8UxFfBAlngGwyAgncED6mebGS9plnBjVRDpz0QjHI7NbLJCxCvFKIDQxTIXTAX5W3acocmvQ/OXAfnjdGNjAFiZXJq+/TbBzGEHdg+muQ+hFFPR2JnvUSNpSYiGR57j5nlFet8+EyOPVmbGlR6r47DqM+38DTtDHOHz36jaDYexg9qHpxk3+59nQC1SOzy4m4u34GLkKXdKCpMehdu53FcSHUZwi37E2m0zNsuu+Q0aSFfuo8Q3kYbyMN5GG6AAD8hoAAAAAAAtSatOsx3/xQJLZJVNwvp3oF/sWdRuskPj8s391kh8d5sIy+9WFVihdPhMB/7oNptL8qeszrOKuQiABZ2qtQOEHxZqWJnio26h/5Vo4krKGOO0sEz6Hv5rWo4fY5j+Zkip4N2flcerdMsQa4aobjtm8EeilMPd1RKe3T2L0a/Cv9h3t065fg6SjwXyBvg2hYN5RVMz/+NTMiir38FhGGkGCU+n7zH7a6z+ZsKOKACwNxEF24gJYE5Z8YDT/LDnQYzNPs7fnw0DWqqrx34u1n8kIsG90iig+vnPhkaVDzEnlMOItgwAov/p/WcPmzTS/ESKniY3/MPn06vQ7wic6l2jPoEyPrpPntLEOo7YMH2DlbEeYrf53exjRiZcoP/Ct0vnxkiHQpZk7RAgR6cllYgknBYToYbOnOoJ31iIPn+tjP9Wi1AD4rjN8FcbwzXiiXhq7Gpi/P39FnE7kEHxM3vJ6IfeN8kET6YhP9+bS4Ndq9z76kggowPI6pCeXTbktqMjiZBKP512NVTKWySMPWK2cDgYHLavMlfOsRt+ejI3anQJ8WOB5Sj4jg+PmmRO
0YbrVrKJlMyW9DQhQxUYSb68hxwsa0pT95muv26wX/UbDfNNZqiY+FY3E3IrOLBfx/PvkZag+Z1tU15DmS4oWxI1MZPiItkOO5brQrLJ4gEX/aqDZVYH5qQqEYAdzEawMyn+D8EIsnVFuZBNbvILVTzHSwk5p+TPibb4Bij8La/IJ3+b8zP3KyRwEs7cd4bm+Kk65mOeRHpY//wUWtOhkyo+qCpkP9B+BZwjtgg93EV8KAePfDCEvQd6szMwcXg364KeljltsktN3CEwdKtH5sTvqV4jy9/ySVzVng2aUX/rOq5NjhT5vpK5nuKGWPdhY6RWG/JJfzsRE9JFNVJDLuZWWPyOhtK52DWPq8KkvizSdW1HXse/jEbnWBMMY4QwuO7eYDSR462HFk4zWyEZKugT+Uop5KD4KF+4xpfPgWfPcD4doS/+fEI2HjOrl8ctTT0EKp255SHoe3jdEi6QVMzZoAJS4j6vntijLltNjzgznBcSIrdKAgCdZjX9+VDrtqECJvxN3Z0S2oEoVZf2tji5V6DXamlubRVYpvsgB0DK/ACFIRFc4ggMNRpcbv9wYeN/ch3k6B5RkfzyDTkXb+RkS8ZLdbq1+tq/I8eK9g1/wuAZNhQkc5QmjUYgl3QAo86JmxC4zO0au6U/fSE11r16YB3WkeheQcJUKCbRTGS8OOs30DN7fpcxuuyEU3yM8HEqoyjd1rdQAi9K6LdRvQyLh3A9JmvA1EAjN8i2uLOeZi81BhiwAsZle61AOM78/l7yxuHy40ECHE9YostmdueER4HTWcMMGg0X8eJcujrbN1S4qyGxITPLghrKViISKTyPZ5qSo6EwnWhwA+XrAtKvtNVyS5f2iWAFmfHlQxfB+NXNwhxPwQ2akkMg3wgSifNE4lDRelowqblVu+ByzHBWsJkemp5mw2u/1Wl+RmpkyV7U5cIjxTkKyprg12fSsb7L3rIIHxd4ZgDArHDgESQGwDGC+iMlFrZKAfhcSUxuvPko5fkWdmQ/+oI2RMP4/d0h4/foef6PFHier1x+nRlT+0ZjBnE6Z32aVtbLbEPc+Z/EgAGsRF1RpteYADfJzufleSz65a2AZGpwwvDlrHbL9WdGx4Ww776VnxhYV27x6KH0biDB/mTxsIwv4zQQiKkDjcibMmzgp2PvZ3foNNj+SEDx11+lqzPpayPGMBRlQgr9yVhV93j2rzdy+mYvCoHNBVmA6kubmaB2ilYOI/AMpq6CFrzpkL8UJoedOwyLxtwJIWPjP+a++KkMbgYxBuo7dViu8XMjZ9jD1lMMABEIMT5fvOnzyyoKUFfnseMxeoFZjuySXC9vxk95ivyfGaZgXSPDdaiQAGYrj9aBUAAAAAAAA=" />
</svg>
After: Size 4.9 KiB
@@ -12,5 +12,6 @@
"pipeline": "Pipeline",
"pipelines": "Pipelines",
"archives": "Archives",
"logs_to_metrics": "Logs To Metrics"
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles"
}

@@ -12,5 +12,6 @@
"pipeline": "Pipeline",
"pipelines": "Pipelines",
"archives": "Archives",
"logs_to_metrics": "Logs To Metrics"
"logs_to_metrics": "Logs To Metrics",
"roles": "Roles"
}

@@ -1,6 +1,7 @@
{
"SIGN_UP": "SigNoz | Sign Up",
"LOGIN": "SigNoz | Login",
"FORGOT_PASSWORD": "SigNoz | Forgot Password",
"HOME": "SigNoz | Home",
"SERVICE_METRICS": "SigNoz | Service Metrics",
"SERVICE_MAP": "SigNoz | Service Map",
@@ -72,5 +73,6 @@
"API_MONITORING": "SigNoz | External APIs",
"METER_EXPLORER": "SigNoz | Meter Explorer",
"METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
"METER": "SigNoz | Meter"
"METER": "SigNoz | Meter",
"ROLES_SETTINGS": "SigNoz | Roles"
}
@@ -1,5 +1,6 @@
#!/bin/bash

echo "\n\n---\nRenamed tag files to index.ts...\n"
# Rename tag files to index.ts in services directories
# tags-split creates: services/tagName/tagName.ts -> rename to services/tagName/index.ts
find src/api/generated/services -mindepth 1 -maxdepth 1 -type d | while read -r dir; do
@@ -11,4 +12,33 @@ find src/api/generated/services -mindepth 1 -maxdepth 1 -type d | while read -r
  fi
done

echo "Tag files renamed to index.ts"
echo "\n✅ Tag files renamed to index.ts"

# Format generated files
echo "\n\n---\nRunning prettier...\n"
if ! prettier --write src/api/generated; then
  echo "Prettier formatting failed!"
  exit 1
fi
echo "\n✅ Prettier formatting successful"

# Fix linting issues
echo "\n\n---\nRunning eslint...\n"
if ! yarn lint --fix --quiet src/api/generated; then
  echo "ESLint check failed! Please fix linting errors before proceeding."
  exit 1
fi
echo "\n✅ ESLint check successful"

# Check for type errors
echo "\n\n---\nChecking for type errors...\n"
if ! tsc --noEmit; then
  echo "Type check failed! Please fix type errors before proceeding."
  exit 1
fi
echo "\n✅ Type check successful"

echo "\n\n---\n ✅✅✅ API generation complete!"
@@ -1,3 +1,6 @@
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { matchPath, useLocation } from 'react-router-dom';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getAll from 'api/v1/user/get';
@@ -9,9 +12,6 @@ import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { matchPath, useLocation } from 'react-router-dom';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';

@@ -1,3 +1,6 @@
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import { ConfigProvider } from 'antd';
import getLocalStorageApi from 'api/browser/localstorage/get';
@@ -30,9 +33,6 @@ import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import { Suspense, useCallback, useEffect, useState } from 'react';
import { Route, Router, Switch } from 'react-router-dom';
import { CompatRouter } from 'react-router-dom-v5-compat';
import { LicenseStatus } from 'types/api/licensesV3/getActive';
import { extractDomain } from 'utils/app';

@@ -194,6 +194,10 @@ export const Login = Loadable(
  () => import(/* webpackChunkName: "Login" */ 'pages/Login'),
);

export const ForgotPassword = Loadable(
  () => import(/* webpackChunkName: "ForgotPassword" */ 'pages/ForgotPassword'),
);

export const UnAuthorized = Loadable(
  () => import(/* webpackChunkName: "UnAuthorized" */ 'pages/UnAuthorized'),
);

@@ -1,8 +1,8 @@
import { RouteProps } from 'react-router-dom';
import ROUTES from 'constants/routes';
import AlertTypeSelectionPage from 'pages/AlertTypeSelection';
import MessagingQueues from 'pages/MessagingQueues';
import MeterExplorer from 'pages/MeterExplorer';
import { RouteProps } from 'react-router-dom';

import {
  AlertHistory,
@@ -17,6 +17,7 @@ import {
  DashboardWidget,
  EditRulesPage,
  ErrorDetails,
  ForgotPassword,
  Home,
  InfrastructureMonitoring,
  InstalledIntegrations,
@@ -339,6 +340,13 @@ const routes: AppRoutes[] = [
    isPrivate: false,
    key: 'LOGIN',
  },
  {
    path: ROUTES.FORGOT_PASSWORD,
    exact: true,
    component: ForgotPassword,
    isPrivate: false,
    key: 'FORGOT_PASSWORD',
  },
  {
    path: ROUTES.UN_AUTHORIZED,
    exact: true,

@@ -1,7 +1,7 @@
import { initReactI18next } from 'react-i18next';
import i18n from 'i18next';
import LanguageDetector from 'i18next-browser-languagedetector';
import Backend from 'i18next-http-backend';
import { initReactI18next } from 'react-i18next';

import cacheBursting from '../../i18n-translations-hash.json';
@@ -1,29 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
  CreateIngestionKeyProps,
  IngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const createIngestionKey = async (
  props: CreateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeyProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.post('/workspaces/me/keys', {
      ...props,
    });

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default createIngestionKey;

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';

const deleteIngestionKey = async (
  id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.delete(
      `/workspaces/me/keys/${id}`,
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default deleteIngestionKey;

@@ -1,21 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { AxiosResponse } from 'axios';
import {
  AllIngestionKeyProps,
  GetIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

export const getAllIngestionKeys = (
  props: GetIngestionKeyProps,
): Promise<AxiosResponse<AllIngestionKeyProps>> => {
  // eslint-disable-next-line @typescript-eslint/naming-convention
  const { search, per_page, page } = props;

  const BASE_URL = '/workspaces/me/keys';
  const URL_QUERY_PARAMS =
    search && search.length > 0
      ? `/search?name=${search}&page=1&per_page=100`
      : `?page=${page}&per_page=${per_page}`;

  return GatewayApiV1Instance.get(`${BASE_URL}${URL_QUERY_PARAMS}`);
};

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
  AddLimitProps,
  LimitSuccessProps,
} from 'types/api/ingestionKeys/limits/types';

interface SuccessResponse<T> {
  statusCode: number;
  error: null;
  message: string;
  payload: T;
}

interface ErrorResponse {
  statusCode: number;
  error: string;
  message: string;
  payload: null;
}

const createLimitForIngestionKey = async (
  props: AddLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.post(
      `/workspaces/me/keys/${props.keyID}/limits`,
      {
        ...props,
      },
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // Axios error
      const errResponse: ErrorResponse = {
        statusCode: error.response?.status || 500,
        error: error.response?.data?.error,
        message: error.response?.data?.status || 'An error occurred',
        payload: null,
      };

      throw errResponse;
    } else {
      // Non-Axios error
      const errResponse: ErrorResponse = {
        statusCode: 500,
        error: 'Unknown error',
        message: 'An unknown error occurred',
        payload: null,
      };

      throw errResponse;
    }
  }
};

export default createLimitForIngestionKey;

@@ -1,26 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { AllIngestionKeyProps } from 'types/api/ingestionKeys/types';

const deleteLimitsForIngestionKey = async (
  id: string,
): Promise<SuccessResponse<AllIngestionKeyProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.delete(
      `/workspaces/me/limits/${id}`,
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default deleteLimitsForIngestionKey;

@@ -1,65 +0,0 @@
/* eslint-disable @typescript-eslint/no-throw-literal */
import { GatewayApiV1Instance } from 'api';
import axios from 'axios';
import {
  LimitSuccessProps,
  UpdateLimitProps,
} from 'types/api/ingestionKeys/limits/types';

interface SuccessResponse<T> {
  statusCode: number;
  error: null;
  message: string;
  payload: T;
}

interface ErrorResponse {
  statusCode: number;
  error: string;
  message: string;
  payload: null;
}

const updateLimitForIngestionKey = async (
  props: UpdateLimitProps,
): Promise<SuccessResponse<LimitSuccessProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.patch(
      `/workspaces/me/limits/${props.limitID}`,
      {
        config: props.config,
      },
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    if (axios.isAxiosError(error)) {
      // Axios error
      const errResponse: ErrorResponse = {
        statusCode: error.response?.status || 500,
        error: error.response?.data?.error,
        message: error.response?.data?.status || 'An error occurred',
        payload: null,
      };

      throw errResponse;
    } else {
      // Non-Axios error
      const errResponse: ErrorResponse = {
        statusCode: 500,
        error: 'Unknown error',
        message: 'An unknown error occurred',
        payload: null,
      };

      throw errResponse;
    }
  }
};

export default updateLimitForIngestionKey;

@@ -1,32 +0,0 @@
import { GatewayApiV1Instance } from 'api';
import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
import { AxiosError } from 'axios';
import { ErrorResponse, SuccessResponse } from 'types/api';
import {
  IngestionKeysPayloadProps,
  UpdateIngestionKeyProps,
} from 'types/api/ingestionKeys/types';

const updateIngestionKey = async (
  props: UpdateIngestionKeyProps,
): Promise<SuccessResponse<IngestionKeysPayloadProps> | ErrorResponse> => {
  try {
    const response = await GatewayApiV1Instance.patch(
      `/workspaces/me/keys/${props.id}`,
      {
        ...props.data,
      },
    );

    return {
      statusCode: 200,
      error: null,
      message: response.data.status,
      payload: response.data.data,
    };
  } catch (error) {
    return ErrorResponseHandler(error as AxiosError);
  }
};

export default updateIngestionKey;

@@ -4,8 +4,6 @@ export const apiV2 = '/api/v2/';
export const apiV3 = '/api/v3/';
export const apiV4 = '/api/v4/';
export const apiV5 = '/api/v5/';
export const gatewayApiV1 = '/api/gateway/v1/';
export const gatewayApiV2 = '/api/gateway/v2/';
export const apiAlertManager = '/api/alertmanager/';

export default apiV1;

@@ -1,7 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosResponse } from 'axios';
import { DeploymentsDataProps } from 'types/api/customDomain/types';

export const getDeploymentsData = (): Promise<
  AxiosResponse<DeploymentsDataProps>
> => axios.get(`/deployments/me`);

@@ -1,16 +0,0 @@
import { GatewayApiV2Instance as axios } from 'api';
import { AxiosError } from 'axios';
import { SuccessResponse } from 'types/api';
import {
  PayloadProps,
  UpdateCustomDomainProps,
} from 'types/api/customDomain/types';

const updateSubDomainAPI = async (
  props: UpdateCustomDomainProps,
): Promise<SuccessResponse<PayloadProps> | AxiosError> =>
  axios.put(`/deployments/me/host`, {
    ...props.data,
  });

export default updateSubDomainAPI;
@@ -11,6 +11,7 @@ import {

const dashboardVariablesQuery = async (
  props: Props,
  signal?: AbortSignal,
): Promise<SuccessResponse<VariableResponseProps> | ErrorResponse> => {
  try {
    const { globalTime } = store.getState();
@@ -32,7 +33,7 @@ const dashboardVariablesQuery = async (

payload.variables = { ...payload.variables, ...timeVariables };

const response = await axios.post(`/variables/query`, payload);
const response = await axios.post(`/variables/query`, payload, { signal });

return {
  statusCode: 200,

@@ -19,6 +19,7 @@ export const getFieldValues = async (
  startUnixMilli?: number,
  endUnixMilli?: number,
  existingQuery?: string,
  abortSignal?: AbortSignal,
): Promise<SuccessResponseV2<FieldValueResponse>> => {
  const params: Record<string, string> = {};

@@ -47,7 +48,10 @@
}

try {
  const response = await axios.get('/fields/values', { params });
  const response = await axios.get('/fields/values', {
    params,
    signal: abortSignal,
  });

  // Normalize values from different types (stringValues, boolValues, etc.)
  if (response.data?.data?.values) {
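Both hunks above thread an `AbortSignal` through to axios, so callers can cancel in-flight requests. A hedged usage sketch; the surrounding wiring is illustrative and `props` stands for whatever the caller already builds for the variables query:

```ts
const controller = new AbortController();

// Pass the controller's signal as the new trailing argument.
const response = dashboardVariablesQuery(props, controller.signal);

// Abort when the variable changes again before the reply arrives,
// e.g. from a cleanup function or a stale-request guard.
controller.abort();
```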
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,7 +15,9 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
AuthtypesPostableAuthDomainDTO,
|
||||
AuthtypesUpdateableAuthDomainDTO,
|
||||
@@ -27,8 +28,6 @@ import type {
|
||||
UpdateAuthDomainPathParameters,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,7 +15,9 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
CreatePublicDashboard201,
|
||||
CreatePublicDashboardPathParameters,
|
||||
@@ -33,14 +34,12 @@ import type {
|
||||
UpdatePublicDashboardPathParameters,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoints deletes the public sharing config and disables the public sharing of a dashboard
|
||||
* This endpoint deletes the public sharing config and disables the public sharing of a dashboard
|
||||
* @summary Delete public dashboard
|
||||
*/
|
||||
export const deletePublicDashboard = ({
|
||||
@@ -119,7 +118,7 @@ export const useDeletePublicDashboard = <
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoints returns public sharing config for a dashboard
|
||||
* This endpoint returns public sharing config for a dashboard
|
||||
* @summary Get public dashboard
|
||||
*/
|
||||
export const getPublicDashboard = (
|
||||
@@ -223,7 +222,7 @@ export const invalidateGetPublicDashboard = async (
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoints creates public sharing config and enables public sharing of the dashboard
|
||||
* This endpoint creates public sharing config and enables public sharing of the dashboard
|
||||
* @summary Create public dashboard
|
||||
*/
|
||||
export const createPublicDashboard = (
|
||||
@@ -322,7 +321,7 @@ export const useCreatePublicDashboard = <
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoints updates the public sharing config for a dashboard
|
||||
* This endpoint updates the public sharing config for a dashboard
|
||||
* @summary Update public dashboard
|
||||
*/
|
||||
export const updatePublicDashboard = (
|
||||
@@ -419,7 +418,7 @@ export const useUpdatePublicDashboard = <
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoints returns the sanitized dashboard data for public access
|
||||
* This endpoint returns the sanitized dashboard data for public access
|
||||
* @summary Get public dashboard data
|
||||
*/
|
||||
export const getPublicDashboardData = (
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
QueryClient,
|
||||
@@ -13,10 +12,10 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
|
||||
import type { GetFeatures200, RenderErrorResponseDTO } from '../sigNoz.schemas';
|
||||
import { useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type { GetFeatures200, RenderErrorResponseDTO } from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
|
||||
frontend/src/api/generated/services/fields/index.ts (new file, 222 lines)
@@ -0,0 +1,222 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
QueryClient,
|
||||
QueryFunction,
|
||||
QueryKey,
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetFieldsKeys200,
|
||||
GetFieldsKeysParams,
|
||||
GetFieldsValues200,
|
||||
GetFieldsValuesParams,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoint returns field keys
|
||||
* @summary Get field keys
|
||||
*/
|
||||
export const getFieldsKeys = (
|
||||
params?: GetFieldsKeysParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetFieldsKeys200>({
|
||||
url: `/api/v1/fields/keys`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetFieldsKeysQueryKey = (params?: GetFieldsKeysParams) => {
|
||||
return ['getFieldsKeys', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getGetFieldsKeysQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getFieldsKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetFieldsKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getFieldsKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetFieldsKeysQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getFieldsKeys>>> = ({
|
||||
signal,
|
||||
}) => getFieldsKeys(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getFieldsKeys>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetFieldsKeysQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getFieldsKeys>>
|
||||
>;
|
||||
export type GetFieldsKeysQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get field keys
|
||||
*/
|
||||
|
||||
export function useGetFieldsKeys<
|
||||
TData = Awaited<ReturnType<typeof getFieldsKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetFieldsKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getFieldsKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetFieldsKeysQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get field keys
|
||||
*/
|
||||
export const invalidateGetFieldsKeys = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetFieldsKeysParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetFieldsKeysQueryKey(params) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint returns field values
|
||||
* @summary Get field values
|
||||
*/
|
||||
export const getFieldsValues = (
|
||||
params?: GetFieldsValuesParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetFieldsValues200>({
|
||||
url: `/api/v1/fields/values`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetFieldsValuesQueryKey = (params?: GetFieldsValuesParams) => {
|
||||
return ['getFieldsValues', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getGetFieldsValuesQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getFieldsValues>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetFieldsValuesParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getFieldsValues>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetFieldsValuesQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getFieldsValues>>> = ({
|
||||
signal,
|
||||
}) => getFieldsValues(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getFieldsValues>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetFieldsValuesQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getFieldsValues>>
|
||||
>;
|
||||
export type GetFieldsValuesQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get field values
|
||||
*/
|
||||
|
||||
export function useGetFieldsValues<
|
||||
TData = Awaited<ReturnType<typeof getFieldsValues>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetFieldsValuesParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getFieldsValues>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetFieldsValuesQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get field values
|
||||
*/
|
||||
export const invalidateGetFieldsValues = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetFieldsValuesParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetFieldsValuesQueryKey(params) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
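Usage sketch (annotation, not part of the generated diff): one way the new fields hooks could be consumed. The import path and the component of use are assumptions; the optional params object is omitted because GetFieldsKeysParams is defined in sigNoz.schemas, not in this hunk.

import { useQueryClient } from 'react-query';

import {
	invalidateGetFieldsKeys,
	useGetFieldsKeys,
} from 'api/generated/services/fields'; // import path is an assumption

// Fetch all field keys and expose a manual refresh built on the generated invalidator.
export function useAllFieldKeys() {
	const queryClient = useQueryClient();
	// params (GetFieldsKeysParams) is optional; omitted to keep the sketch schema-agnostic.
	const query = useGetFieldsKeys(undefined, { query: { staleTime: 60_000 } });
	const refresh = () => invalidateGetFieldsKeys(queryClient, undefined);
	return { query, refresh };
}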
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,7 +15,9 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
CreateIngestionKey200,
|
||||
CreateIngestionKeyLimit201,
|
||||
@@ -27,14 +28,14 @@ import type {
|
||||
GatewaytypesPostableIngestionKeyLimitDTO,
|
||||
GatewaytypesUpdatableIngestionKeyLimitDTO,
|
||||
GetIngestionKeys200,
|
||||
GetIngestionKeysParams,
|
||||
RenderErrorResponseDTO,
|
||||
SearchIngestionKeys200,
|
||||
SearchIngestionKeysParams,
|
||||
UpdateIngestionKeyLimitPathParameters,
|
||||
UpdateIngestionKeyPathParameters,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
@@ -43,35 +44,44 @@ type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
* This endpoint returns the ingestion keys for a workspace
|
||||
* @summary Get ingestion keys for workspace
|
||||
*/
|
||||
export const getIngestionKeys = (signal?: AbortSignal) => {
|
||||
export const getIngestionKeys = (
|
||||
params?: GetIngestionKeysParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetIngestionKeys200>({
|
||||
url: `/api/v2/gateway/ingestion_keys`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetIngestionKeysQueryKey = () => {
|
||||
return ['getIngestionKeys'] as const;
|
||||
export const getGetIngestionKeysQueryKey = (
|
||||
params?: GetIngestionKeysParams,
|
||||
) => {
|
||||
return ['getIngestionKeys', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getGetIngestionKeysQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}) => {
|
||||
>(
|
||||
params?: GetIngestionKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetIngestionKeysQueryKey();
|
||||
const queryKey = queryOptions?.queryKey ?? getGetIngestionKeysQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getIngestionKeys>>> = ({
|
||||
signal,
|
||||
}) => getIngestionKeys(signal);
|
||||
}) => getIngestionKeys(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
@@ -92,14 +102,17 @@ export type GetIngestionKeysQueryError = RenderErrorResponseDTO;
|
||||
export function useGetIngestionKeys<
|
||||
TData = Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetIngestionKeysQueryOptions(options);
|
||||
>(
|
||||
params?: GetIngestionKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetIngestionKeysQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
@@ -115,10 +128,11 @@ export function useGetIngestionKeys<
|
||||
*/
|
||||
export const invalidateGetIngestionKeys = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetIngestionKeysParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetIngestionKeysQueryKey() },
|
||||
{ queryKey: getGetIngestionKeysQueryKey(params) },
|
||||
options,
|
||||
);
|
||||
|
||||
@@ -663,35 +677,45 @@ export const useUpdateIngestionKeyLimit = <
|
||||
* This endpoint returns the ingestion keys for a workspace
|
||||
* @summary Search ingestion keys for workspace
|
||||
*/
|
||||
export const searchIngestionKeys = (signal?: AbortSignal) => {
|
||||
export const searchIngestionKeys = (
|
||||
params: SearchIngestionKeysParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<SearchIngestionKeys200>({
|
||||
url: `/api/v2/gateway/ingestion_keys/search`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getSearchIngestionKeysQueryKey = () => {
|
||||
return ['searchIngestionKeys'] as const;
|
||||
export const getSearchIngestionKeysQueryKey = (
|
||||
params?: SearchIngestionKeysParams,
|
||||
) => {
|
||||
return ['searchIngestionKeys', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getSearchIngestionKeysQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}) => {
|
||||
>(
|
||||
params: SearchIngestionKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getSearchIngestionKeysQueryKey();
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getSearchIngestionKeysQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>
|
||||
> = ({ signal }) => searchIngestionKeys(signal);
|
||||
> = ({ signal }) => searchIngestionKeys(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
@@ -712,14 +736,17 @@ export type SearchIngestionKeysQueryError = RenderErrorResponseDTO;
|
||||
export function useSearchIngestionKeys<
|
||||
TData = Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getSearchIngestionKeysQueryOptions(options);
|
||||
>(
|
||||
params: SearchIngestionKeysParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof searchIngestionKeys>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getSearchIngestionKeysQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
@@ -735,10 +762,11 @@ export function useSearchIngestionKeys<
|
||||
*/
|
||||
export const invalidateSearchIngestionKeys = async (
|
||||
queryClient: QueryClient,
|
||||
params: SearchIngestionKeysParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getSearchIngestionKeysQueryKey() },
|
||||
{ queryKey: getSearchIngestionKeysQueryKey(params) },
|
||||
options,
|
||||
);
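Usage sketch (annotation, not part of the generated diff): getIngestionKeys and searchIngestionKeys now accept a params argument that is folded into the query key. The module path below is an assumption; the param types come from the generated schemas.

import {
	useGetIngestionKeys,
	useSearchIngestionKeys,
} from 'api/generated/services/gateway'; // module path is an assumption
import type {
	GetIngestionKeysParams,
	SearchIngestionKeysParams,
} from 'api/generated/services/sigNoz.schemas';

// Both hooks now key their cache on params, so changing the page or the
// search text refetches instead of reusing one unparameterised entry.
export function useIngestionKeyQueries(
	listParams: GetIngestionKeysParams | undefined,
	searchParams: SearchIngestionKeysParams,
) {
	const list = useGetIngestionKeys(listParams);
	const search = useSearchIngestionKeys(searchParams);
	return { list, search };
}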
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
QueryClient,
|
||||
@@ -13,20 +12,20 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetGlobalConfig200,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoints returns global config
|
||||
* This endpoint returns global config
|
||||
* @summary Get global config
|
||||
*/
|
||||
export const getGlobalConfig = (signal?: AbortSignal) => {
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,15 +15,15 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
ListPromotedAndIndexedPaths200,
|
||||
PromotetypesPromotePathDTO,
|
||||
RenderErrorResponseDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,20 +15,25 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetMetricAlerts200,
|
||||
GetMetricAlertsParams,
|
||||
GetMetricAlertsPathParameters,
|
||||
GetMetricAttributes200,
|
||||
GetMetricAttributesParams,
|
||||
GetMetricAttributesPathParameters,
|
||||
GetMetricDashboards200,
|
||||
GetMetricDashboardsParams,
|
||||
GetMetricDashboardsPathParameters,
|
||||
GetMetricHighlights200,
|
||||
GetMetricHighlightsParams,
|
||||
GetMetricHighlightsPathParameters,
|
||||
GetMetricMetadata200,
|
||||
GetMetricMetadataParams,
|
||||
GetMetricMetadataPathParameters,
|
||||
GetMetricsStats200,
|
||||
GetMetricsTreemap200,
|
||||
MetricsexplorertypesMetricAttributesRequestDTO,
|
||||
ListMetrics200,
|
||||
ListMetricsParams,
|
||||
MetricsexplorertypesStatsRequestDTO,
|
||||
MetricsexplorertypesTreemapRequestDTO,
|
||||
MetricsexplorertypesUpdateMetricMetadataRequestDTO,
|
||||
@@ -37,37 +41,133 @@ import type {
|
||||
UpdateMetricMetadataPathParameters,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* This endpoint returns associated alerts for a specified metric
|
||||
* @summary Get metric alerts
|
||||
* This endpoint returns a list of distinct metric names within the specified time range
|
||||
* @summary List metric names
|
||||
*/
|
||||
export const getMetricAlerts = (
|
||||
params?: GetMetricAlertsParams,
|
||||
export const listMetrics = (
|
||||
params?: ListMetricsParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricAlerts200>({
|
||||
url: `/api/v2/metric/alerts`,
|
||||
return GeneratedAPIInstance<ListMetrics200>({
|
||||
url: `/api/v2/metrics`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricAlertsQueryKey = (params?: GetMetricAlertsParams) => {
|
||||
return ['getMetricAlerts', ...(params ? [params] : [])] as const;
|
||||
export const getListMetricsQueryKey = (params?: ListMetricsParams) => {
|
||||
return ['listMetrics', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getListMetricsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof listMetrics>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: ListMetricsParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listMetrics>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getListMetricsQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof listMetrics>>> = ({
|
||||
signal,
|
||||
}) => listMetrics(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listMetrics>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type ListMetricsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof listMetrics>>
|
||||
>;
|
||||
export type ListMetricsQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary List metric names
|
||||
*/
|
||||
|
||||
export function useListMetrics<
|
||||
TData = Awaited<ReturnType<typeof listMetrics>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: ListMetricsParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof listMetrics>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getListMetricsQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary List metric names
|
||||
*/
|
||||
export const invalidateListMetrics = async (
|
||||
queryClient: QueryClient,
|
||||
params?: ListMetricsParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getListMetricsQueryKey(params) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
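Usage sketch (annotation, not part of the generated diff): the new listMetrics hook against GET /api/v2/metrics. The module path is an assumption, and the concrete fields of ListMetricsParams are not shown in this diff, so the params value is left to the caller.

import { useListMetrics } from 'api/generated/services/metrics'; // module path is an assumption
import type { ListMetricsParams } from 'api/generated/services/sigNoz.schemas';

export function useMetricNames(params?: ListMetricsParams) {
	// params become part of the ['listMetrics', params] query key.
	return useListMetrics(params, { query: { keepPreviousData: true } });
}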
|
||||
|
||||
/**
|
||||
* This endpoint returns associated alerts for a specified metric
|
||||
* @summary Get metric alerts
|
||||
*/
|
||||
export const getMetricAlerts = (
|
||||
{ metricName }: GetMetricAlertsPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricAlerts200>({
|
||||
url: `/api/v2/metrics/${metricName}/alerts`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricAlertsQueryKey = ({
|
||||
metricName,
|
||||
}: GetMetricAlertsPathParameters) => {
|
||||
return ['getMetricAlerts'] as const;
|
||||
};
|
||||
|
||||
export const getGetMetricAlertsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricAlertsParams,
|
||||
{ metricName }: GetMetricAlertsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricAlerts>>,
|
||||
@@ -78,13 +178,19 @@ export const getGetMetricAlertsQueryOptions = <
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey = queryOptions?.queryKey ?? getGetMetricAlertsQueryKey(params);
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetMetricAlertsQueryKey({ metricName });
|
||||
|
||||
const queryFn: QueryFunction<Awaited<ReturnType<typeof getMetricAlerts>>> = ({
|
||||
signal,
|
||||
}) => getMetricAlerts(params, signal);
|
||||
}) => getMetricAlerts({ metricName }, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!metricName,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricAlerts>>,
|
||||
TError,
|
||||
TData
|
||||
@@ -104,7 +210,7 @@ export function useGetMetricAlerts<
|
||||
TData = Awaited<ReturnType<typeof getMetricAlerts>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricAlertsParams,
|
||||
{ metricName }: GetMetricAlertsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricAlerts>>,
|
||||
@@ -113,7 +219,7 @@ export function useGetMetricAlerts<
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetMetricAlertsQueryOptions(params, options);
|
||||
const queryOptions = getGetMetricAlertsQueryOptions({ metricName }, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
@@ -129,11 +235,126 @@ export function useGetMetricAlerts<
|
||||
*/
|
||||
export const invalidateGetMetricAlerts = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetMetricAlertsParams,
|
||||
{ metricName }: GetMetricAlertsPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetMetricAlertsQueryKey(params) },
|
||||
{ queryKey: getGetMetricAlertsQueryKey({ metricName }) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
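Usage sketch (annotation, not part of the generated diff): the alerts endpoint now takes the metric name as a path parameter instead of a query param. The module path is an assumption.

import { useGetMetricAlerts } from 'api/generated/services/metrics'; // module path is an assumption

// /api/v2/metrics/{metricName}/alerts; the generated options set
// enabled: !!metricName, so an empty name keeps the query idle.
export function useAlertsForMetric(metricName: string) {
	return useGetMetricAlerts({ metricName });
}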
|
||||
|
||||
/**
|
||||
* This endpoint returns attribute keys and their unique values for a specified metric
|
||||
* @summary Get metric attributes
|
||||
*/
|
||||
export const getMetricAttributes = (
|
||||
{ metricName }: GetMetricAttributesPathParameters,
|
||||
params?: GetMetricAttributesParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricAttributes200>({
|
||||
url: `/api/v2/metrics/${metricName}/attributes`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricAttributesQueryKey = (
|
||||
{ metricName }: GetMetricAttributesPathParameters,
|
||||
params?: GetMetricAttributesParams,
|
||||
) => {
|
||||
return ['getMetricAttributes', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getGetMetricAttributesQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
{ metricName }: GetMetricAttributesPathParameters,
|
||||
params?: GetMetricAttributesParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ??
|
||||
getGetMetricAttributesQueryKey({ metricName }, params);
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>
|
||||
> = ({ signal }) => getMetricAttributes({ metricName }, params, signal);
|
||||
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!metricName,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetMetricAttributesQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>
|
||||
>;
|
||||
export type GetMetricAttributesQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get metric attributes
|
||||
*/
|
||||
|
||||
export function useGetMetricAttributes<
|
||||
TData = Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
{ metricName }: GetMetricAttributesPathParameters,
|
||||
params?: GetMetricAttributesParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetMetricAttributesQueryOptions(
|
||||
{ metricName },
|
||||
params,
|
||||
options,
|
||||
);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get metric attributes
|
||||
*/
|
||||
export const invalidateGetMetricAttributes = async (
|
||||
queryClient: QueryClient,
|
||||
{ metricName }: GetMetricAttributesPathParameters,
|
||||
params?: GetMetricAttributesParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetMetricAttributesQueryKey({ metricName }, params) },
|
||||
options,
|
||||
);
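Usage sketch (annotation, not part of the generated diff): metric attributes move from a POST mutation to a cached GET query keyed on the metric name. The module path is an assumption; the params type comes from the generated schemas.

import { useGetMetricAttributes } from 'api/generated/services/metrics'; // module path is an assumption
import type { GetMetricAttributesParams } from 'api/generated/services/sigNoz.schemas';

// GET /api/v2/metrics/{metricName}/attributes, disabled until metricName is set.
export function useMetricAttributes(
	metricName: string,
	params?: GetMetricAttributesParams,
) {
	return useGetMetricAttributes({ metricName }, params);
}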
|
||||
|
||||
@@ -145,28 +366,27 @@ export const invalidateGetMetricAlerts = async (
|
||||
* @summary Get metric dashboards
|
||||
*/
|
||||
export const getMetricDashboards = (
|
||||
params?: GetMetricDashboardsParams,
|
||||
{ metricName }: GetMetricDashboardsPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricDashboards200>({
|
||||
url: `/api/v2/metric/dashboards`,
|
||||
url: `/api/v2/metrics/${metricName}/dashboards`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricDashboardsQueryKey = (
|
||||
params?: GetMetricDashboardsParams,
|
||||
) => {
|
||||
return ['getMetricDashboards', ...(params ? [params] : [])] as const;
|
||||
export const getGetMetricDashboardsQueryKey = ({
|
||||
metricName,
|
||||
}: GetMetricDashboardsPathParameters) => {
|
||||
return ['getMetricDashboards'] as const;
|
||||
};
|
||||
|
||||
export const getGetMetricDashboardsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricDashboardsParams,
|
||||
{ metricName }: GetMetricDashboardsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricDashboards>>,
|
||||
@@ -178,13 +398,18 @@ export const getGetMetricDashboardsQueryOptions = <
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey(params);
|
||||
queryOptions?.queryKey ?? getGetMetricDashboardsQueryKey({ metricName });
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getMetricDashboards>>
|
||||
> = ({ signal }) => getMetricDashboards(params, signal);
|
||||
> = ({ signal }) => getMetricDashboards({ metricName }, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!metricName,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricDashboards>>,
|
||||
TError,
|
||||
TData
|
||||
@@ -204,7 +429,7 @@ export function useGetMetricDashboards<
|
||||
TData = Awaited<ReturnType<typeof getMetricDashboards>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricDashboardsParams,
|
||||
{ metricName }: GetMetricDashboardsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricDashboards>>,
|
||||
@@ -213,7 +438,10 @@ export function useGetMetricDashboards<
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetMetricDashboardsQueryOptions(params, options);
|
||||
const queryOptions = getGetMetricDashboardsQueryOptions(
|
||||
{ metricName },
|
||||
options,
|
||||
);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
@@ -229,11 +457,11 @@ export function useGetMetricDashboards<
|
||||
*/
|
||||
export const invalidateGetMetricDashboards = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetMetricDashboardsParams,
|
||||
{ metricName }: GetMetricDashboardsPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetMetricDashboardsQueryKey(params) },
|
||||
{ queryKey: getGetMetricDashboardsQueryKey({ metricName }) },
|
||||
options,
|
||||
);
|
||||
|
||||
@@ -245,28 +473,27 @@ export const invalidateGetMetricDashboards = async (
|
||||
* @summary Get metric highlights
|
||||
*/
|
||||
export const getMetricHighlights = (
|
||||
params?: GetMetricHighlightsParams,
|
||||
{ metricName }: GetMetricHighlightsPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricHighlights200>({
|
||||
url: `/api/v2/metric/highlights`,
|
||||
url: `/api/v2/metrics/${metricName}/highlights`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricHighlightsQueryKey = (
|
||||
params?: GetMetricHighlightsParams,
|
||||
) => {
|
||||
return ['getMetricHighlights', ...(params ? [params] : [])] as const;
|
||||
export const getGetMetricHighlightsQueryKey = ({
|
||||
metricName,
|
||||
}: GetMetricHighlightsPathParameters) => {
|
||||
return ['getMetricHighlights'] as const;
|
||||
};
|
||||
|
||||
export const getGetMetricHighlightsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricHighlightsParams,
|
||||
{ metricName }: GetMetricHighlightsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricHighlights>>,
|
||||
@@ -278,13 +505,18 @@ export const getGetMetricHighlightsQueryOptions = <
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey(params);
|
||||
queryOptions?.queryKey ?? getGetMetricHighlightsQueryKey({ metricName });
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getMetricHighlights>>
|
||||
> = ({ signal }) => getMetricHighlights(params, signal);
|
||||
> = ({ signal }) => getMetricHighlights({ metricName }, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!metricName,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricHighlights>>,
|
||||
TError,
|
||||
TData
|
||||
@@ -304,7 +536,7 @@ export function useGetMetricHighlights<
|
||||
TData = Awaited<ReturnType<typeof getMetricHighlights>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricHighlightsParams,
|
||||
{ metricName }: GetMetricHighlightsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricHighlights>>,
|
||||
@@ -313,7 +545,10 @@ export function useGetMetricHighlights<
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetMetricHighlightsQueryOptions(params, options);
|
||||
const queryOptions = getGetMetricHighlightsQueryOptions(
|
||||
{ metricName },
|
||||
options,
|
||||
);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
@@ -329,11 +564,115 @@ export function useGetMetricHighlights<
|
||||
*/
|
||||
export const invalidateGetMetricHighlights = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetMetricHighlightsParams,
|
||||
{ metricName }: GetMetricHighlightsPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetMetricHighlightsQueryKey(params) },
|
||||
{ queryKey: getGetMetricHighlightsQueryKey({ metricName }) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
|
||||
* @summary Get metric metadata
|
||||
*/
|
||||
export const getMetricMetadata = (
|
||||
{ metricName }: GetMetricMetadataPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricMetadata200>({
|
||||
url: `/api/v2/metrics/${metricName}/metadata`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricMetadataQueryKey = ({
|
||||
metricName,
|
||||
}: GetMetricMetadataPathParameters) => {
|
||||
return ['getMetricMetadata'] as const;
|
||||
};
|
||||
|
||||
export const getGetMetricMetadataQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
{ metricName }: GetMetricMetadataPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey({ metricName });
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>
|
||||
> = ({ signal }) => getMetricMetadata({ metricName }, signal);
|
||||
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!metricName,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetMetricMetadataQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>
|
||||
>;
|
||||
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get metric metadata
|
||||
*/
|
||||
|
||||
export function useGetMetricMetadata<
|
||||
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
{ metricName }: GetMetricMetadataPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetMetricMetadataQueryOptions({ metricName }, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get metric metadata
|
||||
*/
|
||||
export const invalidateGetMetricMetadata = async (
|
||||
queryClient: QueryClient,
|
||||
{ metricName }: GetMetricMetadataPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetMetricMetadataQueryKey({ metricName }) },
|
||||
options,
|
||||
);
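Usage sketch (annotation, not part of the generated diff): invalidating metadata after an edit. The module path is an assumption. Note that the generated getGetMetricMetadataQueryKey returns the constant ['getMetricMetadata'], so this clears every cached metadata query, not only the one for the given metric.

import { QueryClient } from 'react-query';

import { invalidateGetMetricMetadata } from 'api/generated/services/metrics'; // module path is an assumption

export async function refreshMetricMetadata(
	queryClient: QueryClient,
	metricName: string,
): Promise<void> {
	// Clears all getMetricMetadata cache entries (the key does not include metricName).
	await invalidateGetMetricMetadata(queryClient, { metricName });
}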
|
||||
|
||||
@@ -439,189 +778,6 @@ export const useUpdateMetricMetadata = <
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint returns attribute keys and their unique values for a specified metric
|
||||
* @summary Get metric attributes
|
||||
*/
|
||||
export const getMetricAttributes = (
|
||||
metricsexplorertypesMetricAttributesRequestDTO: MetricsexplorertypesMetricAttributesRequestDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricAttributes200>({
|
||||
url: `/api/v2/metrics/attributes`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: metricsexplorertypesMetricAttributesRequestDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricAttributesMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['getMetricAttributes'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
{ data: MetricsexplorertypesMetricAttributesRequestDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return getMetricAttributes(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type GetMetricAttributesMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>
|
||||
>;
|
||||
export type GetMetricAttributesMutationBody = MetricsexplorertypesMetricAttributesRequestDTO;
|
||||
export type GetMetricAttributesMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get metric attributes
|
||||
*/
|
||||
export const useGetMetricAttributes = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof getMetricAttributes>>,
|
||||
TError,
|
||||
{ data: MetricsexplorertypesMetricAttributesRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getGetMetricAttributesMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint returns metadata information like metric description, unit, type, temporality, monotonicity for a specified metric
|
||||
* @summary Get metric metadata
|
||||
*/
|
||||
export const getMetricMetadata = (
|
||||
params?: GetMetricMetadataParams,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetMetricMetadata200>({
|
||||
url: `/api/v2/metrics/metadata`,
|
||||
method: 'GET',
|
||||
params,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetMetricMetadataQueryKey = (
|
||||
params?: GetMetricMetadataParams,
|
||||
) => {
|
||||
return ['getMetricMetadata', ...(params ? [params] : [])] as const;
|
||||
};
|
||||
|
||||
export const getGetMetricMetadataQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricMetadataParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ?? getGetMetricMetadataQueryKey(params);
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>
|
||||
> = ({ signal }) => getMetricMetadata(params, signal);
|
||||
|
||||
return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetMetricMetadataQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>
|
||||
>;
|
||||
export type GetMetricMetadataQueryError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Get metric metadata
|
||||
*/
|
||||
|
||||
export function useGetMetricMetadata<
|
||||
TData = Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError = RenderErrorResponseDTO
|
||||
>(
|
||||
params?: GetMetricMetadataParams,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getMetricMetadata>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetMetricMetadataQueryOptions(params, options);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get metric metadata
|
||||
*/
|
||||
export const invalidateGetMetricMetadata = async (
|
||||
queryClient: QueryClient,
|
||||
params?: GetMetricMetadataParams,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetMetricMetadataQueryKey(params) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint provides list of metrics with their number of samples and timeseries for the given time range
|
||||
* @summary Get metrics statistics
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,15 +15,15 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetMyOrganization200,
|
||||
RenderErrorResponseDTO,
|
||||
TypesOrganizationDTO,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
import type {
|
||||
InvalidateOptions,
|
||||
MutationFunction,
|
||||
@@ -16,7 +15,9 @@ import type {
|
||||
UseQueryOptions,
|
||||
UseQueryResult,
|
||||
} from 'react-query';
|
||||
import { useMutation, useQuery } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
GetOrgPreference200,
|
||||
GetOrgPreferencePathParameters,
|
||||
@@ -30,8 +31,6 @@ import type {
|
||||
UpdateUserPreferencePathParameters,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
frontend/src/api/generated/services/querier/index.ts (new file, 191 lines)
@@ -0,0 +1,191 @@
|
||||
/**
|
||||
* ! Do not edit manually
|
||||
* * The file has been auto-generated using Orval for SigNoz
|
||||
* * regenerate with 'yarn generate:api'
|
||||
* SigNoz
|
||||
*/
|
||||
import type {
|
||||
MutationFunction,
|
||||
UseMutationOptions,
|
||||
UseMutationResult,
|
||||
} from 'react-query';
|
||||
import { useMutation } from 'react-query';
|
||||
|
||||
import { GeneratedAPIInstance } from '../../../index';
|
||||
import type {
|
||||
Querybuildertypesv5QueryRangeRequestDTO,
|
||||
QueryRangeV5200,
|
||||
RenderErrorResponseDTO,
|
||||
ReplaceVariables200,
|
||||
} from '../sigNoz.schemas';
|
||||
|
||||
type AwaitedInput<T> = PromiseLike<T> | T;
|
||||
|
||||
type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
|
||||
|
||||
/**
|
||||
* Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
|
||||
* @summary Query range
|
||||
*/
|
||||
export const queryRangeV5 = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<QueryRangeV5200>({
|
||||
url: `/api/v5/query_range`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getQueryRangeV5MutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['queryRangeV5'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return queryRangeV5(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type QueryRangeV5MutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>
|
||||
>;
|
||||
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
|
||||
export type QueryRangeV5MutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Query range
|
||||
*/
|
||||
export const useQueryRangeV5 = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof queryRangeV5>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getQueryRangeV5MutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
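Usage sketch (annotation, not part of the generated diff): query_range is exposed as a mutation hook. The module path is an assumption; the request body type comes from the generated schemas and is supplied by the caller.

import { useQueryRangeV5 } from 'api/generated/services/querier'; // module path is an assumption
import type { Querybuildertypesv5QueryRangeRequestDTO } from 'api/generated/services/sigNoz.schemas';

export function useRunQueryRange() {
	const { mutate, isLoading, data, error } = useQueryRangeV5();
	// mutate expects the variables wrapped as { data: <request body> }.
	const run = (request: Querybuildertypesv5QueryRangeRequestDTO) =>
		mutate({ data: request });
	return { run, isLoading, data, error };
}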
|
||||
/**
|
||||
* Replace variables in a query
|
||||
* @summary Replace variables
|
||||
*/
|
||||
export const replaceVariables = (
|
||||
querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<ReplaceVariables200>({
|
||||
url: `/api/v5/substitute_vars`,
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: querybuildertypesv5QueryRangeRequestDTO,
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getReplaceVariablesMutationOptions = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['replaceVariables'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO }
|
||||
> = (props) => {
|
||||
const { data } = props ?? {};
|
||||
|
||||
return replaceVariables(data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type ReplaceVariablesMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof replaceVariables>>
|
||||
>;
|
||||
export type ReplaceVariablesMutationBody = Querybuildertypesv5QueryRangeRequestDTO;
|
||||
export type ReplaceVariablesMutationError = RenderErrorResponseDTO;
|
||||
|
||||
/**
|
||||
* @summary Replace variables
|
||||
*/
|
||||
export const useReplaceVariables = <
|
||||
TError = RenderErrorResponseDTO,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof replaceVariables>>,
|
||||
TError,
|
||||
{ data: Querybuildertypesv5QueryRangeRequestDTO },
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getReplaceVariablesMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
frontend/src/api/generated/services/role/index.ts (new file, 433 lines)
@@ -0,0 +1,433 @@
|
||||
/**
 * ! Do not edit manually
 * * The file has been auto-generated using Orval for SigNoz
 * * regenerate with 'yarn generate:api'
 * SigNoz
 */
import type {
  InvalidateOptions,
  MutationFunction,
  QueryClient,
  QueryFunction,
  QueryKey,
  UseMutationOptions,
  UseMutationResult,
  UseQueryOptions,
  UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
  CreateRole201,
  DeleteRolePathParameters,
  GetRole200,
  GetRolePathParameters,
  ListRoles200,
  PatchRolePathParameters,
  RenderErrorResponseDTO,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;

/**
 * This endpoint lists all roles
 * @summary List roles
 */
export const listRoles = (signal?: AbortSignal) => {
  return GeneratedAPIInstance<ListRoles200>({
    url: `/api/v1/roles`,
    method: 'GET',
    signal,
  });
};

export const getListRolesQueryKey = () => {
  return ['listRoles'] as const;
};

export const getListRolesQueryOptions = <
  TData = Awaited<ReturnType<typeof listRoles>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<Awaited<ReturnType<typeof listRoles>>, TError, TData>;
}) => {
  const { query: queryOptions } = options ?? {};

  const queryKey = queryOptions?.queryKey ?? getListRolesQueryKey();

  const queryFn: QueryFunction<Awaited<ReturnType<typeof listRoles>>> = ({
    signal,
  }) => listRoles(signal);

  return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
    Awaited<ReturnType<typeof listRoles>>,
    TError,
    TData
  > & { queryKey: QueryKey };
};

export type ListRolesQueryResult = NonNullable<
  Awaited<ReturnType<typeof listRoles>>
>;
export type ListRolesQueryError = RenderErrorResponseDTO;

/**
 * @summary List roles
 */

export function useListRoles<
  TData = Awaited<ReturnType<typeof listRoles>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<Awaited<ReturnType<typeof listRoles>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getListRolesQueryOptions(options);

  const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
    queryKey: QueryKey;
  };

  query.queryKey = queryOptions.queryKey;

  return query;
}

/**
 * @summary List roles
 */
export const invalidateListRoles = async (
  queryClient: QueryClient,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(
    { queryKey: getListRolesQueryKey() },
    options,
  );

  return queryClient;
};

/**
 * This endpoint creates a role
 * @summary Create role
 */
export const createRole = (signal?: AbortSignal) => {
  return GeneratedAPIInstance<CreateRole201>({
    url: `/api/v1/roles`,
    method: 'POST',
    signal,
  });
};

export const getCreateRoleMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof createRole>>,
    TError,
    void,
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof createRole>>,
  TError,
  void,
  TContext
> => {
  const mutationKey = ['createRole'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof createRole>>,
    void
  > = () => {
    return createRole();
  };

  return { mutationFn, ...mutationOptions };
};

export type CreateRoleMutationResult = NonNullable<
  Awaited<ReturnType<typeof createRole>>
>;

export type CreateRoleMutationError = RenderErrorResponseDTO;

/**
 * @summary Create role
 */
export const useCreateRole = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof createRole>>,
    TError,
    void,
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof createRole>>,
  TError,
  void,
  TContext
> => {
  const mutationOptions = getCreateRoleMutationOptions(options);

  return useMutation(mutationOptions);
};
/**
 * This endpoint deletes a role
 * @summary Delete role
 */
export const deleteRole = ({ id }: DeleteRolePathParameters) => {
  return GeneratedAPIInstance<string>({
    url: `/api/v1/roles/${id}`,
    method: 'DELETE',
  });
};

export const getDeleteRoleMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof deleteRole>>,
    TError,
    { pathParams: DeleteRolePathParameters },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof deleteRole>>,
  TError,
  { pathParams: DeleteRolePathParameters },
  TContext
> => {
  const mutationKey = ['deleteRole'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof deleteRole>>,
    { pathParams: DeleteRolePathParameters }
  > = (props) => {
    const { pathParams } = props ?? {};

    return deleteRole(pathParams);
  };

  return { mutationFn, ...mutationOptions };
};

export type DeleteRoleMutationResult = NonNullable<
  Awaited<ReturnType<typeof deleteRole>>
>;

export type DeleteRoleMutationError = RenderErrorResponseDTO;

/**
 * @summary Delete role
 */
export const useDeleteRole = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof deleteRole>>,
    TError,
    { pathParams: DeleteRolePathParameters },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof deleteRole>>,
  TError,
  { pathParams: DeleteRolePathParameters },
  TContext
> => {
  const mutationOptions = getDeleteRoleMutationOptions(options);

  return useMutation(mutationOptions);
};
/**
 * This endpoint gets a role
 * @summary Get role
 */
export const getRole = (
  { id }: GetRolePathParameters,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<GetRole200>({
    url: `/api/v1/roles/${id}`,
    method: 'GET',
    signal,
  });
};

export const getGetRoleQueryKey = ({ id }: GetRolePathParameters) => {
  return ['getRole'] as const;
};

export const getGetRoleQueryOptions = <
  TData = Awaited<ReturnType<typeof getRole>>,
  TError = RenderErrorResponseDTO
>(
  { id }: GetRolePathParameters,
  options?: {
    query?: UseQueryOptions<Awaited<ReturnType<typeof getRole>>, TError, TData>;
  },
) => {
  const { query: queryOptions } = options ?? {};

  const queryKey = queryOptions?.queryKey ?? getGetRoleQueryKey({ id });

  const queryFn: QueryFunction<Awaited<ReturnType<typeof getRole>>> = ({
    signal,
  }) => getRole({ id }, signal);

  return {
    queryKey,
    queryFn,
    enabled: !!id,
    ...queryOptions,
  } as UseQueryOptions<Awaited<ReturnType<typeof getRole>>, TError, TData> & {
    queryKey: QueryKey;
  };
};

export type GetRoleQueryResult = NonNullable<
  Awaited<ReturnType<typeof getRole>>
>;
export type GetRoleQueryError = RenderErrorResponseDTO;

/**
 * @summary Get role
 */

export function useGetRole<
  TData = Awaited<ReturnType<typeof getRole>>,
  TError = RenderErrorResponseDTO
>(
  { id }: GetRolePathParameters,
  options?: {
    query?: UseQueryOptions<Awaited<ReturnType<typeof getRole>>, TError, TData>;
  },
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getGetRoleQueryOptions({ id }, options);

  const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
    queryKey: QueryKey;
  };

  query.queryKey = queryOptions.queryKey;

  return query;
}

/**
 * @summary Get role
 */
export const invalidateGetRole = async (
  queryClient: QueryClient,
  { id }: GetRolePathParameters,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(
    { queryKey: getGetRoleQueryKey({ id }) },
    options,
  );

  return queryClient;
};

/**
 * This endpoint patches a role
 * @summary Patch role
 */
export const patchRole = ({ id }: PatchRolePathParameters) => {
  return GeneratedAPIInstance<string>({
    url: `/api/v1/roles/${id}`,
    method: 'PATCH',
  });
};

export const getPatchRoleMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof patchRole>>,
    TError,
    { pathParams: PatchRolePathParameters },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof patchRole>>,
  TError,
  { pathParams: PatchRolePathParameters },
  TContext
> => {
  const mutationKey = ['patchRole'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof patchRole>>,
    { pathParams: PatchRolePathParameters }
  > = (props) => {
    const { pathParams } = props ?? {};

    return patchRole(pathParams);
  };

  return { mutationFn, ...mutationOptions };
};

export type PatchRoleMutationResult = NonNullable<
  Awaited<ReturnType<typeof patchRole>>
>;

export type PatchRoleMutationError = RenderErrorResponseDTO;

/**
 * @summary Patch role
 */
export const usePatchRole = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof patchRole>>,
    TError,
    { pathParams: PatchRolePathParameters },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof patchRole>>,
  TError,
  { pathParams: PatchRolePathParameters },
  TContext
> => {
  const mutationOptions = getPatchRoleMutationOptions(options);

  return useMutation(mutationOptions);
};
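A minimal, hypothetical sketch of how these generated role hooks could be consumed from a component (editor's illustration, not part of the diff); the RolesPanel component and its rendering are placeholders:

// Hypothetical usage sketch (editor's illustration, not part of this diff).
import { useQueryClient } from 'react-query';

function RolesPanel(): JSX.Element {
  const queryClient = useQueryClient();

  // Cached list of roles, keyed by getListRolesQueryKey()
  const { data: roles, isLoading } = useListRoles();

  // Create a role, then invalidate the cached list so it refetches
  const { mutate: createRole } = useCreateRole({
    mutation: { onSuccess: () => invalidateListRoles(queryClient) },
  });

  if (isLoading) return <div>Loading roles...</div>;

  return (
    <div>
      <pre>{JSON.stringify(roles, null, 2)}</pre>
      <button type="button" onClick={(): void => createRole()}>
        Create role
      </button>
    </div>
  );
}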
@@ -4,7 +4,6 @@
 * * regenerate with 'yarn generate:api'
 * SigNoz
 */
import { useMutation, useQuery } from 'react-query';
import type {
  InvalidateOptions,
  MutationFunction,
@@ -16,7 +15,9 @@ import type {
  UseQueryOptions,
  UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
  AuthtypesPostableEmailPasswordSessionDTO,
  AuthtypesPostableRotateTokenDTO,
@@ -31,8 +32,6 @@
  RotateSession200,
} from '../sigNoz.schemas';

import { GeneratedAPIInstance } from '../../../index';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
File diff suppressed because it is too large
@@ -4,7 +4,6 @@
 * * regenerate with 'yarn generate:api'
 * SigNoz
 */
import { useMutation, useQuery } from 'react-query';
import type {
  InvalidateOptions,
  MutationFunction,
@@ -16,7 +15,9 @@ import type {
  UseQueryOptions,
  UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
  AcceptInvite201,
  ChangePasswordPathParameters,
@@ -37,8 +38,9 @@
  RenderErrorResponseDTO,
  RevokeAPIKeyPathParameters,
  TypesChangePasswordRequestDTO,
  TypesPostableAPIKeyDTO,
  TypesPostableAcceptInviteDTO,
  TypesPostableAPIKeyDTO,
  TypesPostableForgotPasswordDTO,
  TypesPostableInviteDTO,
  TypesPostableResetPasswordDTO,
  TypesStorableAPIKeyDTO,
@@ -48,8 +50,6 @@
  UpdateUserPathParameters,
} from '../sigNoz.schemas';

import { GeneratedAPIInstance } from '../../../index';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;
@@ -1568,3 +1568,87 @@ export const invalidateGetMyUser = async (

  return queryClient;
};

/**
 * This endpoint initiates the forgot password flow by sending a reset password email
 * @summary Forgot password
 */
export const forgotPassword = (
  typesPostableForgotPasswordDTO: TypesPostableForgotPasswordDTO,
  signal?: AbortSignal,
) => {
  return GeneratedAPIInstance<void>({
    url: `/api/v2/factor_password/forgot`,
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    data: typesPostableForgotPasswordDTO,
    signal,
  });
};

export const getForgotPasswordMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof forgotPassword>>,
    TError,
    { data: TypesPostableForgotPasswordDTO },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof forgotPassword>>,
  TError,
  { data: TypesPostableForgotPasswordDTO },
  TContext
> => {
  const mutationKey = ['forgotPassword'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof forgotPassword>>,
    { data: TypesPostableForgotPasswordDTO }
  > = (props) => {
    const { data } = props ?? {};

    return forgotPassword(data);
  };

  return { mutationFn, ...mutationOptions };
};

export type ForgotPasswordMutationResult = NonNullable<
  Awaited<ReturnType<typeof forgotPassword>>
>;
export type ForgotPasswordMutationBody = TypesPostableForgotPasswordDTO;
export type ForgotPasswordMutationError = RenderErrorResponseDTO;

/**
 * @summary Forgot password
 */
export const useForgotPassword = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof forgotPassword>>,
    TError,
    { data: TypesPostableForgotPasswordDTO },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof forgotPassword>>,
  TError,
  { data: TypesPostableForgotPasswordDTO },
  TContext
> => {
  const mutationOptions = getForgotPasswordMutationOptions(options);

  return useMutation(mutationOptions);
};
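A hedged usage sketch for the new hook (editor's illustration, not part of the diff); the fields of TypesPostableForgotPasswordDTO are not visible here, so the `{ email }` payload and the ForgotPasswordForm component are assumptions:

// Hypothetical usage sketch (editor's illustration, not part of this diff).
// Assumption: TypesPostableForgotPasswordDTO carries the account email; adjust to the real schema.
function ForgotPasswordForm(): JSX.Element {
  const { mutate: sendResetEmail, isLoading } = useForgotPassword({
    mutation: { onError: (error) => console.error(error) },
  });

  return (
    <button
      type="button"
      disabled={isLoading}
      onClick={(): void =>
        sendResetEmail({
          data: { email: 'user@example.com' } as TypesPostableForgotPasswordDTO,
        })
      }
    >
      Send reset email
    </button>
  );
}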
269  frontend/src/api/generated/services/zeus/index.ts  Normal file
@@ -0,0 +1,269 @@
/**
 * ! Do not edit manually
 * * The file has been auto-generated using Orval for SigNoz
 * * regenerate with 'yarn generate:api'
 * SigNoz
 */
import type {
  InvalidateOptions,
  MutationFunction,
  QueryClient,
  QueryFunction,
  QueryKey,
  UseMutationOptions,
  UseMutationResult,
  UseQueryOptions,
  UseQueryResult,
} from 'react-query';
import { useMutation, useQuery } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
  GetHosts200,
  RenderErrorResponseDTO,
  ZeustypesPostableHostDTO,
  ZeustypesPostableProfileDTO,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;

/**
 * This endpoint gets the host info from zeus.
 * @summary Get host info from Zeus.
 */
export const getHosts = (signal?: AbortSignal) => {
  return GeneratedAPIInstance<GetHosts200>({
    url: `/api/v2/zeus/hosts`,
    method: 'GET',
    signal,
  });
};

export const getGetHostsQueryKey = () => {
  return ['getHosts'] as const;
};

export const getGetHostsQueryOptions = <
  TData = Awaited<ReturnType<typeof getHosts>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}) => {
  const { query: queryOptions } = options ?? {};

  const queryKey = queryOptions?.queryKey ?? getGetHostsQueryKey();

  const queryFn: QueryFunction<Awaited<ReturnType<typeof getHosts>>> = ({
    signal,
  }) => getHosts(signal);

  return { queryKey, queryFn, ...queryOptions } as UseQueryOptions<
    Awaited<ReturnType<typeof getHosts>>,
    TError,
    TData
  > & { queryKey: QueryKey };
};

export type GetHostsQueryResult = NonNullable<
  Awaited<ReturnType<typeof getHosts>>
>;
export type GetHostsQueryError = RenderErrorResponseDTO;

/**
 * @summary Get host info from Zeus.
 */

export function useGetHosts<
  TData = Awaited<ReturnType<typeof getHosts>>,
  TError = RenderErrorResponseDTO
>(options?: {
  query?: UseQueryOptions<Awaited<ReturnType<typeof getHosts>>, TError, TData>;
}): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
  const queryOptions = getGetHostsQueryOptions(options);

  const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
    queryKey: QueryKey;
  };

  query.queryKey = queryOptions.queryKey;

  return query;
}

/**
 * @summary Get host info from Zeus.
 */
export const invalidateGetHosts = async (
  queryClient: QueryClient,
  options?: InvalidateOptions,
): Promise<QueryClient> => {
  await queryClient.invalidateQueries(
    { queryKey: getGetHostsQueryKey() },
    options,
  );

  return queryClient;
};

/**
 * This endpoint saves the host of a deployment to zeus.
 * @summary Put host in Zeus for a deployment.
 */
export const putHost = (zeustypesPostableHostDTO: ZeustypesPostableHostDTO) => {
  return GeneratedAPIInstance<void>({
    url: `/api/v2/zeus/hosts`,
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    data: zeustypesPostableHostDTO,
  });
};

export const getPutHostMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putHost>>,
    TError,
    { data: ZeustypesPostableHostDTO },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof putHost>>,
  TError,
  { data: ZeustypesPostableHostDTO },
  TContext
> => {
  const mutationKey = ['putHost'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof putHost>>,
    { data: ZeustypesPostableHostDTO }
  > = (props) => {
    const { data } = props ?? {};

    return putHost(data);
  };

  return { mutationFn, ...mutationOptions };
};

export type PutHostMutationResult = NonNullable<
  Awaited<ReturnType<typeof putHost>>
>;
export type PutHostMutationBody = ZeustypesPostableHostDTO;
export type PutHostMutationError = RenderErrorResponseDTO;

/**
 * @summary Put host in Zeus for a deployment.
 */
export const usePutHost = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putHost>>,
    TError,
    { data: ZeustypesPostableHostDTO },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof putHost>>,
  TError,
  { data: ZeustypesPostableHostDTO },
  TContext
> => {
  const mutationOptions = getPutHostMutationOptions(options);

  return useMutation(mutationOptions);
};
/**
 * This endpoint saves the profile of a deployment to zeus.
 * @summary Put profile in Zeus for a deployment.
 */
export const putProfile = (
  zeustypesPostableProfileDTO: ZeustypesPostableProfileDTO,
) => {
  return GeneratedAPIInstance<void>({
    url: `/api/v2/zeus/profiles`,
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    data: zeustypesPostableProfileDTO,
  });
};

export const getPutProfileMutationOptions = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putProfile>>,
    TError,
    { data: ZeustypesPostableProfileDTO },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof putProfile>>,
  TError,
  { data: ZeustypesPostableProfileDTO },
  TContext
> => {
  const mutationKey = ['putProfile'];
  const { mutation: mutationOptions } = options
    ? options.mutation &&
      'mutationKey' in options.mutation &&
      options.mutation.mutationKey
      ? options
      : { ...options, mutation: { ...options.mutation, mutationKey } }
    : { mutation: { mutationKey } };

  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof putProfile>>,
    { data: ZeustypesPostableProfileDTO }
  > = (props) => {
    const { data } = props ?? {};

    return putProfile(data);
  };

  return { mutationFn, ...mutationOptions };
};

export type PutProfileMutationResult = NonNullable<
  Awaited<ReturnType<typeof putProfile>>
>;
export type PutProfileMutationBody = ZeustypesPostableProfileDTO;
export type PutProfileMutationError = RenderErrorResponseDTO;

/**
 * @summary Put profile in Zeus for a deployment.
 */
export const usePutProfile = <
  TError = RenderErrorResponseDTO,
  TContext = unknown
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof putProfile>>,
    TError,
    { data: ZeustypesPostableProfileDTO },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof putProfile>>,
  TError,
  { data: ZeustypesPostableProfileDTO },
  TContext
> => {
  const mutationOptions = getPutProfileMutationOptions(options);

  return useMutation(mutationOptions);
};
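A hypothetical sketch of wiring the new zeus hooks together (editor's illustration, not part of the diff); the empty DTO casts and the useDeploymentHosts helper are placeholders, since the actual ZeustypesPostableHostDTO and ZeustypesPostableProfileDTO fields live in sigNoz.schemas and are not shown here:

// Hypothetical usage sketch (editor's illustration, not part of this diff).
// The casts below are placeholders for the real DTO payloads.
function useDeploymentHosts() {
  const hosts = useGetHosts();
  const { mutate: saveHost } = usePutHost();
  const { mutate: saveProfile } = usePutProfile();

  const register = (): void => {
    saveHost({ data: {} as ZeustypesPostableHostDTO });
    saveProfile({ data: {} as ZeustypesPostableProfileDTO });
  };

  return { hosts, register };
}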
@@ -1,6 +1,7 @@
/* eslint-disable sonarjs/cognitive-complexity */
/* eslint-disable no-param-reassign */
/* eslint-disable @typescript-eslint/no-explicit-any */
import { QueryClient } from 'react-query';
import getLocalStorageApi from 'api/browser/localstorage/get';
import post from 'api/v2/sessions/rotate/post';
import afterLogin from 'AppRoutes/utils';
@@ -12,18 +13,9 @@ import axios, {
import { ENVIRONMENT } from 'constants/env';
import { Events } from 'constants/events';
import { LOCALSTORAGE } from 'constants/localStorage';
import { QueryClient } from 'react-query';
import { eventEmitter } from 'utils/getEventEmitter';

import apiV1, {
  apiAlertManager,
  apiV2,
  apiV3,
  apiV4,
  apiV5,
  gatewayApiV1,
  gatewayApiV2,
} from './apiV1';
import apiV1, { apiAlertManager, apiV2, apiV3, apiV4, apiV5 } from './apiV1';
import { Logout } from './utils';

const RESPONSE_TIMEOUT_THRESHOLD = 5000; // 5 seconds
@@ -211,24 +203,6 @@ LogEventAxiosInstance.interceptors.response.use(
LogEventAxiosInstance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V1
export const GatewayApiV1Instance = axios.create({
  baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV1}`,
});

GatewayApiV1Instance.interceptors.response.use(
  interceptorsResponse,
  interceptorRejected,
);

GatewayApiV1Instance.interceptors.request.use(interceptorsRequestResponse);
//

// gateway Api V2
export const GatewayApiV2Instance = axios.create({
  baseURL: `${ENVIRONMENT.baseURL}${gatewayApiV2}`,
});

// generated API Instance
export const GeneratedAPIInstance = axios.create({
  baseURL: ENVIRONMENT.baseURL,
@@ -240,14 +214,6 @@ GeneratedAPIInstance.interceptors.response.use(
  interceptorRejected,
);

GatewayApiV2Instance.interceptors.response.use(
  interceptorsResponse,
  interceptorRejected,
);

GatewayApiV2Instance.interceptors.request.use(interceptorsRequestResponse);
//

AxiosAlertManagerInstance.interceptors.response.use(
  interceptorsResponse,
  interceptorRejected,
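For readers unfamiliar with the pattern in the hunks above, a minimal sketch of the axios instance-plus-interceptors setup that GeneratedAPIInstance follows (editor's illustration, not part of the diff); the handler bodies are placeholders for the real interceptorsResponse, interceptorRejected, and interceptorsRequestResponse defined in this file:

// Minimal sketch (editor's illustration, not part of this diff); handler logic is a placeholder.
import axios from 'axios';

const ExampleInstance = axios.create({ baseURL: 'https://signoz.example.com' });

ExampleInstance.interceptors.response.use(
  (response) => response, // pass successful responses through unchanged
  (error) => Promise.reject(error), // central hook for 401 handling / session rotation
);

ExampleInstance.interceptors.request.use((config) => config); // e.g. attach auth headers here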
@@ -11,13 +11,10 @@ export const getMetricMetadata = async (
): Promise<SuccessResponseV2<MetricMetadataResponse> | ErrorResponseV2> => {
  try {
    const encodedMetricName = encodeURIComponent(metricName);
    const response = await axios.get(
      `/metrics/metadata?metricName=${encodedMetricName}`,
      {
        signal,
        headers,
      },
    );
    const response = await axios.get(`/metrics/${encodedMetricName}/metadata`, {
      signal,
      headers,
    });

    return {
      httpStatusCode: response.status,
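The hunk above moves the metric name out of the query string and into the URL path, keeping encodeURIComponent for safety; an illustrative comparison (editor's note, not part of the diff):

// Illustrative only (editor's note, not part of this diff).
const metricName = 'http.server.duration';
const encoded = encodeURIComponent(metricName);

const before = `/metrics/metadata?metricName=${encoded}`; // old endpoint shape
const after = `/metrics/${encoded}/metadata`; // new endpoint shape used by getMetricMetadata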
@@ -1,20 +0,0 @@
import { GatewayApiV2Instance } from 'api';
import { ErrorResponse, SuccessResponse } from 'types/api';
import { UpdateProfileProps } from 'types/api/onboarding/types';

const updateProfile = async (
  props: UpdateProfileProps,
): Promise<SuccessResponse<UpdateProfileProps> | ErrorResponse> => {
  const response = await GatewayApiV2Instance.put('/profiles/me', {
    ...props,
  });

  return {
    statusCode: 200,
    error: null,
    message: response.data.status,
    payload: response.data.data,
  };
};

export default updateProfile;
@@ -1,5 +1,5 @@
import axios from 'api';
import { useMutation, UseMutationResult } from 'react-query';
import axios from 'api';

export interface DeleteDowntimeScheduleProps {
  id?: number;
@@ -1,7 +1,7 @@
import { useQuery, UseQueryResult } from 'react-query';
import axios from 'api';
import { AxiosError, AxiosResponse } from 'axios';
import { Option } from 'container/PlannedDowntime/PlannedDowntimeutils';
import { useQuery, UseQueryResult } from 'react-query';

export type Recurrence = {
  startTime?: string | null;
@@ -1,19 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';

const deleteDomain = async (id: string): Promise<SuccessResponseV2<null>> => {
  try {
    const response = await axios.delete<null>(`/domains/${id}`);

    return {
      httpStatusCode: response.status,
      data: null,
    };
  } catch (error) {
    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
  }
};

export default deleteDomain;
@@ -1,25 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { UpdatableAuthDomain } from 'types/api/v1/domains/put';

const put = async (
  props: UpdatableAuthDomain,
): Promise<SuccessResponseV2<null>> => {
  try {
    const response = await axios.put<RawSuccessResponse<null>>(
      `/domains/${props.id}`,
      { config: props.config },
    );

    return {
      httpStatusCode: response.status,
      data: response.data.data,
    };
  } catch (error) {
    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
  }
};

export default put;
@@ -1,24 +0,0 @@
import axios from 'api';
import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
import { AxiosError } from 'axios';
import { ErrorV2Resp, RawSuccessResponse, SuccessResponseV2 } from 'types/api';
import { GettableAuthDomain } from 'types/api/v1/domains/list';

const listAllDomain = async (): Promise<
  SuccessResponseV2<GettableAuthDomain[]>
> => {
  try {
    const response = await axios.get<RawSuccessResponse<GettableAuthDomain[]>>(
      `/domains`,
    );

    return {
      httpStatusCode: response.status,
      data: response.data.data,
    };
  } catch (error) {
    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
  }
};

export default listAllDomain;
Some files were not shown because too many files have changed in this diff.