Compare commits

8 Commits: fix/render ... improveTra

| Author | SHA1 | Date |
|---|---|---|
|  | 334b717f23 |  |
|  | d08b3313bb |  |
|  | 606f7c1272 |  |
|  | fa8cc92105 |  |
|  | 47a03e8f7e |  |
|  | ba68efd60f |  |
|  | dce7ded49b |  |
|  | 3f66c20316 |  |
@@ -1,6 +1,6 @@
services:
  clickhouse:
    image: clickhouse/clickhouse-server:25.5.6
    image: clickhouse/clickhouse-server:24.1.2-alpine
    container_name: clickhouse
    volumes:
      - ${PWD}/fs/etc/clickhouse-server/config.d/config.xml:/etc/clickhouse-server/config.d/config.xml
@@ -23,8 +23,6 @@ services:
      retries: 3
    depends_on:
      - zookeeper
    environment:
      - CLICKHOUSE_SKIP_USER_SETUP=1
  zookeeper:
    image: signoz/zookeeper:3.7.1
    container_name: zookeeper
@@ -42,7 +40,7 @@ services:
      timeout: 5s
      retries: 3
  schema-migrator-sync:
    image: signoz/signoz-schema-migrator:v0.129.13
    image: signoz/signoz-schema-migrator:v0.129.2
    container_name: schema-migrator-sync
    command:
      - sync
@@ -55,7 +53,7 @@ services:
      condition: service_healthy
    restart: on-failure
  schema-migrator-async:
    image: signoz/signoz-schema-migrator:v0.129.13
    image: signoz/signoz-schema-migrator:v0.129.2
    container_name: schema-migrator-async
    command:
      - async
@@ -1,6 +1,6 @@
services:
  signoz-otel-collector:
    image: signoz/signoz-otel-collector:v0.129.6
    image: signoz/signoz-otel-collector:v0.128.2
    container_name: signoz-otel-collector-dev
    command:
      - --config=/etc/otel-collector-config.yaml
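The two compose diffs above cover the local dev services. As a sketch of how they are typically brought up together, the repository's Makefile (further down in this diff) wires both into a single target:

```sh
# Start the ClickHouse + SigNoz OTel Collector dev environment
# (per the Makefile later in this diff, devenv-up chains both services)
make devenv-up
# Endpoints the target reports:
#   ClickHouse:            http://localhost:8123
#   SigNoz OTel Collector: grpc://localhost:4317, http://localhost:4318

# Wipe local ClickHouse data when starting over
make devenv-clickhouse-clean
```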
.github/CODEOWNERS (113 changes)
@@ -1,137 +1,44 @@
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners

# Owners are automatically requested for review for PRs that changes code

# that they own.

/frontend/ @SigNoz/frontend-maintainers

# Onboarding

/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish

/frontend/ @SigNoz/frontend @YounixM
/frontend/src/container/MetricsApplication @srikanthccv
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
/deploy/ @SigNoz/devops
.github @SigNoz/devops

# Scaffold Owners

/pkg/config/ @vikrantgupta25
/pkg/errors/ @vikrantgupta25
/pkg/factory/ @vikrantgupta25
/pkg/types/ @vikrantgupta25
/pkg/valuer/ @vikrantgupta25
/cmd/ @vikrantgupta25
.golangci.yml @vikrantgupta25
/pkg/config/ @grandwizard28
/pkg/errors/ @grandwizard28
/pkg/factory/ @grandwizard28
/pkg/types/ @grandwizard28
/pkg/valuer/ @grandwizard28
/cmd/ @grandwizard28
.golangci.yml @grandwizard28

# Zeus Owners

/pkg/zeus/ @vikrantgupta25
/ee/zeus/ @vikrantgupta25
/pkg/licensing/ @vikrantgupta25
/ee/licensing/ @vikrantgupta25

# SQL Owners

/pkg/sqlmigration/ @vikrantgupta25
/ee/sqlmigration/ @vikrantgupta25
/pkg/sqlschema/ @vikrantgupta25
/ee/sqlschema/ @vikrantgupta25

# Analytics Owners

/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25

# Querier Owners

/pkg/querier/ @srikanthccv
/pkg/variables/ @srikanthccv
/pkg/types/querybuildertypes/ @srikanthccv
/pkg/types/telemetrytypes/ @srikanthccv
/pkg/querybuilder/ @srikanthccv
/pkg/telemetrylogs/ @srikanthccv
/pkg/telemetrymetadata/ @srikanthccv
/pkg/telemetrymetrics/ @srikanthccv
/pkg/telemetrytraces/ @srikanthccv

# Metrics

/pkg/types/metrictypes/ @srikanthccv
/pkg/types/metricsexplorertypes/ @srikanthccv
/pkg/modules/metricsexplorer/ @srikanthccv
/pkg/prometheus/ @srikanthccv

# APM

/pkg/types/servicetypes/ @srikanthccv
/pkg/types/apdextypes/ @srikanthccv
/pkg/modules/apdex/ @srikanthccv
/pkg/modules/services/ @srikanthccv

# Dashboard

/pkg/types/dashboardtypes/ @srikanthccv
/pkg/modules/dashboard/ @srikanthccv

# Rule/Alertmanager

/pkg/types/ruletypes/ @srikanthccv
/pkg/types/alertmanagertypes @srikanthccv
/pkg/alertmanager/ @srikanthccv
/pkg/ruler/ @srikanthccv

# Correlation-adjacent

/pkg/contextlinks/ @srikanthccv
/pkg/types/parsertypes/ @srikanthccv
/pkg/queryparser/ @srikanthccv

# AuthN / AuthZ Owners

/pkg/authz/ @vikrantgupta25
/ee/authz/ @vikrantgupta25
/pkg/authn/ @vikrantgupta25
/ee/authn/ @vikrantgupta25
/pkg/modules/user/ @vikrantgupta25
/pkg/modules/session/ @vikrantgupta25
/pkg/modules/organization/ @vikrantgupta25
/pkg/modules/authdomain/ @vikrantgupta25
/pkg/modules/role/ @vikrantgupta25

# Integration tests

/tests/integration/ @vikrantgupta25

# OpenAPI types generator

/frontend/src/api @SigNoz/frontend-maintainers

# Dashboard Owners

/frontend/src/hooks/dashboard/ @SigNoz/pulse-frontend
/frontend/src/providers/Dashboard/ @SigNoz/pulse-frontend

## Dashboard Types

/frontend/src/api/types/dashboard/ @SigNoz/pulse-frontend

## Dashboard List

/frontend/src/pages/DashboardsListPage/ @SigNoz/pulse-frontend
/frontend/src/container/ListOfDashboard/ @SigNoz/pulse-frontend

## Dashboard Page

/frontend/src/pages/DashboardPage/ @SigNoz/pulse-frontend
/frontend/src/container/DashboardContainer/ @SigNoz/pulse-frontend
/frontend/src/container/GridCardLayout/ @SigNoz/pulse-frontend
/frontend/src/container/NewWidget/ @SigNoz/pulse-frontend

## Public Dashboard Page

/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend

## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
.github/pull_request_template.md (89 changes)
@@ -1,85 +1,72 @@
## Pull Request
## 📄 Summary

<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->

---

### 📄 Summary
> Why does this change exist?
> What problem does it solve, and why is this the right approach?
## ✅ Changes

#### Screenshots / Screen Recordings (if applicable)
> Include screenshots or screen recordings that clearly show the behavior before the change and the result after the change. This helps reviewers quickly understand the impact and verify the update.

#### Issues closed by this PR
> Reference issues using `Closes #issue-number` to enable automatic closure on merge.
- [ ] Feature: Brief description
- [ ] Bug fix: Brief description

---

### ✅ Change Type
_Select all that apply_
## 🏷️ Required: Add Relevant Labels

- [ ] ✨ Feature
- [ ] 🐛 Bug fix
- [ ] ♻️ Refactor
- [ ] 🛠️ Infra / Tooling
- [ ] 🧪 Test-only
> ⚠️ **Manually add appropriate labels in the PR sidebar**
Please select one or more labels (as applicable):

ex:

- `frontend`
- `backend`
- `devops`
- `bug`
- `enhancement`
- `ui`
- `test`

---

### 🐛 Bug Context
> Required if this PR fixes a bug
## 👥 Reviewers

#### Root Cause
> What caused the issue?
> Regression, faulty assumption, edge case, refactor, etc.
> Tag the relevant teams for review:

#### Fix Strategy
> How does this PR address the root cause?
- frontend / backend / devops

---

### 🧪 Testing Strategy
> How was this change validated?
## 🧪 How to Test

- Tests added/updated:
- Manual verification:
- Edge cases covered:
<!-- Describe how reviewers can test this PR -->
1. ...
2. ...
3. ...

---

### ⚠️ Risk & Impact Assessment
> What could break? How do we recover?
## 🔍 Related Issues

- Blast radius:
- Potential regressions:
- Rollback plan:
<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
Closes #

---

### 📝 Changelog
> Fill only if this affects users, APIs, UI, or documented behavior
> Use **N/A** for internal or non-user-facing changes
## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)

| Field | Value |
|------|-------|
| Deployment Type | Cloud / OSS / Enterprise |
| Change Type | Feature / Bug Fix / Maintenance |
| Description | User-facing summary |
<!-- Add screenshots or GIFs to help visualize changes -->

---

### 📋 Checklist
- [ ] Tests added or explicitly not required
- [ ] Manually tested
- [ ] Breaking changes documented
- [ ] Backward compatibility considered
## 📋 Checklist

- [ ] Dev Review
- [ ] Test cases added (Unit/ Integration / E2E)
- [ ] Manually tested the changes

---

## 👀 Notes for Reviewers

<!-- Anything reviewers should keep in mind while reviewing -->

---
.github/workflows/build-community.yaml (9 changes)
@@ -3,8 +3,8 @@ name: build-community
on:
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+"
      - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'

defaults:
  run:
@@ -62,20 +62,21 @@ jobs:
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_VERSION: 1.24
      GO_VERSION: 1.23
      GO_NAME: signoz-community
      GO_INPUT_ARTIFACT_CACHE_KEY: community-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./cmd/community
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-s -w
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=community
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
        -X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./cmd/community/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
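The tag triggers above change only in quoting style; both forms match the same tags. As a sketch with hypothetical version numbers, tags that would start this workflow look like:

```sh
# Matches v[0-9]+.[0-9]+.[0-9]+
git tag v1.2.3 && git push origin v1.2.3

# Matches v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+
git tag v1.2.3-rc.1 && git push origin v1.2.3-rc.1
```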
.github/workflows/build-enterprise.yaml (10 changes)
@@ -69,8 +69,6 @@ jobs:
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
          echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
          echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
          echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
          echo 'DOCS_BASE_URL="https://signoz.io"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
@@ -86,7 +84,7 @@ jobs:
      JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
      JS_INPUT_ARTIFACT_PATH: frontend/.env
      JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      JS_OUTPUT_ARTIFACT_PATH: frontend/build
      DOCKER_BUILD: false
      DOCKER_MANIFEST: false
  go-build:
@@ -95,13 +93,13 @@ jobs:
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_VERSION: 1.24
      GO_VERSION: 1.23
      GO_INPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./cmd/enterprise
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-s -w
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
@@ -109,8 +107,10 @@ jobs:
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
        -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
        -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
.github/workflows/build-staging.yaml (10 changes)
@@ -68,8 +68,6 @@ jobs:
          echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
          echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
          echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
          echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
          echo 'DOCS_BASE_URL="https://staging.signoz.io"' >> frontend/.env
      - name: cache-dotenv
        uses: actions/cache@v4
        with:
@@ -94,13 +92,13 @@ jobs:
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_VERSION: 1.24
      GO_VERSION: 1.23
      GO_INPUT_ARTIFACT_CACHE_KEY: staging-jsbuild-${{ github.sha }}
      GO_INPUT_ARTIFACT_PATH: frontend/build
      GO_BUILD_CONTEXT: ./cmd/enterprise
      GO_BUILD_FLAGS: >-
        -tags timetzdata
        -ldflags='-s -w
        -ldflags='-linkmode external -extldflags \"-static\" -s -w
        -X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
        -X github.com/SigNoz/signoz/pkg/version.variant=enterprise
        -X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
@@ -108,8 +106,10 @@ jobs:
        -X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
        -X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
        -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
        -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
        -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
      GO_CGO_ENABLED: 1
      DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
      DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
      DOCKER_MANIFEST: true
@@ -125,4 +125,4 @@ jobs:
      GITHUB_SILENT: true
      GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
      GITHUB_EVENT_NAME: releaser
      GITHUB_EVENT_PAYLOAD: '{"deployment": "${{ needs.prepare.outputs.deployment }}", "signoz_version": "${{ needs.prepare.outputs.version }}"}'
      GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
.github/workflows/commitci.yaml (7 changes)
@@ -25,10 +25,3 @@ jobs:
          else
            echo "No references to 'ee' packages found in 'pkg' directory"
          fi

          if grep -R --include="*.go" '.*/ee/.*' cmd/community/; then
            echo "Error: Found references to 'ee' packages in 'cmd/community' directory"
            exit 1
          else
            echo "No references to 'ee' packages found in 'cmd/community' directory"
          fi
.github/workflows/goci.yaml (30 changes)
@@ -18,7 +18,7 @@ jobs:
    with:
      PRIMUS_REF: main
      GO_TEST_CONTEXT: ./...
      GO_VERSION: 1.24
      GO_VERSION: 1.23
  fmt:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -27,7 +27,7 @@ jobs:
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_VERSION: 1.24
      GO_VERSION: 1.23
  lint:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -36,7 +36,7 @@ jobs:
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_VERSION: 1.24
      GO_VERSION: 1.23
  deps:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -45,7 +45,7 @@ jobs:
    secrets: inherit
    with:
      PRIMUS_REF: main
      GO_VERSION: 1.24
      GO_VERSION: 1.23
  build:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
@@ -57,7 +57,7 @@ jobs:
      - name: go-install
        uses: actions/setup-go@v5
        with:
          go-version: "1.24"
          go-version: "1.23"
      - name: qemu-install
        uses: docker/setup-qemu-action@v3
      - name: aarch64-install
@@ -65,10 +65,6 @@ jobs:
          set -ex
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
      - name: node-install
        uses: actions/setup-node@v5
        with:
          node-version: "22"
      - name: docker-community
        shell: bash
        run: |
@@ -77,19 +73,3 @@ jobs:
        shell: bash
        run: |
          make docker-build-enterprise
  openapi:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
    runs-on: ubuntu-latest
    steps:
      - name: self-checkout
        uses: actions/checkout@v4
      - name: go-install
        uses: actions/setup-go@v5
        with:
          go-version: "1.24"
      - name: generate-openapi
        run: |
          go run cmd/enterprise/*.go generate openapi
          git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
.github/workflows/gor-signoz-community.yaml (16 changes)
@@ -3,8 +3,8 @@ name: gor-signoz-community
on:
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+"
      - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'

permissions:
  contents: write
@@ -21,10 +21,6 @@ jobs:
        shell: bash
        run: |
          echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
      - name: node-setup
        uses: actions/setup-node@v5
        with:
          node-version: "22"
      - name: build-frontend
        run: make js-build
      - name: upload-frontend-artifact
@@ -62,7 +58,7 @@ jobs:
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.24"
          go-version: "1.23"
      - name: cross-compilation-tools
        if: matrix.os == 'ubuntu-latest'
        run: |
@@ -93,7 +89,7 @@ jobs:
        uses: goreleaser/goreleaser-action@v6
        with:
          distribution: goreleaser-pro
          version: "~> v2"
          version: '~> v2'
          args: release --config ${{ env.CONFIG_PATH }} --clean --split
          workdir: .
        env:
@@ -126,7 +122,7 @@ jobs:
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.24"
          go-version: "1.23"

      # copy the caches from build
      - name: get-sha
@@ -151,7 +147,7 @@ jobs:
        if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
        with:
          distribution: goreleaser-pro
          version: "~> v2"
          version: '~> v2'
          args: continue --merge
          workdir: .
        env:
.github/workflows/gor-signoz.yaml (20 changes)
@@ -3,8 +3,8 @@ name: gor-signoz
on:
  push:
    tags:
      - "v[0-9]+.[0-9]+.[0-9]+"
      - "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'

permissions:
  contents: write
@@ -35,14 +35,8 @@ jobs:
          echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
          echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
          echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
          echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
          echo 'DOCS_BASE_URL="https://signoz.io"' >> .env
      - name: node-setup
        uses: actions/setup-node@v5
        with:
          node-version: "22"
      - name: build-frontend
        run: make js-build
        run: make js-build
      - name: upload-frontend-artifact
        uses: actions/upload-artifact@v4
        with:
@@ -78,7 +72,7 @@ jobs:
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.24"
          go-version: "1.23"
      - name: cross-compilation-tools
        if: matrix.os == 'ubuntu-latest'
        run: |
@@ -109,7 +103,7 @@ jobs:
        uses: goreleaser/goreleaser-action@v6
        with:
          distribution: goreleaser-pro
          version: "~> v2"
          version: '~> v2'
          args: release --config ${{ env.CONFIG_PATH }} --clean --split
          workdir: .
        env:
@@ -141,7 +135,7 @@ jobs:
      - name: setup-go
        uses: actions/setup-go@v5
        with:
          go-version: "1.24"
          go-version: "1.23"

      # copy the caches from build
      - name: get-sha
@@ -166,7 +160,7 @@ jobs:
        if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
        with:
          distribution: goreleaser-pro
          version: "~> v2"
          version: '~> v2'
          args: continue --merge
          workdir: .
        env:
.github/workflows/integrationci.yaml (64 changes)
@@ -9,53 +9,22 @@ on:
      - labeled

jobs:
  fmtlint:
    if: |
      ((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: python
        uses: actions/setup-python@v5
        with:
          python-version: 3.13
      - name: uv
        uses: astral-sh/setup-uv@v4
      - name: install
        run: |
          cd tests/integration && uv sync
      - name: fmt
        run: |
          make py-fmt
      - name: lint
        run: |
          make py-lint
  test:
    strategy:
      fail-fast: false
      matrix:
        src:
          - bootstrap
          - passwordauthn
          - callbackauthn
          - cloudintegrations
          - dashboard
          - logspipelines
          - preference
          - auth
          - querier
          - role
          - ttl
          - alerts
        sqlstore-provider:
          - postgres
          - sqlite
        clickhouse-version:
          - 24.1.2-alpine
          - 25.5.6
          - 25.10.1
        schema-migrator-version:
          - v0.129.7
          - v0.128.1
        postgres-version:
          - 15
    if: |
@@ -69,32 +38,15 @@ jobs:
        uses: actions/setup-python@v5
        with:
          python-version: 3.13
      - name: uv
        uses: astral-sh/setup-uv@v4
      - name: install
      - name: poetry
        run: |
          cd tests/integration && uv sync
      - name: webdriver
        run: |
          wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
          echo "deb http://dl.google.com/linux/chrome/deb/ stable main" | sudo tee -a /etc/apt/sources.list.d/google-chrome.list
          sudo apt-get update -qqy
          sudo apt-get -qqy install google-chrome-stable
          CHROME_VERSION=$(google-chrome-stable --version)
          CHROME_FULL_VERSION=${CHROME_VERSION%%.*}
          CHROME_MAJOR_VERSION=${CHROME_FULL_VERSION//[!0-9]}
          sudo rm /etc/apt/sources.list.d/google-chrome.list
          export CHROMEDRIVER_VERSION=`curl -s https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_${CHROME_MAJOR_VERSION%%.*}`
          curl -L -O "https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip"
          unzip chromedriver-linux64.zip
          chmod +x chromedriver-linux64/chromedriver
          sudo mv chromedriver-linux64/chromedriver /usr/local/bin/chromedriver
          chromedriver -version
          google-chrome-stable --version
          python -m pip install poetry==2.1.2
          python -m poetry config virtualenvs.in-project true
          cd tests/integration && poetry install --no-root
      - name: run
        run: |
          cd tests/integration && \
          uv run pytest \
          poetry run pytest \
            --basetemp=./tmp/ \
            src/${{matrix.src}} \
            --sqlstore-provider ${{matrix.sqlstore-provider}} \
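The CI invocation above is truncated in this view after the --sqlstore-provider flag. For reference, a minimal local run of one matrix cell using only the commands and flags visible in this diff (the choice of src/bootstrap and sqlite is illustrative):

```sh
cd tests/integration

# Dependency install: one side of this diff uses uv, the other poetry
uv sync
# or: python -m pip install poetry==2.1.2 && poetry install --no-root

# Run a single test source against one sqlstore provider
uv run pytest --basetemp=./tmp/ src/bootstrap --sqlstore-provider sqlite
```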
.github/workflows/jsci.yaml (13 changes)
@@ -17,23 +17,10 @@ jobs:
    steps:
      - name: checkout
        uses: actions/checkout@v4
      - name: setup node
        uses: actions/setup-node@v5
        with:
          node-version: "22"
      - name: install
        run: cd frontend && yarn install
      - name: tsc
        run: cd frontend && yarn tsc
  tsc2:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
      (github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
    uses: signoz/primus.workflows/.github/workflows/js-tsc.yaml@main
    secrets: inherit
    with:
      PRIMUS_REF: main
      JS_SRC: frontend
  test:
    if: |
      (github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
.gitignore (16 changes)
@@ -1,9 +1,6 @@
node_modules

.vscode
!.vscode/settings.json

deploy/docker/environment_tiny/common_test
frontend/node_modules
frontend/.pnp
@@ -15,6 +12,7 @@ frontend/coverage

# production
frontend/build
frontend/.vscode
frontend/.yarnclean
frontend/.temp_cache
frontend/test-results
@@ -33,6 +31,7 @@ frontend/src/constants/env.ts

.idea

**/.vscode
**/build
**/storage
**/locust-scripts/__pycache__/
@@ -50,13 +49,13 @@ ee/query-service/tests/test-deploy/data/
# local data
*.backup
*.db
**/db
/deploy/docker/clickhouse-setup/data/
/deploy/docker-swarm/clickhouse-setup/data/
bin/
.local/
*/query-service/queries.active
ee/query-service/db

# e2e

e2e/node_modules/
@@ -87,8 +86,6 @@ queries.active
.devenv/**/tmp/**
.qodo

.dev

### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
@@ -106,6 +103,7 @@ dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
@@ -221,6 +219,8 @@ cython_debug/
#.idea/

### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml

# ruff
.ruff_cache/
@@ -228,6 +228,4 @@ cython_debug/
# LSP config files
pyrightconfig.json

# cursor files
frontend/.cursor/
# End of https://www.toptal.com/developers/gitignore/api/python
@@ -1,64 +1,34 @@
version: "2"
linters:
  default: none
  default: standard
  enable:
    - bodyclose
    - depguard
    - errcheck
    - forbidigo
    - govet
    - iface
    - ineffassign
    - misspell
    - nilnil
    - sloglint
    - wastedassign
    - depguard
    - iface
    - unparam
    - unused
  settings:
    depguard:
      rules:
        noerrors:
          deny:
            - pkg: errors
              desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
        nozap:
          deny:
            - pkg: go.uber.org/zap
              desc: Do not use zap logger. Use slog instead.
    forbidigo:
      forbid:
        - pattern: fmt.Errorf
        - pattern: ^(fmt\.Print.*|print|println)$
    iface:
      enable:
        - identical
    sloglint:
      no-mixed-args: true
      kv-only: true
      no-global: all
      context: all
      static-msg: true
      key-naming-case: snake
  exclusions:
    generated: lax
    presets:
      - comments
      - common-false-positives
      - legacy
      - std-error-handling
    paths:
      - pkg/query-service
      - ee/query-service
      - scripts/
      - tmp/
      - third_party$
      - builtin$
      - examples$
formatters:
  exclusions:
    generated: lax
    paths:
      - third_party$
      - builtin$
      - examples$

linters-settings:
  sloglint:
    no-mixed-args: true
    kv-only: true
    no-global: all
    context: all
    static-msg: true
    msg-style: lowercased
    key-naming-case: snake
  depguard:
    rules:
      nozap:
        deny:
          - pkg: "go.uber.org/zap"
            desc: "Do not use zap logger. Use slog instead."
  iface:
    enable:
      - identical
issues:
  exclude-dirs:
    - "pkg/query-service"
    - "ee/query-service"
    - "scripts/"
.mockery.yml (17 changes)
@@ -1,17 +0,0 @@
# Link to template variables: https://pkg.go.dev/github.com/vektra/mockery/v3/config#TemplateData
template: testify
packages:
  github.com/SigNoz/signoz/pkg/alertmanager:
    config:
      all: true
      dir: '{{.InterfaceDir}}/alertmanagertest'
      filename: "alertmanager.go"
      structname: 'Mock{{.InterfaceName}}'
      pkgname: '{{.SrcPackageName}}test'
  github.com/SigNoz/signoz/pkg/tokenizer:
    config:
      all: true
      dir: '{{.InterfaceDir}}/tokenizertest'
      filename: "tokenizer.go"
      structname: 'Mock{{.InterfaceName}}'
      pkgname: '{{.SrcPackageName}}test'
.vscode/settings.json (15 changes)
@@ -1,15 +0,0 @@
{
  "eslint.workingDirectories": [
    "./frontend"
  ],
  "editor.formatOnSave": true,
  "editor.defaultFormatter": "esbenp.prettier-vscode",
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": "explicit"
  },
  "prettier.requireConfig": true,
  "[go]": {
    "editor.formatOnSave": true,
    "editor.defaultFormatter": "golang.go"
  }
}
@@ -78,5 +78,4 @@ Need assistance? Join our Slack community:

- Set up your [development environment](docs/contributing/development.md)
- Deploy and observe [SigNoz in action with OpenTelemetry Demo Application](docs/otel-demo-docs.md)
- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future.
- Write [integration tests](docs/contributing/go/integration.md)
- Explore the [SigNoz Community Advocate Program](ADVOCATE.md), which recognises contributors who support the community, share their expertise, and help shape SigNoz's future.
Makefile (68 changes)
@@ -72,12 +72,6 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
	@echo "  - ClickHouse: http://localhost:8123"
	@echo "  - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"

.PHONY: devenv-clickhouse-clean
devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
	@echo "Removing ClickHouse data..."
	@rm -rf .devenv/docker/clickhouse/fs/tmp/*
	@echo "ClickHouse data cleaned!"

##############################################################
# go commands
##############################################################
@@ -86,13 +80,14 @@ go-run-enterprise: ## Runs the enterprise go backend server
	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
	SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
	SIGNOZ_WEB_ENABLED=false \
	SIGNOZ_TOKENIZER_JWT_SECRET=secret \
	SIGNOZ_JWT_SECRET=secret \
	SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
	SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
	go run -race \
	$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server
	$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
	--config ./conf/prometheus.yml \
	--cluster cluster

.PHONY: go-test
go-test: ## Runs go unit tests
@@ -103,13 +98,14 @@ go-run-community: ## Runs the community go backend server
	@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
	SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
	SIGNOZ_WEB_ENABLED=false \
	SIGNOZ_TOKENIZER_JWT_SECRET=secret \
	SIGNOZ_JWT_SECRET=secret \
	SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
	SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
	SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
	go run -race \
	$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server
	$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
	--config ./conf/prometheus.yml \
	--cluster cluster

.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
go-build-community: ## Builds the go backend server for community
@@ -118,9 +114,9 @@ $(GO_BUILD_ARCHS_COMMUNITY): go-build-community-%: $(TARGET_DIR)
	@mkdir -p $(TARGET_DIR)/$(OS)-$*
	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)-community"
	@if [ $* = "arm64" ]; then \
		GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
	else \
		GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
	fi

@@ -131,9 +127,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
	@mkdir -p $(TARGET_DIR)/$(OS)-$*
	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
	@if [ $* = "arm64" ]; then \
		GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
	else \
		GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
	fi

.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
@@ -143,9 +139,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
	@mkdir -p $(TARGET_DIR)/$(OS)-$*
	@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
	@if [ $* = "arm64" ]; then \
		GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
		CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
	else \
		GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
		CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
	fi

##############################################################
@@ -202,40 +198,14 @@ docker-buildx-enterprise: go-build-enterprise js-build
##############################################################
.PHONY: py-fmt
py-fmt: ## Run black for integration tests
	@cd tests/integration && uv run black .
	@cd tests/integration && poetry run black .

.PHONY: py-lint
py-lint: ## Run lint for integration tests
	@cd tests/integration && uv run isort .
	@cd tests/integration && uv run autoflake .
	@cd tests/integration && uv run pylint .

.PHONY: py-test-setup
py-test-setup: ## Runs integration tests
	@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --reuse --capture=no src/bootstrap/setup.py::test_setup

.PHONY: py-test-teardown
py-test-teardown: ## Runs integration tests with teardown
	@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --teardown --capture=no src/bootstrap/setup.py::test_teardown
	@cd tests/integration && poetry run isort .
	@cd tests/integration && poetry run autoflake .
	@cd tests/integration && poetry run pylint .

.PHONY: py-test
py-test: ## Runs integration tests
	@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --capture=no src/

.PHONY: py-clean
py-clean: ## Clear all pycache and pytest cache from tests directory recursively
	@echo ">> cleaning python cache files from tests directory"
	@find tests -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
	@find tests -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
	@find tests -type f -name "*.pyc" -delete 2>/dev/null || true
	@find tests -type f -name "*.pyo" -delete 2>/dev/null || true
	@echo ">> python cache cleaned"

##############################################################
# generate commands
##############################################################
.PHONY: gen-mocks
gen-mocks:
	@echo ">> Generating mocks"
	@mockery --config .mockery.yml
	@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
README.md (13 changes)
@@ -66,17 +66,6 @@ Read [more](https://signoz.io/metrics-and-dashboards/).



### LLM Observability

Monitor and debug your LLM applications with comprehensive observability. Track LLM calls, analyze token usage, monitor performance, and gain insights into your AI application's behavior in production.

SigNoz LLM observability helps you understand how your language models are performing, identify issues with prompts and responses, track token usage and costs, and optimize your AI applications for better performance and reliability.

[Get started with LLM Observability →](https://signoz.io/docs/llm-observability/)



### Alerts

Use alerts in SigNoz to get notified when anything unusual happens in your application. You can set alerts on any type of telemetry signal (logs, metrics, traces), create thresholds and set up a notification channel to get notified. Advanced features like alert history and anomaly detection can help you create smarter alerts.
@@ -247,7 +236,7 @@ Not sure how to get started? Just ping us on `#contributing` in our [slack commu
#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/therealpandey)
- [Vibhu Pandey](https://github.com/grandwizard28)

<br /><br />
@@ -12,6 +12,12 @@ builds:
  - id: signoz
    binary: bin/signoz
    main: ./cmd/community
    env:
      - CGO_ENABLED=1
      - >-
        {{- if eq .Os "linux" }}
        {{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
        {{- end }}
    goos:
      - linux
      - darwin
@@ -30,6 +36,8 @@ builds:
      - -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
      - -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
      - -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
      - >-
        {{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
    mod_timestamp: "{{ .CommitTimestamp }}"
    tags:
      - timetzdata
@@ -13,7 +13,6 @@ func main() {

	// register a list of commands to the root command
	registerServer(cmd.RootCmd, logger)
	cmd.RegisterGenerate(cmd.RootCmd, logger)

	cmd.Execute(logger)
}
@@ -3,29 +3,20 @@ package main
import (
	"context"
	"log/slog"
	"time"

	"github.com/SigNoz/signoz/cmd"
	"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
	"github.com/SigNoz/signoz/pkg/analytics"
	"github.com/SigNoz/signoz/pkg/authn"
	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
	"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/gateway"
	"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
	"github.com/SigNoz/signoz/pkg/modules/dashboard"
	"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/modules/role/implrole"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/query-service/app"
	"github.com/SigNoz/signoz/pkg/queryparser"
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/version"
	"github.com/SigNoz/signoz/pkg/zeus"
@@ -41,7 +32,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
		Short:              "Run the SigNoz server",
		FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
		RunE: func(currCmd *cobra.Command, args []string) error {
			config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
			config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
			if err != nil {
				return err
			}
@@ -58,9 +49,19 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
	// print the version
	version.Info.PrettyPrint(config.Version)

	// add enterprise sqlstore factories to the community sqlstore factories
	sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
	if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
		logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
		return err
	}

	jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)

	signoz, err := signoz.New(
		ctx,
		config,
		jwt,
		zeus.Config{},
		noopzeus.NewProviderFactory(),
		licensing.Config{},
@@ -75,28 +76,13 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
		},
		signoz.NewSQLStoreProviderFactories(),
		signoz.NewTelemetryStoreProviderFactories(),
		func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
			return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
		},
		func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
			return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
		},
		func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
			return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
		},
		func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
			return noopgateway.NewProviderFactory()
		},
		func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
			return implrole.NewSetter(implrole.NewStore(store), authz)
		},
	)
	if err != nil {
		logger.ErrorContext(ctx, "failed to create signoz", "error", err)
		return err
	}

	server, err := app.NewServer(config, signoz)
	server, err := app.NewServer(config, signoz, jwt)
	if err != nil {
		logger.ErrorContext(ctx, "failed to create server", "error", err)
		return err
@@ -2,7 +2,9 @@ package cmd

import (
	"context"
	"fmt"
	"log/slog"
	"os"

	"github.com/SigNoz/signoz/pkg/config"
	"github.com/SigNoz/signoz/pkg/config/envprovider"
@@ -10,10 +12,9 @@ import (
	"github.com/SigNoz/signoz/pkg/signoz"
)

func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.DeprecatedFlags) (signoz.Config, error) {
func NewSigNozConfig(ctx context.Context, flags signoz.DeprecatedFlags) (signoz.Config, error) {
	config, err := signoz.NewConfig(
		ctx,
		logger,
		config.ResolverConfig{
			Uris: []string{"env:"},
			ProviderFactories: []config.ProviderFactory{
@@ -29,3 +30,16 @@ func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.Depr

	return config, nil
}

func NewJWTSecret(_ context.Context, _ *slog.Logger) string {
	jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
	if len(jwtSecret) == 0 {
		fmt.Println("🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!")
		fmt.Println("SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application.")
		fmt.Println("Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access.")
		fmt.Println("Please set the SIGNOZ_JWT_SECRET environment variable immediately.")
		fmt.Println("For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
	}

	return jwtSecret
}
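NewJWTSecret only reads SIGNOZ_JWT_SECRET and prints the warning when it is unset; it does not generate or persist anything. A minimal sketch of running the server with the secret set (the openssl command is an assumption; any sufficiently random string works):

```sh
# Export a JWT secret before starting the server (generator is illustrative)
export SIGNOZ_JWT_SECRET="$(openssl rand -hex 32)"
make go-run-enterprise   # Makefile target shown earlier in this diff
```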
@@ -12,6 +12,12 @@ builds:
  - id: signoz
    binary: bin/signoz
    main: ./cmd/enterprise
    env:
      - CGO_ENABLED=1
      - >-
        {{- if eq .Os "linux" }}
        {{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
        {{- end }}
    goos:
      - linux
      - darwin
@@ -31,8 +37,11 @@ builds:
      - -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
      - -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
      - -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
      - -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
      - -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
      - -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
      - >-
        {{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
    mod_timestamp: "{{ .CommitTimestamp }}"
    tags:
      - timetzdata
@@ -1,4 +1,11 @@
FROM golang:1.24-bullseye
FROM node:18-bullseye AS build

WORKDIR /opt/
COPY ./frontend/ ./
RUN CI=1 yarn install
RUN CI=1 yarn build

FROM golang:1.23-bullseye

ARG OS="linux"
ARG TARGETARCH
@@ -32,6 +39,8 @@ COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

COPY --from=build /opt/build ./web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz", "server"]
@@ -1,47 +0,0 @@
FROM node:18-bullseye AS build

WORKDIR /opt/
COPY ./frontend/ ./
ENV NODE_OPTIONS=--max-old-space-size=8192
RUN CI=1 yarn install
RUN CI=1 yarn build

FROM golang:1.24-bullseye

ARG OS="linux"
ARG TARGETARCH
ARG ZEUSURL

# This path is important for stacktraces
WORKDIR $GOPATH/src/github.com/signoz/signoz
WORKDIR /root

RUN set -eux; \
	apt-get update; \
	apt-get install -y --no-install-recommends \
		g++ \
		gcc \
		libc6-dev \
		make \
		pkg-config \
	; \
	rm -rf /var/lib/apt/lists/*

COPY go.mod go.sum ./

RUN go mod download

COPY ./cmd/ ./cmd/
COPY ./ee/ ./ee/
COPY ./pkg/ ./pkg/
COPY ./templates/email /root/templates

COPY Makefile Makefile
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz

COPY --from=build /opt/build ./web/

RUN chmod 755 /root /root/signoz

ENTRYPOINT ["/root/signoz", "server"]
@@ -13,7 +13,6 @@ func main() {

	// register a list of commands to the root command
	registerServer(cmd.RootCmd, logger)
	cmd.RegisterGenerate(cmd.RootCmd, logger)

	cmd.Execute(logger)
}
@@ -6,33 +6,17 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/ee/modules/role/implrole"
|
||||
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
|
||||
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
enterprisezeus "github.com/SigNoz/signoz/ee/zeus"
|
||||
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
@@ -51,7 +35,7 @@ func registerServer(parentCmd *cobra.Command, logger *slog.Logger) {
|
||||
Short: "Run the SigNoz server",
|
||||
FParseErrWhitelist: cobra.FParseErrWhitelist{UnknownFlags: true},
|
||||
RunE: func(currCmd *cobra.Command, args []string) error {
|
||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), logger, flags)
|
||||
config, err := cmd.NewSigNozConfig(currCmd.Context(), flags)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -70,14 +54,17 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e

// add enterprise sqlstore factories to the community sqlstore factories
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
return err
}

jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)

signoz, err := signoz.New(
ctx,
config,
jwt,
enterprisezeus.Config(),
httpzeus.NewProviderFactory(),
enterpriselicensing.Config(24*time.Hour, 3),
@@ -97,47 +84,13 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
},
sqlstoreFactories,
signoz.NewTelemetryStoreProviderFactories(),
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
samlCallbackAuthN, err := samlcallbackauthn.New(ctx, store, licensing)
if err != nil {
return nil, err
}

oidcCallbackAuthN, err := oidccallbackauthn.New(store, licensing, providerSettings)
if err != nil {
return nil, err
}

authNs, err := signoz.NewAuthNs(ctx, providerSettings, store, licensing)
if err != nil {
return nil, err
}

authNs[authtypes.AuthNProviderSAML] = samlCallbackAuthN
authNs[authtypes.AuthNProviderOIDC] = oidcCallbackAuthN

return authNs, nil
},
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
},
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
},
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
},
)

if err != nil {
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
return err
}

server, err := enterpriseapp.NewServer(config, signoz)
server, err := enterpriseapp.NewServer(config, signoz, jwt)
if err != nil {
logger.ErrorContext(ctx, "failed to create server", "error", err)
return err
@@ -1,21 +0,0 @@
package cmd

import (
"log/slog"

"github.com/spf13/cobra"
)

func RegisterGenerate(parentCmd *cobra.Command, logger *slog.Logger) {
var generateCmd = &cobra.Command{
Use: "generate",
Short: "Generate artifacts",
SilenceUsage: true,
SilenceErrors: true,
CompletionOptions: cobra.CompletionOptions{DisableDefaultCmd: true},
}

registerGenerateOpenAPI(generateCmd)

parentCmd.AddCommand(generateCmd)
}
@@ -1,41 +0,0 @@
package cmd

import (
"context"
"log/slog"

"github.com/SigNoz/signoz/pkg/instrumentation"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/version"
"github.com/spf13/cobra"
)

func registerGenerateOpenAPI(parentCmd *cobra.Command) {
openapiCmd := &cobra.Command{
Use: "openapi",
Short: "Generate OpenAPI schema for SigNoz",
RunE: func(currCmd *cobra.Command, args []string) error {
return runGenerateOpenAPI(currCmd.Context())
},
}

parentCmd.AddCommand(openapiCmd)
}

func runGenerateOpenAPI(ctx context.Context) error {
instrumentation, err := instrumentation.New(ctx, instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}}, version.Info, "signoz")
if err != nil {
return err
}

openapi, err := signoz.NewOpenAPI(ctx, instrumentation)
if err != nil {
return err
}

if err := openapi.CreateAndWrite("docs/api/openapi.yml"); err != nil {
return err
}

return nil
}
97
cmd/zap.go
@@ -1,10 +1,6 @@
package cmd

import (
"context"
"time"

"github.com/SigNoz/signoz/pkg/errors"
"go.uber.org/zap" //nolint:depguard
"go.uber.org/zap/zapcore" //nolint:depguard
)
@@ -14,97 +10,6 @@ func newZapLogger() *zap.Logger {
config := zap.NewProductionConfig()
config.EncoderConfig.TimeKey = "timestamp"
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder

// Extract sampling config before building the logger.
// We need to disable sampling in the config and apply it manually later
// to ensure correct core ordering. See filteringCore documentation for details.
samplerConfig := config.Sampling
config.Sampling = nil

logger, _ := config.Build()

// Wrap with custom core wrapping to filter certain log entries.
// The order of wrapping is important:
// 1. First wrap with filteringCore
// 2. Then wrap with sampler
//
// This creates the call chain: sampler -> filteringCore -> ioCore
//
// During logging:
// - sampler.Check decides whether to sample the log entry
// - If sampled, filteringCore.Check is called
// - filteringCore adds itself to CheckedEntry.cores
// - All cores in CheckedEntry.cores have their Write method called
// - filteringCore.Write can now filter the entry before passing to ioCore
//
// If we didn't disable the sampler above, filteringCore would have wrapped
// sampler. By calling sampler.Check we would have allowed it to call
// ioCore.Check that adds itself to CheckedEntry.cores. Then ioCore.Write
// would have bypassed our checks, making filtering impossible.
return logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
core = &filteringCore{core}
if samplerConfig != nil {
core = zapcore.NewSamplerWithOptions(
core,
time.Second,
samplerConfig.Initial,
samplerConfig.Thereafter,
)
}
return core
}))
}

// filteringCore wraps a zapcore.Core to filter out log entries based on a
// custom logic.
//
// Note: This core must be positioned before the sampler in the core chain
// to ensure Write is called. See newZapLogger for ordering details.
type filteringCore struct {
zapcore.Core
}

// filter determines whether a log entry should be written based on its fields.
// Returns false if the entry should be suppressed, true otherwise.
//
// Current filters:
// - context.Canceled: These are expected errors from cancelled operations,
// and create noise in logs.
func (c *filteringCore) filter(fields []zapcore.Field) bool {
for _, field := range fields {
if field.Type == zapcore.ErrorType {
if loggedErr, ok := field.Interface.(error); ok {
// Suppress logs containing context.Canceled errors
if errors.Is(loggedErr, context.Canceled) {
return false
}
}
}
}
return true
}

// With implements zapcore.Core.With
// It returns a new copy with the added context.
func (c *filteringCore) With(fields []zapcore.Field) zapcore.Core {
return &filteringCore{c.Core.With(fields)}
}

// Check implements zapcore.Core.Check.
// It adds this core to the CheckedEntry if the log level is enabled,
// ensuring that Write will be called for this entry.
func (c *filteringCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
if c.Enabled(ent.Level) {
return ce.AddCore(ent, c)
}
return ce
}

// Write implements zapcore.Core.Write.
// It filters log entries based on their fields before delegating to the wrapped core.
func (c *filteringCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
if !c.filter(fields) {
return nil
}
return c.Core.Write(ent, fields)
return logger
}
@@ -1,15 +1,8 @@
##################### SigNoz Configuration Example #####################
#
#
# Do not modify this file
#

##################### Global #####################
global:
# the url under which the signoz apiserver is externally reachable.
external_url: <unset>
# the url where the SigNoz backend receives telemetry data (traces, metrics, logs) from instrumented applications.
ingestion_url: <unset>

##################### Version #####################
version:
banner:
@@ -54,10 +47,10 @@ cache:
provider: memory
# memory: Uses in-memory caching.
memory:
# Max items for the in-memory cache (10x the entries)
num_counters: 100000
# Total cost in bytes allocated bounded cache
max_cost: 67108864
# Time-to-live for cache entries in memory. Specify the duration in ns
ttl: 60000000000
# The interval at which the cache will be cleaned up
cleanup_interval: 1m
# redis: Uses Redis as the caching backend.
redis:
# The hostname or IP address of the Redis server.
@@ -65,7 +58,7 @@ cache:
# The port on which the Redis server is running. Default is usually 6379.
port: 6379
# The password for authenticating with the Redis server, if required.
password:
password:
# The Redis database number to use
db: 0

@@ -78,10 +71,6 @@ sqlstore:
sqlite:
# The path to the SQLite database file.
path: /var/lib/signoz/signoz.db
# Mode is the mode to use for the sqlite database.
mode: delete
# BusyTimeout is the timeout for the sqlite database to wait for a lock.
busy_timeout: 10s

##################### APIServer #####################
apiserver:
@@ -148,7 +137,10 @@ prometheus:
##################### Alertmanager #####################
alertmanager:
# Specifies the alertmanager provider to use.
provider: signoz
provider: legacy
legacy:
# The API URL (with prefix) of the legacy Alertmanager instance.
api_url: http://localhost:9093/api
signoz:
# The poll interval for periodically syncing the alertmanager with the config in the store.
poll_interval: 1m
@@ -249,54 +241,8 @@ statsreporter:
# Whether to collect identities and traits (emails).
identities: true

##################### Gateway (License only) #####################
gateway:
# The URL of the gateway's api.
url: http://localhost:8080

##################### Tokenizer #####################
tokenizer:
# Specifies the tokenizer provider to use.
provider: jwt
lifetime:
# The duration for which a user can be idle before being required to authenticate.
idle: 168h
# The duration for which a user can remain logged in before being asked to login.
max: 720h
rotation:
# The interval to rotate tokens in.
interval: 30m
# The duration for which the previous token pair remains valid after a token pair is rotated.
duration: 60s
jwt:
# The secret to sign the JWT tokens.
secret: secret
opaque:
gc:
# The interval to perform garbage collection.
interval: 1h
token:
# The maximum number of tokens a user can have. This limits the number of concurrent sessions a user can have.
max_per_user: 5

##################### Flagger #####################
flagger:
# Config are the overrides for the feature flags which come directly from the config file.
config:
boolean:
use_span_metrics: true
interpolation_enabled: false
kafka_span_eval: false
string:
float:
integer:
object:

##################### User #####################
user:
password:
reset:
# Whether to allow users to reset their password themselves.
allow_self: true
# The duration within which a user can reset their password.
max_token_lifetime: 6h
@@ -1,4 +0,0 @@
{
"url": "https://context7.com/signoz/signoz",
"public_key": "pk_6g9GfjdkuPEIDuTGAxnol"
}
@@ -11,7 +11,7 @@ x-common: &common
max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
deploy:
labels:
@@ -37,8 +37,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -65,7 +63,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
command:
- bash
- -c
@@ -176,7 +174,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.110.1
image: signoz/signoz:v0.93.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -195,7 +193,7 @@ services:
- GODEBUG=netdns=go
- TELEMETRY_ENABLED=true
- DEPLOYMENT_TYPE=docker-swarm
- SIGNOZ_TOKENIZER_JWT_SECRET=secret
- SIGNOZ_JWT_SECRET=secret
- DOT_METRICS_ENABLED=true
healthcheck:
test:
@@ -209,7 +207,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.13
image: signoz/signoz-otel-collector:v0.129.2
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -233,7 +231,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.13
image: signoz/signoz-schema-migrator:v0.129.2
deploy:
restart_policy:
condition: on-failure
@@ -11,7 +11,7 @@ x-common: &common
max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
deploy:
labels:
@@ -36,8 +36,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -62,7 +60,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
command:
- bash
- -c
@@ -117,7 +115,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.110.1
image: signoz/signoz:v0.93.0
command:
- --config=/root/config/prometheus.yml
ports:
@@ -150,7 +148,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:v0.129.13
image: signoz/signoz-otel-collector:v0.129.2
command:
- --config=/etc/otel-collector-config.yaml
- --manager-config=/etc/manager-config.yaml
@@ -176,7 +174,7 @@ services:
- signoz
schema-migrator:
!!merge <<: *common
image: signoz/signoz-schema-migrator:v0.129.13
image: signoz/signoz-schema-migrator:v0.129.2
deploy:
restart_policy:
condition: on-failure
@@ -1,10 +1,3 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -28,10 +21,6 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -77,11 +66,6 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -93,20 +77,16 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces, signozmeter]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
exporters: [clickhouselogsexporter]
@@ -10,7 +10,7 @@ x-common: &common
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
# adding non-LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
labels:
signoz.io/scrape: "true"
@@ -40,8 +40,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -67,7 +65,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: signoz-init-clickhouse
command:
- bash
@@ -179,7 +177,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.110.1}
image: signoz/signoz:${VERSION:-v0.93.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -213,7 +211,7 @@ services:
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.13}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.2}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -239,7 +237,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.2}
container_name: schema-migrator-sync
command:
- sync
@@ -250,7 +248,7 @@ services:
condition: service_healthy
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.2}
container_name: schema-migrator-async
command:
- async
@@ -9,7 +9,8 @@ x-common: &common
max-file: "3"
x-clickhouse-defaults: &clickhouse-defaults
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
# adding non-LTS version due to this fix https://github.com/ClickHouse/ClickHouse/commit/32caf8716352f45c1b617274c7508c86b7d1afab
image: clickhouse/clickhouse-server:24.1.2-alpine
tty: true
labels:
signoz.io/scrape: "true"
@@ -35,8 +36,6 @@ x-clickhouse-defaults: &clickhouse-defaults
nofile:
soft: 262144
hard: 262144
environment:
- CLICKHOUSE_SKIP_USER_SETUP=1
x-zookeeper-defaults: &zookeeper-defaults
!!merge <<: *common
image: signoz/zookeeper:3.7.1
@@ -62,7 +61,7 @@ x-db-depend: &db-depend
services:
init-clickhouse:
!!merge <<: *common
image: clickhouse/clickhouse-server:25.5.6
image: clickhouse/clickhouse-server:24.1.2-alpine
container_name: signoz-init-clickhouse
command:
- bash
@@ -111,7 +110,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.110.1}
image: signoz/signoz:${VERSION:-v0.93.0}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -144,7 +143,7 @@ services:
retries: 3
otel-collector:
!!merge <<: *db-depend
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.13}
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.2}
container_name: signoz-otel-collector
command:
- --config=/etc/otel-collector-config.yaml
@@ -166,7 +165,7 @@ services:
condition: service_healthy
schema-migrator-sync:
!!merge <<: *common
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.2}
container_name: schema-migrator-sync
command:
- sync
@@ -178,7 +177,7 @@ services:
restart: on-failure
schema-migrator-async:
!!merge <<: *db-depend
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.2}
container_name: schema-migrator-async
command:
- async
@@ -1,10 +1,3 @@
connectors:
signozmeter:
metrics_flush_interval: 1h
dimensions:
- name: service.name
- name: deployment.environment
- name: host.name
receivers:
otlp:
protocols:
@@ -28,10 +21,6 @@ processors:
send_batch_size: 10000
send_batch_max_size: 11000
timeout: 10s
batch/meter:
send_batch_max_size: 25000
send_batch_size: 20000
timeout: 1s
resourcedetection:
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
detectors: [env, system]
@@ -77,11 +66,6 @@ exporters:
dsn: tcp://clickhouse:9000/signoz_logs
timeout: 10s
use_new_schema: true
signozclickhousemeter:
dsn: tcp://clickhouse:9000/signoz_meter
timeout: 45s
sending_queue:
enabled: false
service:
telemetry:
logs:
@@ -93,20 +77,16 @@ service:
traces:
receivers: [otlp]
processors: [signozspanmetrics/delta, batch]
exporters: [clickhousetraces, signozmeter]
exporters: [clickhousetraces]
metrics:
receivers: [otlp]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
metrics/prometheus:
receivers: [prometheus]
processors: [batch]
exporters: [signozclickhousemetrics, signozmeter]
exporters: [signozclickhousemetrics]
logs:
receivers: [otlp]
processors: [batch]
exporters: [clickhouselogsexporter, signozmeter]
metrics/meter:
receivers: [signozmeter]
processors: [batch/meter]
exporters: [signozclickhousemeter]
exporters: [clickhouselogsexporter]
4344
docs/api/openapi.yml
@@ -13,6 +13,8 @@ Before diving in, make sure you have these tools installed:
  - Download from [go.dev/dl](https://go.dev/dl/)
  - Check [go.mod](../../go.mod#L3) for the minimum version

- **GCC** - Required for CGO dependencies
  - Download from [gcc.gnu.org](https://gcc.gnu.org/)

- **Node** - Powers our frontend
  - Download from [nodejs.org](https://nodejs.org)
@@ -1,134 +0,0 @@
# Flagger

Flagger is SigNoz's feature flagging system built on top of the [OpenFeature](https://openfeature.dev/) standard. It provides a unified interface for evaluating feature flags across the application, allowing features to be enabled, disabled, or configured dynamically without code changes.

> 💡 **Note**: OpenFeature is a CNCF project that provides a vendor-agnostic feature flagging API, making it easy to switch providers without changing application code.

## How does it work?

Flagger consists of three main components:

1. **Registry** (`pkg/flagger/registry.go`) - Contains all available feature flags with their metadata and default values
2. **Flagger** (`pkg/flagger/flagger.go`) - The consumer-facing interface for evaluating feature flags
3. **Providers** (`pkg/flagger/<provider>flagger/`) - Implementations that supply feature flag values (e.g., `configflagger` for config-based flags)

The evaluation flow works as follows (a sketch of this loop follows the list):

1. The caller requests a feature flag value via the `Flagger` interface
2. Flagger checks the registry to validate that the flag exists and to get its default value
3. Each registered provider is queried for an override value
4. If a provider returns a value different from the default, that value is returned
5. Otherwise, the default value from the registry is returned
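
A minimal sketch of that loop, using hypothetical, simplified `provider` and registry shapes for illustration only (the real interfaces live in `pkg/flagger` and `pkg/types/featuretypes`):

```go
package main

import "fmt"

// provider is a hypothetical, simplified stand-in for a Flagger provider.
type provider interface {
	// boolean returns an override for the named flag, or an error if
	// this provider has no value for it.
	boolean(name string) (bool, error)
}

// configProvider mimics a config-backed provider such as configflagger.
type configProvider struct{ overrides map[string]bool }

func (c configProvider) boolean(name string) (bool, error) {
	v, ok := c.overrides[name]
	if !ok {
		return false, fmt.Errorf("no override for %s", name)
	}
	return v, nil
}

// evaluate follows the documented flow: validate the flag against the
// registry defaults, query providers in order, and return the first
// override that differs from the default; otherwise fall back to it.
func evaluate(name string, defaults map[string]bool, providers []provider) (bool, error) {
	def, ok := defaults[name]
	if !ok {
		return false, fmt.Errorf("unknown feature flag: %s", name)
	}
	for _, p := range providers {
		if v, err := p.boolean(name); err == nil && v != def {
			return v, nil
		}
	}
	return def, nil
}

func main() {
	defaults := map[string]bool{"my_new_feature": false}
	providers := []provider{configProvider{overrides: map[string]bool{"my_new_feature": true}}}

	enabled, err := evaluate("my_new_feature", defaults, providers)
	if err != nil {
		panic(err)
	}
	fmt.Println(enabled) // true: the config override wins over the registry default
}
```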

## How to add a new feature flag?

### 1. Register the flag in the registry

Add your feature flag definition in `pkg/flagger/registry.go`:

```go
var (
    // Export the feature name for use in evaluations
    FeatureMyNewFeature = featuretypes.MustNewName("my_new_feature")
)

func MustNewRegistry() featuretypes.Registry {
    registry, err := featuretypes.NewRegistry(
        // ...existing features...
        &featuretypes.Feature{
            Name:           FeatureMyNewFeature,
            Kind:           featuretypes.KindBoolean, // or KindString, KindFloat, KindInt, KindObject
            Stage:          featuretypes.StageStable, // or StageAlpha, StageBeta
            Description:    "Controls whether my new feature is enabled",
            DefaultVariant: featuretypes.MustNewName("disabled"),
            Variants:       featuretypes.NewBooleanVariants(),
        },
    )
    // ...
}
```

> 💡 **Note**: Feature names must match the regex `^[a-z_]+$` (lowercase letters and underscores only).

### 2. Configure the feature flag value (optional)

To override the default value, add an entry in your configuration file:

```yaml
flagger:
  config:
    boolean:
      my_new_feature: true
```

Supported configuration types:

| Type | Config Key | Go Type |
|------|------------|---------|
| Boolean | `boolean` | `bool` |
| String | `string` | `string` |
| Float | `float` | `float64` |
| Integer | `integer` | `int64` |
| Object | `object` | `any` |

## How to evaluate a feature flag?

Use the `Flagger` interface to evaluate feature flags. The interface provides typed methods for each value type:

```go
import (
    "github.com/SigNoz/signoz/pkg/flagger"
    "github.com/SigNoz/signoz/pkg/types/featuretypes"
)

func DoSomething(ctx context.Context, flagger flagger.Flagger) error {
    // Create an evaluation context (typically with org ID)
    evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)

    // Evaluate with error handling
    enabled, err := flagger.Boolean(ctx, flagger.FeatureMyNewFeature, evalCtx)
    if err != nil {
        return err
    }

    if enabled {
        // Feature is enabled
    }

    return nil
}
```

### Empty variants

For cases where you want to use a default value on error (and log the error), use the `*OrEmpty` methods:

```go
func DoSomething(ctx context.Context, flagger flagger.Flagger) {
    evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)

    // Returns false on error and logs the error
    if flagger.BooleanOrEmpty(ctx, flagger.FeatureMyNewFeature, evalCtx) {
        // Feature is enabled
    }
}
```

### Available evaluation methods

| Method | Return Type | Empty Variant Default |
|--------|-------------|---------------------|
| `Boolean()` | `(bool, error)` | `false` |
| `String()` | `(string, error)` | `""` |
| `Float()` | `(float64, error)` | `0.0` |
| `Int()` | `(int64, error)` | `0` |
| `Object()` | `(any, error)` | `struct{}{}` |

## What should I remember?

- Always define feature flags in the registry (`pkg/flagger/registry.go`) before using them
- Use descriptive feature names that clearly indicate what the flag controls
- Prefer `*OrEmpty` methods for non-critical features to avoid error handling overhead
- Export feature name variables (e.g., `FeatureMyNewFeature`) for type-safe usage across packages
- Consider the feature's lifecycle stage (`Alpha`, `Beta`, `Stable`) when defining it
- Providers are evaluated in order; the first non-default value wins
@@ -1,179 +0,0 @@
# Handler

Handlers in SigNoz are responsible for exposing module functionality over HTTP. They are thin adapters that:

- Decode incoming HTTP requests
- Call the appropriate module layer
- Return structured responses (or errors) in a consistent format
- Describe themselves for OpenAPI generation

They are **not** the place for complex business logic; that belongs in modules (for example, `pkg/modules/user`, `pkg/modules/session`, etc).

## How are handlers structured?

At a high level, a typical flow looks like this:

1. A `Handler` interface is defined in the module (for example, `user.Handler`, `session.Handler`, `organization.Handler`).
2. The `apiserver` provider wires those handlers into HTTP routes using Gorilla `mux.Router`.

Each route wraps a module handler method with the following:
- Authorization middleware (from `pkg/http/middleware`)
- A generic HTTP `handler.Handler` (from `pkg/http/handler`)
- An `OpenAPIDef` that describes the operation for OpenAPI generation

For example, in `pkg/apiserver/signozapiserver`:

```go
if err := router.Handle("/api/v1/invite", handler.New(
    provider.authZ.AdminAccess(provider.userHandler.CreateInvite),
    handler.OpenAPIDef{
        ID:                  "CreateInvite",
        Tags:                []string{"users"},
        Summary:             "Create invite",
        Description:         "This endpoint creates an invite for a user",
        Request:             new(types.PostableInvite),
        RequestContentType:  "application/json",
        Response:            new(types.Invite),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusCreated,
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusConflict},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleAdmin),
    },
)).Methods(http.MethodPost).GetError(); err != nil {
    return err
}
```

In this pattern:

- `provider.userHandler.CreateInvite` is a handler method.
- `provider.authZ.AdminAccess(...)` wraps that method with authorization checks and context setup.
- `handler.New` converts it into an HTTP handler and wires it to OpenAPI via the `OpenAPIDef`.

## How to write a new handler method?

When adding a new endpoint:

1. Add a method to the appropriate module `Handler` interface.
2. Implement that method in the module.
3. Register the method in `signozapiserver` with the correct route, HTTP method, auth, and `OpenAPIDef`.

### 1. Extend an existing `Handler` interface or create a new one

Find the module in `pkg/modules/<name>` and extend its `Handler` interface with a new method that receives an `http.ResponseWriter` and `*http.Request`. For example:

```go
type Handler interface {
    // existing methods...
    CreateThing(rw http.ResponseWriter, req *http.Request)
}
```

Keep the method focused on HTTP concerns and delegate business logic to the module.

### 2. Implement the handler method

In the module implementation, implement the new method. A typical implementation:

- Extracts authentication and organization context from `req.Context()`
- Decodes the request body into a `types.*` struct using the `binding` package
- Calls module functions
- Uses the `render` package to write responses or errors

```go
func (h *handler) CreateThing(rw http.ResponseWriter, req *http.Request) {
    // Extract authentication and organization context from req.Context()
    claims, err := authtypes.ClaimsFromContext(req.Context())
    if err != nil {
        render.Error(rw, err)
        return
    }

    // Decode the request body into a `types.*` struct using the `binding` package
    var in types.PostableThing
    if err := binding.JSON.BindBody(req.Body, &in); err != nil {
        render.Error(rw, err)
        return
    }

    // Call module functions
    out, err := h.module.CreateThing(req.Context(), claims.OrgID, &in)
    if err != nil {
        render.Error(rw, err)
        return
    }

    // Use the `render` package to write responses or errors
    render.Success(rw, http.StatusCreated, out)
}
```

### 3. Register the handler in `signozapiserver`

In `pkg/apiserver/signozapiserver`, add a route in the appropriate `add*Routes` function (`addUserRoutes`, `addSessionRoutes`, `addOrgRoutes`, etc.). The pattern is:

```go
if err := router.Handle("/api/v1/things", handler.New(
    provider.authZ.AdminAccess(provider.thingHandler.CreateThing),
    handler.OpenAPIDef{
        ID:                  "CreateThing",
        Tags:                []string{"things"},
        Summary:             "Create thing",
        Description:         "This endpoint creates a thing",
        Request:             new(types.PostableThing),
        RequestContentType:  "application/json",
        Response:            new(types.GettableThing),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusCreated,
        ErrorStatusCodes:    []int{http.StatusBadRequest, http.StatusConflict},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleAdmin),
    },
)).Methods(http.MethodPost).GetError(); err != nil {
    return err
}
```

### 4. Update the OpenAPI spec

Run the following command to update the OpenAPI spec:

```bash
go run cmd/enterprise/*.go generate openapi
```

This will update the OpenAPI spec in `docs/api/openapi.yml` to reflect the new endpoint.

## How does OpenAPI integration work?

The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAPIDef`. This drives the generated OpenAPI document.

- **ID**: A unique identifier for the operation (used as the `operationId`).
- **Tags**: Logical grouping for the operation (for example, `"users"`, `"sessions"`, `"orgs"`).
- **Summary / Description**: Human-friendly documentation.
- **Request / RequestContentType**:
  - `Request` is a Go type that describes the request body or form.
  - `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
- **Response / ResponseContentType**:
  - `Response` is the Go type for the successful response payload.
  - `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
- **SuccessStatusCode**: The HTTP status for successful responses (for example, `http.StatusOK`, `http.StatusCreated`, `http.StatusNoContent`).
- **ErrorStatusCodes**: Additional error status codes beyond the standard ones automatically added by `handler.New`.
- **SecuritySchemes**: Auth mechanisms and scopes required by the operation.

The generic handler:

- Automatically appends `401`, `403`, and `500` to `ErrorStatusCodes` when appropriate.
- Registers request and response schemas with the OpenAPI reflector so they appear in `docs/api/openapi.yml`.
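
As a hypothetical illustration of the less common settings above (an empty `ResponseContentType` and `http.StatusNoContent`), a delete-style registration might look roughly like this; `DeleteThing`, its route, and the `nil` request/response fields are assumptions for illustration, not real SigNoz handlers:

```go
if err := router.Handle("/api/v1/things/{id}", handler.New(
    provider.authZ.AdminAccess(provider.thingHandler.DeleteThing), // hypothetical handler method
    handler.OpenAPIDef{
        ID:                  "DeleteThing",
        Tags:                []string{"things"},
        Summary:             "Delete thing",
        Description:         "This endpoint deletes a thing",
        Request:             nil, // assumed: no request body
        RequestContentType:  "",
        Response:            nil, // assumed: no response body
        ResponseContentType: "", // empty for responses without a body
        SuccessStatusCode:   http.StatusNoContent,
        ErrorStatusCodes:    []int{http.StatusNotFound},
        Deprecated:          false,
        SecuritySchemes:     newSecuritySchemes(types.RoleAdmin),
    },
)).Methods(http.MethodDelete).GetError(); err != nil {
    return err
}
```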

See existing examples in:

- `addUserRoutes` (for typical JSON request/response)
- `addSessionRoutes` (for form-encoded and redirect flows)

## What should I remember?

- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
@@ -1,215 +0,0 @@
# Integration Tests

SigNoz uses integration tests to verify that different components work together correctly in a real environment. These tests run against actual services (ClickHouse, PostgreSQL, etc.) to ensure end-to-end functionality.

## How to set up the integration test environment?

### Prerequisites

Before running integration tests, ensure you have the following installed:

- Python 3.13+
- [uv](https://docs.astral.sh/uv/getting-started/installation/)
- Docker (for containerized services)

### Initial Setup

1. Navigate to the integration tests directory:
   ```bash
   cd tests/integration
   ```

2. Install dependencies using uv:
   ```bash
   uv sync
   ```

> **_NOTE:_** The build backend could throw an error while installing `psycopg2`; please see https://www.psycopg.org/docs/install.html#build-prerequisites

### Starting the Test Environment

To spin up all the containers necessary for writing integration tests and keep them running:

```bash
uv run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
```

This command will:
- Start all required services (ClickHouse, PostgreSQL, Zookeeper, etc.)
- Keep containers running due to the `--reuse` flag
- Verify that the setup is working correctly

### Stopping the Test Environment

When you're done writing integration tests, clean up the environment:

```bash
uv run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
```

This will destroy the running integration test setup and clean up resources.

## Understanding the Integration Test Framework

Python and pytest form the foundation of the integration testing framework. Testcontainers are used to spin up disposable integration environments. Wiremock is used to spin up **test doubles** of other services.

- **Why Python/pytest?** It's expressive, low-boilerplate, and has powerful fixture capabilities that make integration testing straightforward. Extensive libraries for HTTP requests, JSON handling, and data analysis (numpy) make it easier to test APIs and verify data.
- **Why testcontainers?** They let us spin up isolated dependencies that match our production environment without complex setup.
- **Why wiremock?** Well maintained, documented and extensible.

```
.
├── conftest.py
├── fixtures
│   ├── __init__.py
│   ├── auth.py
│   ├── clickhouse.py
│   ├── fs.py
│   ├── http.py
│   ├── migrator.py
│   ├── network.py
│   ├── postgres.py
│   ├── signoz.py
│   ├── sql.py
│   ├── sqlite.py
│   ├── types.py
│   └── zookeeper.py
├── uv.lock
├── pyproject.toml
└── src
    └── bootstrap
        ├── __init__.py
        ├── 01_database.py
        ├── 02_register.py
        └── 03_license.py
```

Each test suite follows some important principles:

1. **Organization**: Test suites live under `src/` in self-contained packages. Fixtures (a pytest concept) live inside `fixtures/`.
2. **Execution Order**: Files are prefixed with two-digit numbers (`01_`, `02_`, `03_`) to ensure sequential execution.
3. **Time Constraints**: Each suite should complete in under 10 minutes (setup takes ~4 mins).

### Test Suite Design

Test suites should target functional domains or subsystems within SigNoz. When designing a test suite, consider these principles:

- **Functional Cohesion**: Group tests around a specific capability or service boundary
- **Data Flow**: Follow the path of data through related components
- **Change Patterns**: Components frequently modified together should be tested together

The exact boundaries for modules are intentionally flexible, allowing teams to define logical groupings based on their specific context and knowledge of the system.

E.g., the **bootstrap** integration test suite validates core system functionality:

- Database initialization
- Version check

Other test suites can be **pipelines, auth, querier**.

## How to write an integration test?

Now start writing an integration test. Create a new file `src/bootstrap/05_version.py` and paste the following:

```python
import requests

from fixtures import types
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_version(signoz: types.SigNoz) -> None:
    response = requests.get(signoz.self.host_config.get("/api/v1/version"), timeout=2)
    logger.info(response)
```

We have written a simple test which calls the `version` endpoint of the container started in step 1. In order to run just this function, run the following command:

```bash
uv run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/05_version.py::test_version
```

> Note: The `--reuse` flag is used to reuse the environment if it is already running. Always use this flag when writing and running integration tests. If you don't use this flag, the environment will be destroyed and recreated every time you run the test.

Here's another example of how to write a more comprehensive integration test:

```python
from http import HTTPStatus

import requests

from fixtures import types
from fixtures.logger import setup_logger

logger = setup_logger(__name__)


def test_user_registration(signoz: types.SigNoz) -> None:
    """Test user registration functionality."""
    response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/register"),
        json={
            "name": "testuser",
            "orgId": "",
            "orgName": "test.org",
            "email": "test@example.com",
            "password": "password123Z$",
        },
        timeout=2,
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["setupCompleted"] is True
```

## How to run integration tests?

### Running All Tests

```bash
uv run pytest --basetemp=./tmp/ -vv --reuse src/
```

### Running Specific Test Categories

```bash
uv run pytest --basetemp=./tmp/ -vv --reuse src/<suite>

# Run querier tests
uv run pytest --basetemp=./tmp/ -vv --reuse src/querier/
# Run auth tests
uv run pytest --basetemp=./tmp/ -vv --reuse src/auth/
```

### Running Individual Tests

```bash
uv run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name

# Run test_register in file 01_register.py in passwordauthn suite
uv run pytest --basetemp=./tmp/ -vv --reuse src/passwordauthn/01_register.py::test_register
```

## How to configure different options for integration tests?

Tests can be configured using pytest options:

- `--sqlstore-provider` - Choose database provider (default: postgres)
- `--postgres-version` - PostgreSQL version (default: 15)
- `--clickhouse-version` - ClickHouse version (default: 25.5.6)
- `--zookeeper-version` - Zookeeper version (default: 3.7.1)

Example:
```bash
uv run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
```

## What should I remember?

- **Always use the `--reuse` flag** when setting up the environment to keep containers running
- **Use the `--teardown` flag** when cleaning up to avoid resource leaks
- **Follow the naming convention** with two-digit numeric prefixes (`01_`, `02_`) for test execution order
- **Use proper timeouts** in HTTP requests to avoid hanging tests
- **Clean up test data** between tests to avoid interference
- **Use descriptive test names** that clearly indicate what is being tested
- **Leverage fixtures** for common setup and authentication
- **Test both success and failure scenarios** to ensure robust functionality
@@ -1,301 +0,0 @@
# Onboarding Configuration Guide

This guide explains how to add new data sources to the SigNoz onboarding flow. The onboarding configuration controls the "New Source" / "Get Started" experience in SigNoz Cloud.

## Configuration File Location

The configuration is located at:

```
frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json
```

## JSON Structure Overview

The configuration file is a JSON array containing data source objects. Each object represents a selectable option in the onboarding flow.

## Data Source Object Keys

### Required Keys

| Key | Type | Description |
|-----|------|-------------|
| `dataSource` | `string` | Unique identifier for the data source (kebab-case, e.g., `"aws-ec2"`) |
| `label` | `string` | Display name shown to users (e.g., `"AWS EC2"`) |
| `tags` | `string[]` | Array of category tags for grouping (e.g., `["AWS"]`, `["database"]`) |
| `module` | `string` | Destination module after onboarding completion |
| `imgUrl` | `string` | Path to the logo/icon **(SVG required)** (e.g., `"/Logos/ec2.svg"`) |

### Optional Keys

| Key | Type | Description |
|-----|------|-------------|
| `link` | `string` | Docs link to redirect to (e.g., `"/docs/aws-monitoring/ec2/"`) |
| `relatedSearchKeywords` | `string[]` | Array of keywords for search functionality |
| `question` | `object` | Nested question object for multi-step flows |
| `internalRedirect` | `boolean` | When `true`, navigates within the app instead of showing docs |

## Module Values

The `module` key determines where users are redirected after completing onboarding:

| Value | Destination |
|-------|-------------|
| `apm` | APM / Traces |
| `logs` | Logs Explorer |
| `metrics` | Metrics Explorer |
| `dashboards` | Dashboards |
| `infra-monitoring-hosts` | Infrastructure Monitoring - Hosts |
| `infra-monitoring-k8s` | Infrastructure Monitoring - Kubernetes |
| `messaging-queues-kafka` | Messaging Queues - Kafka |
| `messaging-queues-celery` | Messaging Queues - Celery |
| `integrations` | Integrations page |
| `home` | Home page |
| `api-monitoring` | API Monitoring |

## Question Object Structure

The `question` object enables multi-step selection flows:

```json
{
  "question": {
    "desc": "What would you like to monitor?",
    "type": "select",
    "helpText": "Choose the telemetry type you want to collect.",
    "helpLink": "/docs/azure-monitoring/overview/",
    "helpLinkText": "Read the guide →",
    "options": [
      {
        "key": "logging",
        "label": "Logs",
        "imgUrl": "/Logos/azure-vm.svg",
        "link": "/docs/azure-monitoring/app-service/logging/"
      },
      {
        "key": "metrics",
        "label": "Metrics",
        "imgUrl": "/Logos/azure-vm.svg",
        "link": "/docs/azure-monitoring/app-service/metrics/"
      },
      {
        "key": "tracing",
        "label": "Traces",
        "imgUrl": "/Logos/azure-vm.svg",
        "link": "/docs/azure-monitoring/app-service/tracing/"
      }
    ]
  }
}
```

### Question Keys

| Key | Type | Description |
|-----|------|-------------|
| `desc` | `string` | Question text displayed to the user |
| `type` | `string` | Currently only `"select"` is supported |
| `helpText` | `string` | (Optional) Additional help text below the question |
| `helpLink` | `string` | (Optional) Docs link for the help section |
| `helpLinkText` | `string` | (Optional) Text for the help link (default: "Learn more →") |
| `options` | `array` | Array of option objects |

## Option Object Structure

Options can be simple (direct link) or nested (with another question):

### Simple Option (Direct Link)

```json
{
  "key": "aws-ec2-logs",
  "label": "Logs",
  "imgUrl": "/Logos/ec2.svg",
  "link": "/docs/userguide/collect_logs_from_file/"
}
```

### Option with Internal Redirect

```json
{
  "key": "aws-ec2-metrics-one-click",
  "label": "One Click AWS",
  "imgUrl": "/Logos/ec2.svg",
  "link": "/integrations?integration=aws-integration&service=ec2",
  "internalRedirect": true
}
```

> **Important**: Set `internalRedirect: true` only for internal app routes (like `/integrations?...`). Docs links should NOT have this flag.

### Nested Option (Multi-step Flow)

```json
{
  "key": "aws-ec2-metrics",
  "label": "Metrics",
  "imgUrl": "/Logos/ec2.svg",
  "question": {
    "desc": "How would you like to set up monitoring?",
    "helpText": "Choose your setup method.",
    "options": [...]
  }
}
```

## Examples

### Simple Data Source (Direct Link)

```json
{
  "dataSource": "aws-elb",
  "label": "AWS ELB",
  "tags": ["AWS"],
  "module": "logs",
  "relatedSearchKeywords": [
    "aws",
    "aws elb",
    "elb logs",
    "elastic load balancer"
  ],
  "imgUrl": "/Logos/elb.svg",
  "link": "/docs/aws-monitoring/elb/"
}
```

### Data Source with Single Question Level

```json
{
  "dataSource": "app-service",
  "label": "App Service",
  "imgUrl": "/Logos/azure-vm.svg",
  "tags": ["Azure"],
  "module": "apm",
  "relatedSearchKeywords": ["azure", "app service"],
  "question": {
    "desc": "What telemetry data do you want to visualise?",
    "type": "select",
    "options": [
      {
        "key": "logging",
        "label": "Logs",
        "imgUrl": "/Logos/azure-vm.svg",
        "link": "/docs/azure-monitoring/app-service/logging/"
      },
      {
        "key": "metrics",
        "label": "Metrics",
        "imgUrl": "/Logos/azure-vm.svg",
        "link": "/docs/azure-monitoring/app-service/metrics/"
      },
      {
        "key": "tracing",
        "label": "Traces",
        "imgUrl": "/Logos/azure-vm.svg",
        "link": "/docs/azure-monitoring/app-service/tracing/"
      }
    ]
  }
}
```

### Data Source with Nested Questions (2-3 Levels)

```json
{
  "dataSource": "aws-ec2",
  "label": "AWS EC2",
  "tags": ["AWS"],
  "module": "logs",
  "relatedSearchKeywords": ["aws", "aws ec2", "ec2 logs", "ec2 metrics"],
  "imgUrl": "/Logos/ec2.svg",
  "question": {
    "desc": "What would you like to monitor for AWS EC2?",
    "type": "select",
    "helpText": "Choose the type of telemetry data you want to collect.",
    "options": [
      {
        "key": "aws-ec2-logs",
        "label": "Logs",
        "imgUrl": "/Logos/ec2.svg",
        "link": "/docs/userguide/collect_logs_from_file/"
      },
      {
        "key": "aws-ec2-metrics",
        "label": "Metrics",
        "imgUrl": "/Logos/ec2.svg",
        "question": {
          "desc": "How would you like to set up EC2 Metrics monitoring?",
          "helpText": "One Click uses AWS CloudWatch integration. Manual setup uses OpenTelemetry.",
          "helpLink": "/docs/aws-monitoring/one-click-vs-manual/",
          "helpLinkText": "Read the comparison guide →",
          "options": [
            {
              "key": "aws-ec2-metrics-one-click",
              "label": "One Click AWS",
              "imgUrl": "/Logos/ec2.svg",
              "link": "/integrations?integration=aws-integration&service=ec2",
              "internalRedirect": true
            },
            {
              "key": "aws-ec2-metrics-manual",
              "label": "Manual Setup",
              "imgUrl": "/Logos/ec2.svg",
              "link": "/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/"
            }
          ]
        }
      }
    ]
  }
}
```

## Best Practices

### 1. Tags

- Use existing tags when possible: `AWS`, `Azure`, `GCP`, `database`, `logs`, `apm/traces`, `infrastructure monitoring`, `LLM Monitoring`
- Tags are used for grouping in the sidebar
- Every data source must have at least one tag

### 2. Search Keywords

- Include variations of the name (e.g., `"aws ec2"`, `"ec2"`, `"ec2 logs"`)
- Include common misspellings or alternative names
- Keep keywords lowercase and alphabetically sorted

### 3. Logos

- Place logo files in `public/Logos/`
- Use SVG format
- Reference as `"/Logos/your-logo.svg"`

### 4. Links

- Docs links should start with `/docs/` (will be prefixed with DOCS_BASE_URL)
- Internal app links should start with `/integrations`, `/services`, etc.
- Only use `internalRedirect: true` for internal app routes

### 5. Keys

- Use kebab-case for `dataSource` and option `key` values
- Make keys descriptive and unique
- Follow the pattern: `{service}-{subtype}-{action}` (e.g., `aws-ec2-metrics-one-click`)

## Adding a New Data Source

1. Add your data source object to the JSON array
2. Ensure the logo exists in `public/Logos/`
3. Test the flow locally with `yarn dev`
4. Validate the following:
   - Navigate to the [onboarding page](http://localhost:3301/get-started-with-signoz-cloud) on your local machine
   - The data source appears in the list
   - Search keywords work correctly
   - All links redirect to the correct pages
   - Questions display the correct help text and links
   - Tags are used for grouping in the UI sidebar
   - Clicking Configure redirects to the correct page
@@ -103,19 +103,9 @@ Remember to replace the region and ingestion key with proper values as obtained

Both SigNoz and the OTel demo app (the frontend-proxy service, to be accurate) share the same default port, 8080. To prevent a port allocation conflict, modify the OTel demo application config to use port 8081 as the `ENVOY_PORT` value as shown below, and run the docker compose command.

Also, both SigNoz and the OTel demo app have the same `PROMETHEUS_PORT` configured; by default both of them try to start at `9090`, which may cause either of them to fail depending upon which one acquires it first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.

```sh
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
ENVOY_PORT=8081 docker compose up -d
```

Alternatively, we can set `ENVOY_PORT` and `PROMETHEUS_PORT` in the `.env` file too, which reduces the command to just:

```sh
docker compose up -d
```

This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by:

```sh
docker compose ps -a
```
@@ -232,7 +232,7 @@ func (p *BaseSeasonalProvider) getPredictedSeries(

// moving avg of the previous period series + z score threshold * std dev of the series
// moving avg of the previous period series - z score threshold * std dev of the series
func (p *BaseSeasonalProvider) getBounds(
	series, predictedSeries, weekSeries *qbtypes.TimeSeries,
	series, predictedSeries *qbtypes.TimeSeries,
	zScoreThreshold float64,
) (*qbtypes.TimeSeries, *qbtypes.TimeSeries) {
	upperBoundSeries := &qbtypes.TimeSeries{

@@ -246,8 +246,8 @@ func (p *BaseSeasonalProvider) getBounds(
	}

	for idx, curr := range series.Values {
		upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(weekSeries)
		lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(weekSeries)
		upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
		lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
		upperBoundSeries.Values = append(upperBoundSeries.Values, &qbtypes.TimeSeriesValue{
			Timestamp: curr.Timestamp,
			Value:     upperBound,

@@ -398,6 +398,8 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
	aggOfInterest := result.Aggregations[0]

	for _, series := range aggOfInterest.Series {
		stdDev := p.getStdDev(series)
		p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)

		pastPeriodSeries := p.getMatchingSeries(ctx, pastPeriodResult, series)
		currentSeasonSeries := p.getMatchingSeries(ctx, currentSeasonResult, series)

@@ -405,9 +407,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
		past2SeasonSeries := p.getMatchingSeries(ctx, past2SeasonResult, series)
		past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)

		stdDev := p.getStdDev(currentSeasonSeries)
		p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)

		prevSeriesAvg := p.getAvg(pastPeriodSeries)
		currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
		pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)

@@ -436,7 +435,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
		upperBoundSeries, lowerBoundSeries := p.getBounds(
			series,
			predictedSeries,
			currentSeasonSeries,
			zScoreThreshold,
		)
		aggOfInterest.UpperBoundSeries = append(aggOfInterest.UpperBoundSeries, upperBoundSeries)
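For context, the bound in both versions is the moving average of the predicted series plus or minus the z-score threshold times a standard deviation; these hunks only swap which series the standard deviation comes from (the observed series instead of the seasonal reference series that was passed in before). A minimal, self-contained sketch of that computation, using plain float64 slices and hypothetical movingAvg/stdDev helpers rather than the repo's qbtypes API:

```go
package main

import (
	"fmt"
	"math"
)

// movingAvg averages the window of values ending at index idx (inclusive),
// clamped at the start of the series.
func movingAvg(series []float64, window, idx int) float64 {
	start := idx - window + 1
	if start < 0 {
		start = 0
	}
	sum := 0.0
	for _, v := range series[start : idx+1] {
		sum += v
	}
	return sum / float64(idx+1-start)
}

// stdDev is the population standard deviation of the series.
func stdDev(series []float64) float64 {
	mean := 0.0
	for _, v := range series {
		mean += v
	}
	mean /= float64(len(series))
	variance := 0.0
	for _, v := range series {
		variance += (v - mean) * (v - mean)
	}
	return math.Sqrt(variance / float64(len(series)))
}

func main() {
	observed := []float64{10, 12, 11, 13, 40, 12}  // the series being scored
	predicted := []float64{10, 11, 11, 12, 12, 12} // stand-in for the predicted series
	const window, z = 3, 3.0

	// After this change the band width is derived from the observed
	// series' std dev, not from a separate seasonal reference series.
	sd := stdDev(observed)
	for idx, value := range observed {
		upper := movingAvg(predicted, window, idx) + z*sd
		lower := movingAvg(predicted, window, idx) - z*sd
		fmt.Printf("idx=%d value=%.1f band=[%.2f, %.2f]\n", idx, value, lower, upper)
	}
}
```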
@@ -1,240 +0,0 @@

package oidccallbackauthn

import (
	"context"
	"fmt"
	"net/url"

	"github.com/SigNoz/signoz/pkg/authn"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/coreos/go-oidc/v3/oidc"
	"golang.org/x/oauth2"
)

const (
	redirectPath string = "/api/v1/complete/oidc"
)

var defaultScopes []string = []string{"email", "profile", oidc.ScopeOpenID}

var _ authn.CallbackAuthN = (*AuthN)(nil)

type AuthN struct {
	settings   factory.ScopedProviderSettings
	store      authtypes.AuthNStore
	licensing  licensing.Licensing
	httpClient *client.Client
}

func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn")

	httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider)
	if err != nil {
		return nil, err
	}

	return &AuthN{
		settings:   settings,
		store:      store,
		licensing:  licensing,
		httpClient: httpClient,
	}, nil
}

func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
	if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderOIDC {
		return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "domain type is not oidc")
	}

	_, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, siteURL, authDomain)
	if err != nil {
		return "", err
	}

	return oauth2Config.AuthCodeURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String()), nil
}

func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtypes.CallbackIdentity, error) {
	if err := query.Get("error"); err != "" {
		return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: error while authenticating").WithAdditional(query.Get("error_description"))
	}

	state, err := authtypes.NewStateFromString(query.Get("state"))
	if err != nil {
		return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "oidc: invalid state").WithAdditional(err.Error())
	}

	authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID)
	if err != nil {
		return nil, err
	}

	_, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	oidcProvider, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, state.URL, authDomain)
	if err != nil {
		return nil, err
	}

	ctx = context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client())
	token, err := oauth2Config.Exchange(ctx, query.Get("code"))
	if err != nil {
		var retrieveError *oauth2.RetrieveError
		if errors.As(err, &retrieveError) {
			return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body))
		}

		return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get token").WithAdditional(err.Error())
	}

	claims, err := a.claimsFromIDToken(ctx, authDomain, oidcProvider, token)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return nil, err
	}

	if claims == nil && authDomain.AuthDomainConfig().OIDC.GetUserInfo {
		claims, err = a.claimsFromUserInfo(ctx, oidcProvider, token)
		if err != nil {
			return nil, err
		}
	}

	emailClaim, ok := claims[authDomain.AuthDomainConfig().OIDC.ClaimMapping.Email].(string)
	if !ok {
		return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email in claims")
	}

	email, err := valuer.NewEmail(emailClaim)
	if err != nil {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to parse email").WithAdditional(err.Error())
	}

	if !authDomain.AuthDomainConfig().OIDC.InsecureSkipEmailVerified {
		emailVerifiedClaim, ok := claims["email_verified"].(bool)
		if !ok {
			return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email_verified in claims")
		}

		if !emailVerifiedClaim {
			return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "oidc: email is not verified")
		}
	}

	name := ""
	if nameClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Name; nameClaim != "" {
		if n, ok := claims[nameClaim].(string); ok {
			name = n
		}
	}

	var groups []string
	if groupsClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Groups; groupsClaim != "" {
		if claimValue, exists := claims[groupsClaim]; exists {
			switch g := claimValue.(type) {
			case []any:
				for _, group := range g {
					if gs, ok := group.(string); ok {
						groups = append(groups, gs)
					}
				}
			case string:
				// Some IDPs return a single group as a string instead of an array
				groups = append(groups, g)
			default:
				a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
			}
		}
	}

	role := ""
	if roleClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Role; roleClaim != "" {
		if r, ok := claims[roleClaim].(string); ok {
			role = r
		}
	}

	return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
}

func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
	return &authtypes.AuthNProviderInfo{
		RelayStatePath: nil,
	}
}

func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
	if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
		ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)
	}

	oidcProvider, err := oidc.NewProvider(ctx, authDomain.AuthDomainConfig().OIDC.Issuer)
	if err != nil {
		return nil, nil, err
	}

	scopes := make([]string, len(defaultScopes))
	copy(scopes, defaultScopes)

	if authDomain.AuthDomainConfig().RoleMapping != nil && len(authDomain.AuthDomainConfig().RoleMapping.GroupMappings) > 0 {
		scopes = append(scopes, "groups")
	}

	return oidcProvider, &oauth2.Config{
		ClientID:     authDomain.AuthDomainConfig().OIDC.ClientID,
		ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret,
		Endpoint:     oidcProvider.Endpoint(),
		Scopes:       scopes,
		RedirectURL: (&url.URL{
			Scheme: siteURL.Scheme,
			Host:   siteURL.Host,
			Path:   redirectPath,
		}).String(),
	}, nil
}

func (a *AuthN) claimsFromIDToken(ctx context.Context, authDomain *authtypes.AuthDomain, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) {
	rawIDToken, ok := token.Extra("id_token").(string)
	if !ok {
		return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "oidc: no id_token in token response")
	}

	verifier := provider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().OIDC.ClientID})
	idToken, err := verifier.Verify(ctx, rawIDToken)
	if err != nil {
		return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to verify token").WithAdditional(err.Error())
	}

	var claims map[string]any
	if err := idToken.Claims(&claims); err != nil {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error())
	}

	return claims, nil
}

func (a *AuthN) claimsFromUserInfo(ctx context.Context, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) {
	var claims map[string]any

	userInfo, err := provider.UserInfo(ctx, oauth2.StaticTokenSource(&oauth2.Token{
		AccessToken: token.AccessToken,
		TokenType:   "Bearer", // The UserInfo endpoint requires a bearer token as per RFC6750
	}))
	if err != nil {
		return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get user info").WithAdditional(err.Error())
	}

	if err := userInfo.Claims(&claims); err != nil {
		return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error())
	}

	return claims, nil
}
@@ -1,182 +0,0 @@

package samlcallbackauthn

import (
	"context"
	"crypto/x509"
	"encoding/base64"
	"encoding/pem"
	"net/url"
	"strings"

	"github.com/SigNoz/signoz/pkg/authn"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	saml2 "github.com/russellhaering/gosaml2"
	dsig "github.com/russellhaering/goxmldsig"
)

const (
	redirectPath string = "/api/v1/complete/saml"
)

var _ authn.CallbackAuthN = (*AuthN)(nil)

type AuthN struct {
	store     authtypes.AuthNStore
	licensing licensing.Licensing
}

func New(ctx context.Context, store authtypes.AuthNStore, licensing licensing.Licensing) (*AuthN, error) {
	return &AuthN{
		store:     store,
		licensing: licensing,
	}, nil
}

func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
	if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderSAML {
		return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "saml: domain type is not saml")
	}

	sp, err := a.serviceProvider(siteURL, authDomain)
	if err != nil {
		return "", err
	}

	url, err := sp.BuildAuthURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String())
	if err != nil {
		return "", err
	}

	return url, nil
}

func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*authtypes.CallbackIdentity, error) {
	state, err := authtypes.NewStateFromString(formValues.Get("RelayState"))
	if err != nil {
		return nil, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "saml: invalid state").WithAdditional(err.Error())
	}

	authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID)
	if err != nil {
		return nil, err
	}

	_, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	sp, err := a.serviceProvider(state.URL, authDomain)
	if err != nil {
		return nil, err
	}

	assertionInfo, err := sp.RetrieveAssertionInfo(formValues.Get("SAMLResponse"))
	if err != nil {
		if errors.As(err, &saml2.ErrVerification{}) {
			return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, err.Error())
		}

		if errors.As(err, &saml2.ErrMissingElement{}) {
			return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, err.Error())
		}

		return nil, err
	}

	if assertionInfo.WarningInfo.InvalidTime {
		return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "saml: expired saml response")
	}

	email, err := valuer.NewEmail(assertionInfo.NameID)
	if err != nil {
		return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.")
	}

	name := ""
	if nameAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Name; nameAttribute != "" {
		if val := assertionInfo.Values.Get(nameAttribute); val != "" {
			name = val
		}
	}

	var groups []string
	if groupAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Groups; groupAttribute != "" {
		groups = assertionInfo.Values.GetAll(groupAttribute)
	}

	role := ""
	if roleAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Role; roleAttribute != "" {
		if val := assertionInfo.Values.Get(roleAttribute); val != "" {
			role = val
		}
	}

	return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
}

func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
	state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()

	return &authtypes.AuthNProviderInfo{
		RelayStatePath: &state,
	}
}

func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
	certStore, err := a.getCertificateStore(authDomain)
	if err != nil {
		return nil, err
	}

	acsURL := &url.URL{Scheme: siteURL.Scheme, Host: siteURL.Host, Path: redirectPath}

	// Note:
	// The ServiceProviderIssuer is the client id in case of keycloak. Since we set it to the host here, we need to set the client id == host in keycloak.
	// For AWSSSO, this is the value of Application SAML audience.
	return &saml2.SAMLServiceProvider{
		IdentityProviderSSOURL:      authDomain.AuthDomainConfig().SAML.SamlIdp,
		IdentityProviderIssuer:      authDomain.AuthDomainConfig().SAML.SamlEntity,
		ServiceProviderIssuer:       siteURL.Host,
		AssertionConsumerServiceURL: acsURL.String(),
		SignAuthnRequests:           !authDomain.AuthDomainConfig().SAML.InsecureSkipAuthNRequestsSigned,
		AllowMissingAttributes:      true,
		IDPCertificateStore:         certStore,
		SPKeyStore:                  dsig.RandomKeyStoreForTest(),
	}, nil
}

func (a *AuthN) getCertificateStore(authDomain *authtypes.AuthDomain) (dsig.X509CertificateStore, error) {
	certStore := &dsig.MemoryX509CertificateStore{
		Roots: []*x509.Certificate{},
	}

	var certBytes []byte
	if strings.Contains(authDomain.AuthDomainConfig().SAML.SamlCert, "-----BEGIN CERTIFICATE-----") {
		block, _ := pem.Decode([]byte(authDomain.AuthDomainConfig().SAML.SamlCert))
		if block == nil {
			return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid pem cert found")
		}

		certBytes = block.Bytes
	} else {
		certData, err := base64.StdEncoding.DecodeString(authDomain.AuthDomainConfig().SAML.SamlCert)
		if err != nil {
			return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to read certificate: %s", err.Error())
		}

		certBytes = certData
	}

	idpCert, err := x509.ParseCertificate(certBytes)
	if err != nil {
		return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to prepare saml request, invalid cert: %s", err.Error())
	}

	certStore.Roots = append(certStore.Roots, idpCert)

	return certStore, nil
}
@@ -1,98 +0,0 @@

package openfgaauthz

import (
	"context"

	"github.com/SigNoz/signoz/pkg/authz"
	pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	openfgav1 "github.com/openfga/api/proto/openfga/v1"
	openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)

type provider struct {
	pkgAuthzService authz.AuthZ
}

func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
	return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
		return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
	})
}

func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
	pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
	pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
	if err != nil {
		return nil, err
	}

	return &provider{
		pkgAuthzService: pkgAuthzService,
	}, nil
}

func (provider *provider) Start(ctx context.Context) error {
	return provider.pkgAuthzService.Start(ctx)
}

func (provider *provider) Stop(ctx context.Context) error {
	return provider.pkgAuthzService.Stop(ctx)
}

func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
	return provider.pkgAuthzService.Check(ctx, tuple)
}

func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
	subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
	if err != nil {
		return err
	}

	tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
	if err != nil {
		return err
	}

	err = provider.BatchCheck(ctx, tuples)
	if err != nil {
		return err
	}

	return nil
}

func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
	subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
	if err != nil {
		return err
	}

	tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
	if err != nil {
		return err
	}

	err = provider.BatchCheck(ctx, tuples)
	if err != nil {
		return err
	}

	return nil
}

func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
	return provider.pkgAuthzService.BatchCheck(ctx, tuples)
}

func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
	return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
}

func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
	return provider.pkgAuthzService.Write(ctx, additions, deletions)
}
@@ -1,40 +0,0 @@

module base

type organisation
  relations
    define read: [user, role#assignee]
    define update: [user, role#assignee]

type user
  relations
    define read: [user, role#assignee]
    define update: [user, role#assignee]
    define delete: [user, role#assignee]

type anonymous

type role
  relations
    define assignee: [user, anonymous]

    define read: [user, role#assignee]
    define update: [user, role#assignee]
    define delete: [user, role#assignee]

type metaresources
  relations
    define create: [user, role#assignee]
    define list: [user, role#assignee]

type metaresource
  relations
    define read: [user, anonymous, role#assignee]
    define update: [user, role#assignee]
    define delete: [user, role#assignee]

    define block: [user, role#assignee]

type telemetryresource
  relations
    define read: [user, role#assignee]
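The schema above is easiest to read alongside the tuples that satisfy it. A hedged sketch (illustrative IDs, not code from this PR) of the two tuple shapes the model allows, using the openfgav1.TupleKey type the provider code checks against:

```go
package main

import (
	"fmt"

	openfgav1 "github.com/openfga/api/proto/openfga/v1"
)

func main() {
	// Direct user grant: "alice" can read the organisation object,
	// matching `define read: [user, role#assignee]` on type organisation.
	direct := &openfgav1.TupleKey{
		User:     "user:alice",
		Relation: "read",
		Object:   "organisation:acme",
	}

	// Role-based grant: every assignee of the hypothetical role "editor"
	// can update the metaresource, via the `role#assignee` userset.
	viaRole := &openfgav1.TupleKey{
		User:     "role:editor#assignee",
		Relation: "update",
		Object:   "metaresource:dashboard-1",
	}

	for _, tuple := range []*openfgav1.TupleKey{direct, viaRole} {
		fmt.Printf("%s %s %s\n", tuple.GetUser(), tuple.GetRelation(), tuple.GetObject())
	}
}
```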
@@ -1,29 +0,0 @@

package openfgaschema

import (
	"context"
	_ "embed"

	"github.com/SigNoz/signoz/pkg/authz"
	openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
)

var (
	//go:embed base.fga
	baseDSL string
)

type schema struct{}

func NewSchema() authz.Schema {
	return &schema{}
}

func (schema *schema) Get(ctx context.Context) []openfgapkgtransformer.ModuleFile {
	return []openfgapkgtransformer.ModuleFile{
		{
			Name:     "base.fga",
			Contents: baseDSL,
		},
	}
}
@@ -1,282 +0,0 @@

package httpgateway

import (
	"bytes"
	"context"
	"encoding/json"
	"io"
	"net/http"
	"net/url"
	"strconv"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/gateway"
	"github.com/SigNoz/signoz/pkg/http/client"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/tidwall/gjson"
)

type Provider struct {
	settings   factory.ScopedProviderSettings
	config     gateway.Config
	httpClient *client.Client
	licensing  licensing.Licensing
}

func NewProviderFactory(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
	return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, ps factory.ProviderSettings, c gateway.Config) (gateway.Gateway, error) {
		return New(ctx, ps, c, licensing)
	})
}

func New(ctx context.Context, providerSettings factory.ProviderSettings, config gateway.Config, licensing licensing.Licensing) (gateway.Gateway, error) {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/gateway/httpgateway")

	httpClient, err := client.New(
		settings.Logger(),
		providerSettings.TracerProvider,
		providerSettings.MeterProvider,
		client.WithRequestResponseLog(true),
		client.WithRetryCount(3),
	)
	if err != nil {
		return nil, err
	}

	return &Provider{
		settings:   settings,
		config:     config,
		httpClient: httpClient,
		licensing:  licensing,
	}, nil
}

func (provider *Provider) GetIngestionKeys(ctx context.Context, orgID valuer.UUID, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
	qParams := url.Values{}
	qParams.Add("page", strconv.Itoa(page))
	qParams.Add("per_page", strconv.Itoa(perPage))

	responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys", qParams, nil)
	if err != nil {
		return nil, err
	}

	var ingestionKeys []gatewaytypes.IngestionKey
	if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
		return nil, err
	}

	var pagination gatewaytypes.Pagination
	if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
		return nil, err
	}

	return &gatewaytypes.GettableIngestionKeys{
		Keys:       ingestionKeys,
		Pagination: pagination,
	}, nil
}

func (provider *Provider) SearchIngestionKeysByName(ctx context.Context, orgID valuer.UUID, name string, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
	qParams := url.Values{}
	qParams.Add("name", name)
	qParams.Add("page", strconv.Itoa(page))
	qParams.Add("per_page", strconv.Itoa(perPage))

	responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys/search", qParams, nil)
	if err != nil {
		return nil, err
	}

	var ingestionKeys []gatewaytypes.IngestionKey
	if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
		return nil, err
	}

	var pagination gatewaytypes.Pagination
	if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
		return nil, err
	}

	return &gatewaytypes.GettableIngestionKeys{
		Keys:       ingestionKeys,
		Pagination: pagination,
	}, nil
}

func (provider *Provider) CreateIngestionKey(ctx context.Context, orgID valuer.UUID, name string, tags []string, expiresAt time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error) {
	requestBody := gatewaytypes.PostableIngestionKey{
		Name:      name,
		Tags:      tags,
		ExpiresAt: expiresAt,
	}
	requestBodyBytes, err := json.Marshal(requestBody)
	if err != nil {
		return nil, err
	}

	responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys", nil, requestBodyBytes)
	if err != nil {
		return nil, err
	}

	var createdKeyResponse gatewaytypes.GettableCreatedIngestionKey
	if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdKeyResponse); err != nil {
		return nil, err
	}

	return &createdKeyResponse, nil
}

func (provider *Provider) UpdateIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string, name string, tags []string, expiresAt time.Time) error {
	requestBody := gatewaytypes.PostableIngestionKey{
		Name:      name,
		Tags:      tags,
		ExpiresAt: expiresAt,
	}
	requestBodyBytes, err := json.Marshal(requestBody)
	if err != nil {
		return err
	}

	_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/keys/"+keyID, nil, requestBodyBytes)
	if err != nil {
		return err
	}

	return nil
}

func (provider *Provider) DeleteIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string) error {
	_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/keys/"+keyID, nil, nil)
	if err != nil {
		return err
	}

	return nil
}

func (provider *Provider) CreateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, keyID string, signal string, limitConfig gatewaytypes.LimitConfig, tags []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error) {
	requestBody := gatewaytypes.PostableIngestionKeyLimit{
		Signal: signal,
		Config: limitConfig,
		Tags:   tags,
	}
	requestBodyBytes, err := json.Marshal(requestBody)
	if err != nil {
		return nil, err
	}

	responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys/"+keyID+"/limits", nil, requestBodyBytes)
	if err != nil {
		return nil, err
	}

	var createdIngestionKeyLimitResponse gatewaytypes.GettableCreatedIngestionKeyLimit
	if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdIngestionKeyLimitResponse); err != nil {
		return nil, err
	}

	return &createdIngestionKeyLimitResponse, nil
}

func (provider *Provider) UpdateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string, limitConfig gatewaytypes.LimitConfig, tags []string) error {
	requestBody := gatewaytypes.UpdatableIngestionKeyLimit{
		Config: limitConfig,
		Tags:   tags,
	}
	requestBodyBytes, err := json.Marshal(requestBody)
	if err != nil {
		return err
	}

	_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/limits/"+limitID, nil, requestBodyBytes)
	if err != nil {
		return err
	}

	return nil
}

func (provider *Provider) DeleteIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string) error {
	_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/limits/"+limitID, nil, nil)
	if err != nil {
		return err
	}

	return nil
}

func (provider *Provider) do(ctx context.Context, orgID valuer.UUID, method string, path string, queryParams url.Values, body []byte) ([]byte, error) {
	license, err := provider.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "no valid license found").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	// build url
	requestURL := provider.config.URL.JoinPath(path)

	// add query params to the url
	if queryParams != nil {
		requestURL.RawQuery = queryParams.Encode()
	}

	// build request
	request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), bytes.NewBuffer(body))
	if err != nil {
		return nil, err
	}

	// add headers needed to call gateway
	request.Header.Set("Content-Type", "application/json")
	request.Header.Set("X-Signoz-Cloud-Api-Key", license.Key)
	request.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
	request.Header.Set("X-Consumer-Groups", "ns:default")

	// execute request
	response, err := provider.httpClient.Do(request)
	if err != nil {
		return nil, err
	}

	// read response
	defer response.Body.Close()
	responseBody, err := io.ReadAll(response.Body)
	if err != nil {
		return nil, err
	}

	// only 2XX
	if response.StatusCode/100 == 2 {
		return responseBody, nil
	}

	errorMessage := gjson.GetBytes(responseBody, "error").String()
	if errorMessage == "" {
		errorMessage = "an unknown error occurred"
	}

	// return error for non 2XX
	return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
}

func (provider *Provider) errFromStatusCode(code int, errorMessage string) error {
	switch code {
	case http.StatusBadRequest:
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
	case http.StatusUnauthorized:
		return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
	case http.StatusForbidden:
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
	case http.StatusNotFound:
		return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
	case http.StatusConflict:
		return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
	}

	return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
}
@@ -1,10 +1,10 @@

package licensing

import (
	"fmt"
	"sync"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/licensing"
)

@@ -18,7 +18,7 @@ func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
	once.Do(func() {
		config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
		if err := config.Validate(); err != nil {
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid licensing config"))
			panic(fmt.Errorf("invalid licensing config: %w", err))
		}
	})
@@ -1,299 +0,0 @@

package impldashboard

import (
	"context"
	"maps"

	"github.com/SigNoz/signoz/pkg/analytics"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/dashboard"
	pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/queryparser"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type module struct {
	pkgDashboardModule dashboard.Module
	store              dashboardtypes.Store
	settings           factory.ScopedProviderSettings
	roleSetter         role.Setter
	granter            role.Granter
	querier            querier.Querier
	licensing          licensing.Licensing
}

func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
	scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
	pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)

	return &module{
		pkgDashboardModule: pkgDashboardModule,
		store:              store,
		settings:           scopedProviderSettings,
		roleSetter:         roleSetter,
		granter:            granter,
		querier:            querier,
		licensing:          licensing,
	}
}

func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storablePublicDashboard, err := module.store.GetPublic(ctx, publicDashboard.DashboardID.StringValue())
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return err
	}
	if storablePublicDashboard != nil {
		return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
	}

	role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
	if err != nil {
		return err
	}

	err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
	if err != nil {
		return err
	}

	additionObject := authtypes.MustNewObject(
		authtypes.Resource{
			Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
			Type: authtypes.TypeMetaResource,
		},
		authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
	)

	err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
	if err != nil {
		return err
	}

	err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
	if err != nil {
		return err
	}

	return nil
}

func (module *module) GetPublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) (*dashboardtypes.PublicDashboard, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storablePublicDashboard, err := module.store.GetPublic(ctx, dashboardID.StringValue())
	if err != nil {
		return nil, err
	}

	return dashboardtypes.NewPublicDashboardFromStorablePublicDashboard(storablePublicDashboard), nil
}

func (module *module) GetDashboardByPublicID(ctx context.Context, id valuer.UUID) (*dashboardtypes.Dashboard, error) {
	storableDashboard, err := module.store.GetDashboardByPublicID(ctx, id.StringValue())
	if err != nil {
		return nil, err
	}

	return dashboardtypes.NewDashboardFromStorableDashboard(storableDashboard), nil
}

func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id valuer.UUID, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
	orgIDs := make([]string, len(orgs))
	for idx, org := range orgs {
		orgIDs[idx] = org.ID.StringValue()
	}

	storableDashboard, err := module.store.GetDashboardByOrgsAndPublicID(ctx, orgIDs, id.StringValue())
	if err != nil {
		return nil, valuer.UUID{}, err
	}

	return []authtypes.Selector{
		authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
	}, storableDashboard.OrgID, nil
}

func (module *module) GetPublicWidgetQueryRange(ctx context.Context, id valuer.UUID, widgetIdx, startTime, endTime uint64) (*querybuildertypesv5.QueryRangeResponse, error) {
	dashboard, err := module.GetDashboardByPublicID(ctx, id)
	if err != nil {
		return nil, err
	}

	query, err := dashboard.GetWidgetQuery(startTime, endTime, widgetIdx, module.settings.Logger())
	if err != nil {
		return nil, err
	}

	return module.querier.QueryRange(ctx, dashboard.OrgID, query)
}

func (module *module) UpdatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return module.store.UpdatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
}

func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
	dashboard, err := module.Get(ctx, orgID, id)
	if err != nil {
		return err
	}

	if dashboard.Locked {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "dashboard is locked, please unlock the dashboard to be delete it")
	}

	err = module.store.RunInTx(ctx, func(ctx context.Context) error {
		err := module.deletePublic(ctx, orgID, id)
		if err != nil && !errors.Ast(err, errors.TypeNotFound) {
			return err
		}

		err = module.store.Delete(ctx, orgID, id)
		if err != nil {
			return err
		}

		return nil
	})
	if err != nil {
		return err
	}

	return nil
}

func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
	if err != nil {
		return err
	}

	role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
	if err != nil {
		return err
	}

	deletionObject := authtypes.MustNewObject(
		authtypes.Resource{
			Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
			Type: authtypes.TypeMetaResource,
		},
		authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
	)

	err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
	if err != nil {
		return err
	}

	err = module.store.DeletePublic(ctx, dashboardID.StringValue())
	if err != nil {
		return err
	}

	return nil
}

func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
	dashboards, err := module.store.List(ctx, orgID)
	if err != nil {
		return nil, err
	}

	publicDashboards, err := module.store.ListPublic(ctx, orgID)
	if err != nil {
		return nil, err
	}

	stats := make(map[string]any)
	maps.Copy(stats, dashboardtypes.NewStatsFromStorableDashboards(dashboards))
	maps.Copy(stats, dashboardtypes.NewStatsFromStorablePublicDashboards(publicDashboards))
	return stats, nil
}

func (module *module) Create(ctx context.Context, orgID valuer.UUID, createdBy string, creator valuer.UUID, data dashboardtypes.PostableDashboard) (*dashboardtypes.Dashboard, error) {
	return module.pkgDashboardModule.Create(ctx, orgID, createdBy, creator, data)
}

func (module *module) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*dashboardtypes.Dashboard, error) {
	return module.pkgDashboardModule.Get(ctx, orgID, id)
}

func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string][]map[string]string, error) {
	return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
}

func (module *module) MustGetTypeables() []authtypes.Typeable {
	return module.pkgDashboardModule.MustGetTypeables()
}

func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
	return module.pkgDashboardModule.List(ctx, orgID)
}

func (module *module) Update(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, data dashboardtypes.UpdatableDashboard, diff int) (*dashboardtypes.Dashboard, error) {
	return module.pkgDashboardModule.Update(ctx, orgID, id, updatedBy, data, diff)
}

func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, role types.Role, lock bool) error {
	return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
}

func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
	publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
	if err != nil {
		return err
	}

	role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
	if err != nil {
		return err
	}

	deletionObject := authtypes.MustNewObject(
		authtypes.Resource{
			Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
			Type: authtypes.TypeMetaResource,
		},
		authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
	)

	err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
	if err != nil {
		return err
	}

	err = module.store.DeletePublic(ctx, dashboardID.StringValue())
	if err != nil {
		return err
	}

	return nil
}
@@ -1,165 +0,0 @@

package implrole

import (
	"context"
	"slices"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type setter struct {
	store     roletypes.Store
	authz     authz.AuthZ
	licensing licensing.Licensing
	registry  []role.RegisterTypeable
}

func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
	return &setter{
		store:     store,
		authz:     authz,
		licensing: licensing,
		registry:  registry,
	}
}

func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}

func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
	if err != nil {
		if !errors.Ast(err, errors.TypeNotFound) {
			return nil, err
		}
	}

	if existingRole != nil {
		return roletypes.NewRoleFromStorableRole(existingRole), nil
	}

	err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
	if err != nil {
		return nil, err
	}

	return role, nil
}

func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
	typeables := make([]authtypes.Typeable, 0)
	for _, register := range setter.registry {
		typeables = append(typeables, register.MustGetTypeables()...)
	}
	// role module cannot self register itself!
	typeables = append(typeables, setter.MustGetTypeables()...)

	resources := make([]*authtypes.Resource, 0)
	for _, typeable := range typeables {
		resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
	}

	return resources
}

func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
	storableRole, err := setter.store.Get(ctx, orgID, id)
	if err != nil {
		return nil, err
	}

	objects := make([]*authtypes.Object, 0)
	for _, resource := range setter.GetResources(ctx) {
		if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
			resourceObjects, err := setter.authz.ListObjects(
				ctx,
				authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
				relation,
				authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
			)
			if err != nil {
				return nil, err
			}

			objects = append(objects, resourceObjects...)
		}
	}

	return objects, nil
}

func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}

func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
	if err != nil {
		return err
	}

	deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
	if err != nil {
		return err
	}

	err = setter.authz.Write(ctx, additionTuples, deletionTuples)
	if err != nil {
		return err
	}

	return nil
}

func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storableRole, err := setter.store.Get(ctx, orgID, id)
	if err != nil {
		return err
	}

	role := roletypes.NewRoleFromStorableRole(storableRole)
	err = role.CanEditDelete()
	if err != nil {
		return err
	}

	return setter.store.Delete(ctx, orgID, id)
}

func (setter *setter) MustGetTypeables() []authtypes.Typeable {
	return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
@@ -50,7 +50,7 @@ type GetAnomaliesResponse struct {
//
//	^                               ^
//	|                               |
//	(rounded value for past period) + (seasonal growth)
//	(rounded value for past peiod) + (seasonal growth)
//
// score = abs(value - prediction) / stddev (current_season_query)
type anomalyQueryParams struct {
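Read together, the comment lines above compress the whole detector into a prediction plus a normalised deviation. As a reading aid, here is a minimal Go sketch of that arithmetic; it is not code from this PR, and the five float inputs are hypothetical pre-computed aggregates standing in for the six seasonal queries the params struct below describes. In particular, "seasonal growth" is assumed to be the current-season average minus the past-season average, which is consistent with the truncated comment but not spelled out in it.

package anomalysketch // hypothetical package, for illustration only

import "math"

// anomalyScore sketches the comments above: the prediction is the past
// period's value plus the seasonal growth, and the score is the absolute
// deviation normalised by the current season's standard deviation.
func anomalyScore(value, pastPeriodAvg, currentSeasonAvg, pastSeasonAvg, currentSeasonStdDev float64) float64 {
	prediction := pastPeriodAvg + (currentSeasonAvg - pastSeasonAvg)
	return math.Abs(value-prediction) / currentSeasonStdDev
}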
@@ -74,12 +74,12 @@ type anomalyQueryParams struct {
	// : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
	// : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
	PastSeasonQuery *v3.QueryRangeParamsV3
	// Past2SeasonQuery is the query range params for past 2 seasonal periods to the current season
	// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
	// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
	// : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
	// : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
	Past2SeasonQuery *v3.QueryRangeParamsV3
	// Past3SeasonQuery is the query range params for past 3 seasonal periods to the current season
	// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
	// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
	// : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
	// : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)

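All of the window comments above follow a single pattern: the k-th past season spans (now - (k+1)·season - 5m, now - k·season). A hypothetical helper that reproduces the documented examples (seasonWindow is not an identifier from this codebase, just an illustration of the rule):

import "time" // assuming only the standard library

// seasonWindow returns the k-th past seasonal window per the comments above:
// for a daily season and k=1 it yields (now-2d-5m, now-1d); for k=2,
// (now-3d-5m, now-2d); for k=3, (now-4d-5m, now-3d).
func seasonWindow(now time.Time, season time.Duration, k int) (start, end time.Time) {
	end = now.Add(-time.Duration(k) * season)
	start = end.Add(-season - 5*time.Minute)
	return start, end
}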
@@ -10,7 +10,6 @@ import (
	"github.com/SigNoz/signoz/ee/query-service/usage"
	"github.com/SigNoz/signoz/pkg/alertmanager"
	"github.com/SigNoz/signoz/pkg/apis/fields"
	"github.com/SigNoz/signoz/pkg/global"
	"github.com/SigNoz/signoz/pkg/http/middleware"
	querierAPI "github.com/SigNoz/signoz/pkg/querier"
	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -20,8 +19,8 @@ import (
	"github.com/SigNoz/signoz/pkg/query-service/interfaces"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	rules "github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/queryparser"
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/version"
	"github.com/gorilla/mux"
)
@@ -36,8 +35,10 @@ type APIHandlerOptions struct {
	Gateway *httputil.ReverseProxy
	GatewayUrl string
	// Querier Influx Interval
	FluxInterval time.Duration
	GlobalConfig global.Config
	FluxInterval time.Duration
	UseLogsNewSchema bool
	UseTraceNewSchema bool
	JWT *authtypes.JWT
}

type APIHandler struct {
@@ -59,7 +60,6 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
		FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
		Signoz: signoz,
		QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	})

	if err != nil {
@@ -92,6 +92,9 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
	// routes available only in ee version
	router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)

	// paid plans specific routes
	router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)

	// base overrides
	router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)

107
ee/query-service/app/api/auth.go
Normal file
@@ -0,0 +1,107 @@
package api

import (
	"context"
	"encoding/base64"
	"fmt"
	"net/http"
	"net/url"

	"go.uber.org/zap"

	"github.com/SigNoz/signoz/pkg/query-service/constants"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
	ssoError := []byte("Login failed. Please contact your system administrator")
	dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
	base64.StdEncoding.Encode(dst, ssoError)

	http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
}

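handleSsoError above always lands the browser on <redirectURL>?ssoerror=<base64 message>, so a client can recover the plain-text message with the matching standard-library decode. A sketch, not code from this PR:

// Hypothetical client-side decode of the ssoerror query parameter.
msg, err := base64.StdEncoding.DecodeString(r.URL.Query().Get("ssoerror"))
if err == nil {
	fmt.Println(string(msg)) // "Login failed. Please contact your system administrator"
}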
// receiveSAML completes a SAML request and gets user logged in
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
	// this is the source url that initiated the login request
	redirectUri := constants.GetDefaultSiteURL()
	ctx := context.Background()

	err := r.ParseForm()
	if err != nil {
		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
		handleSsoError(w, r, redirectUri)
		return
	}

	// the relay state is sent when a login request is submitted to
	// Idp.
	relayState := r.FormValue("RelayState")
	zap.L().Debug("[receiveSAML] relay state", zap.String("relayState", relayState))

	parsedState, err := url.Parse(relayState)
	if err != nil || relayState == "" {
		zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
		handleSsoError(w, r, redirectUri)
		return
	}

	// upgrade redirect url from the relay state for better accuracy
	redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")

	// fetch domain by parsing relay state.
	domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState)
	if err != nil {
		handleSsoError(w, r, redirectUri)
		return
	}

	orgID, err := valuer.NewUUID(domain.OrgID)
	if err != nil {
		handleSsoError(w, r, redirectUri)
		return
	}

	_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
	if err != nil {
		zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
		http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
		return
	}

	sp, err := domain.PrepareSamlRequest(parsedState)
	if err != nil {
		zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
	if err != nil {
		zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	if assertionInfo.WarningInfo.InvalidTime {
		zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	email := assertionInfo.NameID
	if email == "" {
		zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
		handleSsoError(w, r, redirectUri)
		return
	}

	nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email)
	if err != nil {
		zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
		handleSsoError(w, r, redirectUri)
		return
	}

	http.Redirect(w, r, nextPage, http.StatusSeeOther)
}

@@ -10,13 +10,14 @@ import (
	"strings"
	"time"

	"github.com/SigNoz/signoz/ee/query-service/constants"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/modules/user"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
	"github.com/gorilla/mux"
	"go.uber.org/zap"
)
@@ -76,7 +77,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
		return
	}

	signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
	ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
	if apiErr != nil {
		RespondError(w, basemodel.WrapApiError(
			apiErr, "couldn't deduce ingestion url and signoz api url",
@@ -84,7 +85,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
		return
	}

	result.IngestionUrl = ah.opts.GlobalConfig.IngestionURL.String()
	result.IngestionUrl = ingestionUrl
	result.SigNozAPIUrl = signozApiUrl

	gatewayUrl := ah.opts.GatewayUrl
@@ -167,66 +168,95 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
	ctx context.Context, orgId string, cloudProvider string,
) (*types.User, *basemodel.ApiError) {
	cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
	email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
	cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
	email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)

	cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
	integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
	}

	if integrationUserResult != nil {
		return &integrationUserResult.User, nil
	}

	zap.L().Info(
		"cloud integration user not found. Attempting to create the user",
		zap.String("cloudProvider", cloudProvider),
	)

	newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
	if err != nil {
		return nil, basemodel.InternalError(fmt.Errorf(
			"couldn't create cloud integration user: %w", err,
		))
	}

	password, err := types.NewFactorPassword(uuid.NewString())

	integrationUser, err := ah.Signoz.Modules.User.CreateUserWithPassword(ctx, newUser, password)
	if err != nil {
		return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
	}

	password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())

	cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
	if err != nil {
		return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
	}

	return cloudIntegrationUser, nil
	return integrationUser, nil
}

func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
	string, *basemodel.ApiError,
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
	string, string, *basemodel.ApiError,
) {
	// TODO: remove this struct from here
	url := fmt.Sprintf(
		"%s%s",
		strings.TrimSuffix(constants.ZeusURL, "/"),
		"/v2/deployments/me",
	)

	type deploymentResponse struct {
		Name string `json:"name"`
		ClusterInfo struct {
			Region struct {
				DNS string `json:"dns"`
			} `json:"region"`
		} `json:"cluster"`
		Status string `json:"status"`
		Error string `json:"error"`
		Data struct {
			Name string `json:"name"`

			ClusterInfo struct {
				Region struct {
					DNS string `json:"dns"`
				} `json:"region"`
			} `json:"cluster"`
		} `json:"data"`
	}

	respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
	if err != nil {
		return "", basemodel.InternalError(fmt.Errorf(
			"couldn't query for deployment info: error: %w", err,
	resp, apiErr := requestAndParseResponse[deploymentResponse](
		ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
	)

	if apiErr != nil {
		return "", "", basemodel.WrapApiError(
			apiErr, "couldn't query for deployment info",
		)
	}

	if resp.Status != "success" {
		return "", "", basemodel.InternalError(fmt.Errorf(
			"couldn't query for deployment info: status: %s, error: %s",
			resp.Status, resp.Error,
		))
	}

	resp := new(deploymentResponse)

	err = json.Unmarshal(respBytes, resp)
	if err != nil {
		return "", basemodel.InternalError(fmt.Errorf(
			"couldn't unmarshal deployment info response: error: %w", err,
		))
	}

	regionDns := resp.ClusterInfo.Region.DNS
	deploymentName := resp.Name
	regionDns := resp.Data.ClusterInfo.Region.DNS
	deploymentName := resp.Data.Name

	if len(regionDns) < 1 || len(deploymentName) < 1 {
		// Fail early if actual response structure and expectation here ever diverge
		return "", basemodel.InternalError(fmt.Errorf(
		return "", "", basemodel.InternalError(fmt.Errorf(
			"deployment info response not in expected shape. couldn't determine region dns and deployment name",
		))
	}

	ingestionUrl := fmt.Sprintf("https://ingest.%s", regionDns)

	signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)

	return signozApiUrl, nil
	return ingestionUrl, signozApiUrl, nil
}

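To make the two Sprintf calls above concrete, this is the shape of the output under hypothetical deployment values (not taken from any real response):

// regionDns      = "us-east-1.signoz.cloud"   (hypothetical)
// deploymentName = "acme"                     (hypothetical)
// ingestionUrl   = "https://ingest.us-east-1.signoz.cloud"
// signozApiUrl   = "https://acme.us-east-1.signoz.cloud"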
type ingestionKey struct {

@@ -9,10 +9,9 @@ import (
	"time"

	"github.com/SigNoz/signoz/ee/query-service/constants"
	"github.com/SigNoz/signoz/pkg/flagger"
	pkgError "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/featuretypes"
	"github.com/SigNoz/signoz/pkg/types/licensetypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"go.uber.org/zap"
@@ -26,7 +25,11 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
		return
	}

	orgID := valuer.MustNewUUID(claims.OrgID)
	orgID, err := valuer.NewUUID(claims.OrgID)
	if err != nil {
		render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid"))
		return
	}

	featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
	if err != nil {
@@ -56,16 +59,13 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
		}
	}

	evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
	useSpanMetrics := ah.Signoz.Flagger.BooleanOrEmpty(ctx, flagger.FeatureUseSpanMetrics, evalCtx)

	featureSet = append(featureSet, &licensetypes.Feature{
		Name: valuer.NewString(flagger.FeatureUseSpanMetrics.String()),
		Active: useSpanMetrics,
		Usage: 0,
		UsageLimit: -1,
		Route: "",
	})
	if constants.IsPreferSpanMetrics {
		for idx, feature := range featureSet {
			if feature.Name == licensetypes.UseSpanMetrics {
				featureSet[idx].Active = true
			}
		}
	}

	if constants.IsDotMetricsEnabled {
		for idx, feature := range featureSet {

@@ -257,20 +257,18 @@ func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
		results = append(results, item)
	}

	// Build step intervals from the anomaly query
	stepIntervals := make(map[string]uint64)
	if anomalyQuery.StepInterval.Duration > 0 {
		stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
	}

	finalResp := &qbtypes.QueryRangeResponse{
		Type: queryRangeRequest.RequestType,
		Data: qbtypes.QueryData{
		Data: struct {
			Results []any `json:"results"`
		}{
			Results: results,
		},
		Meta: qbtypes.ExecStats{
			StepIntervals: stepIntervals,
		},
		Meta: struct {
			RowsScanned uint64 `json:"rowsScanned"`
			BytesScanned uint64 `json:"bytesScanned"`
			DurationMS uint64 `json:"durationMs"`
		}{},
	}

	render.Success(rw, http.StatusOK, finalResp)

@@ -3,18 +3,10 @@ package app
import (
	"context"
	"fmt"
	"log/slog"
	"net"
	"net/http"
	_ "net/http/pprof" // http profiler
	"slices"

	"github.com/SigNoz/signoz/pkg/cache/memorycache"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/queryparser"
	"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
	"go.opentelemetry.io/otel/propagation"

	"github.com/gorilla/handlers"

@@ -31,6 +23,7 @@ import (
	"github.com/SigNoz/signoz/pkg/signoz"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/web"
	"github.com/rs/cors"
	"github.com/soheilhy/cmux"
@@ -51,10 +44,24 @@ import (
	"go.uber.org/zap"
)

type ServerOptions struct {
	Config signoz.Config
	SigNoz *signoz.SigNoz
	HTTPHostPort string
	PrivateHostPort string
	PreferSpanMetrics bool
	FluxInterval string
	FluxIntervalForTraceDetail string
	Cluster string
	GatewayUrl string
	Jwt *authtypes.JWT
}

// Server runs HTTP, Mux and a grpc server
type Server struct {
	config signoz.Config
	signoz *signoz.SigNoz
	jwt *authtypes.JWT
	ruleManager *baserules.Manager

	// public http router
@@ -62,6 +69,11 @@ type Server struct {
	httpServer *http.Server
	httpHostPort string

	// private http
	privateConn net.Listener
	privateHTTP *http.Server
	privateHostPort string

	opampServer *opamp.Server

	// Usage manager
@@ -71,32 +83,19 @@ type Server struct {
}

// NewServer creates and initializes Server
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) {
	gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
	if err != nil {
		return nil, err
	}

	cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
		Provider: "memory",
		Memory: cache.Memory{
			NumCounters: 10 * 10000,
			MaxCost: 1 << 27, // 128 MB
		},
	})
	if err != nil {
		return nil, err
	}

	reader := clickhouseReader.NewReader(
		signoz.SQLStore,
		signoz.TelemetryStore,
		signoz.Prometheus,
		signoz.TelemetryStore.Cluster(),
		config.Querier.FluxInterval,
		cacheForTraceDetail,
		signoz.Cache,
		nil,
	)

	rm, err := makeRulesManager(
@@ -105,12 +104,10 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		signoz.Alertmanager,
		signoz.SQLStore,
		signoz.TelemetryStore,
		signoz.TelemetryMetadataStore,
		signoz.Prometheus,
		signoz.Modules.OrgGetter,
		signoz.Querier,
		signoz.Instrumentation.ToProviderSettings(),
		signoz.QueryParser,
		signoz.Instrumentation.Logger(),
	)

	if err != nil {
@@ -172,7 +169,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		FluxInterval: config.Querier.FluxInterval,
		Gateway: gatewayProxy,
		GatewayUrl: config.Gateway.URL.String(),
		GlobalConfig: config.Global,
		JWT: jwt,
	}

	apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
@@ -183,8 +180,10 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
	s := &Server{
		config: config,
		signoz: signoz,
		jwt: jwt,
		ruleManager: rm,
		httpHostPort: baseconst.HTTPHostPort,
		privateHostPort: baseconst.PrivateHostPort,
		unavailableChannel: make(chan healthcheck.Status),
		usageManager: usageManager,
	}
@@ -197,6 +196,13 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {

	s.httpServer = httpServer

	privateServer, err := s.createPrivateServer(apiHandler)
	if err != nil {
		return nil, err
	}

	s.privateHTTP = privateServer

	s.opampServer = opamp.InitializeServer(
		&opAmpModel.AllAgents, agentConfMgr, signoz.Instrumentation,
	)
@@ -209,21 +215,41 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
	return s.unavailableChannel
}

func (s *Server) createPrivateServer(apiHandler *api.APIHandler) (*http.Server, error) {
	r := baseapp.NewRouter()

	r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
	r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
		s.config.APIServer.Timeout.ExcludedRoutes,
		s.config.APIServer.Timeout.Default,
		s.config.APIServer.Timeout.Max,
	).Wrap)
	r.Use(middleware.NewLogging(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes).Wrap)

	apiHandler.RegisterPrivateRoutes(r)

	c := cors.New(cors.Options{
		//todo(amol): find out a way to add exact domain or
		// ip here for alert manager
		AllowedOrigins: []string{"*"},
		AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH"},
		AllowedHeaders: []string{"Accept", "Authorization", "Content-Type", "SIGNOZ-API-KEY", "X-SIGNOZ-QUERY-ID", "Sec-WebSocket-Protocol"},
	})

	handler := c.Handler(r)
	handler = handlers.CompressHandler(handler)

	return &http.Server{
		Handler: handler,
	}, nil
}

func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
	r := baseapp.NewRouter()
	am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
	am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())

	r.Use(otelmux.Middleware(
		"apiserver",
		otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
		otelmux.WithTracerProvider(s.signoz.Instrumentation.TracerProvider()),
		otelmux.WithPropagators(propagation.NewCompositeTextMapPropagator(propagation.Baggage{}, propagation.TraceContext{})),
		otelmux.WithFilter(func(r *http.Request) bool {
			return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
		}),
		otelmux.WithPublicEndpoint(),
	))
	r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
	r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
	r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
		s.config.APIServer.Timeout.ExcludedRoutes,
@@ -248,11 +274,6 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
	apiHandler.MetricExplorerRoutes(r, am)
	apiHandler.RegisterTraceFunnelsRoutes(r, am)

	err := s.signoz.APIServer.AddToRouter(r)
	if err != nil {
		return nil, err
	}

	c := cors.New(cors.Options{
		AllowedOrigins: []string{"*"},
		AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
@@ -263,7 +284,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h

	handler = handlers.CompressHandler(handler)

	err = web.AddToRouter(r)
	err := web.AddToRouter(r)
	if err != nil {
		return nil, err
	}
@@ -289,6 +310,19 @@ func (s *Server) initListeners() error {

	zap.L().Info(fmt.Sprintf("Query server started listening on %s...", s.httpHostPort))

	// listen on private port to support internal services
	privateHostPort := s.privateHostPort

	if privateHostPort == "" {
		return fmt.Errorf("baseconst.PrivateHostPort is required")
	}

	s.privateConn, err = net.Listen("tcp", privateHostPort)
	if err != nil {
		return err
	}
	zap.L().Info(fmt.Sprintf("Query server started listening on private port %s...", s.privateHostPort))

	return nil
}

@@ -327,6 +361,26 @@ func (s *Server) Start(ctx context.Context) error {
		}
	}()

	var privatePort int
	if port, err := utils.GetPort(s.privateConn.Addr()); err == nil {
		privatePort = port
	}

	go func() {
		zap.L().Info("Starting Private HTTP server", zap.Int("port", privatePort), zap.String("addr", s.privateHostPort))

		switch err := s.privateHTTP.Serve(s.privateConn); err {
		case nil, http.ErrServerClosed, cmux.ErrListenerClosed:
			// normal exit, nothing to do
			zap.L().Info("private http server closed")
		default:
			zap.L().Error("Could not start private HTTP server", zap.Error(err))
		}

		s.unavailableChannel <- healthcheck.Unavailable

	}()

	go func() {
		zap.L().Info("Starting OpAmp Websocket server", zap.String("addr", baseconst.OpAmpWsEndpoint))
		err := s.opampServer.Start(baseconst.OpAmpWsEndpoint)
@@ -346,6 +400,12 @@ func (s *Server) Stop(ctx context.Context) error {
		}
	}

	if s.privateHTTP != nil {
		if err := s.privateHTTP.Shutdown(ctx); err != nil {
			return err
		}
	}

	s.opampServer.Stop()

	if s.ruleManager != nil {
@@ -358,29 +418,33 @@ func (s *Server) Stop(ctx context.Context) error {
	return nil
}

func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, metadataStore telemetrytypes.MetadataStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
	ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
	maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
func makeRulesManager(
	ch baseint.Reader,
	cache cache.Cache,
	alertmanager alertmanager.Alertmanager,
	sqlstore sqlstore.SQLStore,
	telemetryStore telemetrystore.TelemetryStore,
	prometheus prometheus.Prometheus,
	orgGetter organization.Getter,
	querier querier.Querier,
	logger *slog.Logger,
) (*baserules.Manager, error) {
	// create manager opts
	managerOpts := &baserules.ManagerOptions{
		TelemetryStore: telemetryStore,
		MetadataStore: metadataStore,
		Prometheus: prometheus,
		Context: context.Background(),
		Logger: zap.L(),
		Reader: ch,
		Querier: querier,
		SLogger: providerSettings.Logger,
		SLogger: logger,
		Cache: cache,
		EvalDelay: baseconst.GetEvalDelay(),
		PrepareTaskFunc: rules.PrepareTaskFunc,
		PrepareTestRuleFunc: rules.TestNotification,
		Alertmanager: alertmanager,
		SQLStore: sqlstore,
		OrgGetter: orgGetter,
		RuleStore: ruleStore,
		MaintenanceStore: maintenanceStore,
		SqlStore: sqlstore,
		QueryParser: queryParser,
	}

	// create Manager

@@ -4,12 +4,19 @@ import (
	"os"
)

const (
	DefaultSiteURL = "https://localhost:8080"
)

var LicenseSignozIo = "https://license.signoz.io/api/v1"
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")

// this is set via build time variable
var ZeusURL = "https://api.signoz.cloud"

func GetOrDefaultEnv(key string, fallback string) string {
	v := os.Getenv(key)
	if len(v) == 0 {
@@ -20,12 +27,24 @@ func GetOrDefaultEnv(key string, fallback string) string {

// constant functions that override env vars

// GetDefaultSiteURL returns default site url, primarily
// used to send saml request and allowing backend to
// handle http redirect
func GetDefaultSiteURL() string {
	return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
}

const DotMetricsEnabled = "DOT_METRICS_ENABLED"

var IsDotMetricsEnabled = false
var IsPreferSpanMetrics = false

func init() {
	if GetOrDefaultEnv(DotMetricsEnabled, "true") == "true" {
	if GetOrDefaultEnv(DotMetricsEnabled, "false") == "true" {
		IsDotMetricsEnabled = true
	}

	if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" {
		IsPreferSpanMetrics = true
	}
}

@@ -1,7 +1,7 @@
package model

import (
	"errors"
	"fmt"

	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
)
@@ -57,7 +57,7 @@ func Unauthorized(err error) *ApiError {
func BadRequestStr(s string) *ApiError {
	return &ApiError{
		Typ: basemodel.ErrorBadData,
		Err: errors.New(s),
		Err: fmt.Errorf(s),
	}
}

@@ -73,7 +73,7 @@ func InternalError(err error) *ApiError {
func InternalErrorStr(s string) *ApiError {
	return &ApiError{
		Typ: basemodel.ErrorInternal,
		Err: errors.New(s),
		Err: fmt.Errorf(s),
	}
}

@@ -35,6 +35,7 @@ import (
	anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	yaml "gopkg.in/yaml.v2"
)

const (
@@ -78,6 +79,11 @@ func NewAnomalyRule(

	opts = append(opts, baserules.WithLogger(logger))

	if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
		target := -1 * *p.RuleCondition.Target
		p.RuleCondition.Target = &target
	}

	baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
	if err != nil {
		return nil, err
@@ -161,9 +167,16 @@ func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.
		ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
	)

	st, en := r.Timestamps(ts)
	start := st.UnixMilli()
	end := en.UnixMilli()
	start := ts.Add(-time.Duration(r.EvalWindow())).UnixMilli()
	end := ts.UnixMilli()

	if r.EvalDelay() > 0 {
		start = start - int64(r.EvalDelay().Milliseconds())
		end = end - int64(r.EvalDelay().Milliseconds())
	}
	// round to minute otherwise we could potentially miss data
	start = start - (start % (60 * 1000))
	end = end - (end % (60 * 1000))
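	// Worked example with a hypothetical timestamp (not from this PR):
	// start = 1700000123456 ms rounds down to 1700000100000 ms, the top of
	// the minute, so a partially filled minute bucket is never queried.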
	compositeQuery := r.Condition().CompositeQuery

@@ -234,29 +247,16 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
		}
	}

	hasData := len(queryResult.AnomalyScores) > 0
	if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
		return ruletypes.Vector{*missingDataAlert}, nil
	}

	var resultVector ruletypes.Vector

	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
	r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))

	for _, series := range queryResult.AnomalyScores {
		if !r.Condition().ShouldEval(series) {
			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
			continue
		smpl, shouldAlert := r.ShouldAlert(*series)
		if shouldAlert {
			resultVector = append(resultVector, smpl)
		}
		results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
			ActiveAlerts: r.ActiveAlertsLabelFP(),
			SendUnmatched: r.ShouldSendUnmatched(),
		})
		if err != nil {
			return nil, err
		}
		resultVector = append(resultVector, results...)
	}
	return resultVector, nil
}
@@ -290,46 +290,21 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,

	queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)

	hasData := len(queryResult.AnomalyScores) > 0
	if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
		return ruletypes.Vector{*missingDataAlert}, nil
	}

	var resultVector ruletypes.Vector

	scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
	r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))

	// Filter out new series if newGroupEvalDelay is configured
	seriesToProcess := queryResult.AnomalyScores
	if r.ShouldSkipNewGroups() {
		filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
		// In case of error we log the error and continue with the original series
		if filterErr != nil {
			r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
		} else {
			seriesToProcess = filteredSeries
	for _, series := range queryResult.AnomalyScores {
		smpl, shouldAlert := r.ShouldAlert(*series)
		if shouldAlert {
			resultVector = append(resultVector, smpl)
		}
	}

	for _, series := range seriesToProcess {
		if !r.Condition().ShouldEval(series) {
			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
			continue
		}
		results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
			ActiveAlerts: r.ActiveAlertsLabelFP(),
			SendUnmatched: r.ShouldSendUnmatched(),
		})
		if err != nil {
			return nil, err
		}
		resultVector = append(resultVector, results...)
	}
	return resultVector, nil
}

func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {

	prevState := r.State()

@@ -346,7 +321,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
		res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
	}
	if err != nil {
		return 0, err
		return nil, err
	}

	r.mtx.Lock()
@@ -355,19 +330,14 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
	resultFPs := map[uint64]struct{}{}
	var alerts = make(map[uint64]*ruletypes.Alert, len(res))

	ruleReceivers := r.Threshold.GetRuleReceivers()
	ruleReceiverMap := make(map[string][]string)
	for _, value := range ruleReceivers {
		ruleReceiverMap[value.Name] = value.Channels
	}

	for _, smpl := range res {
		l := make(map[string]string, len(smpl.Metric))
		for _, lbl := range smpl.Metric {
			l[lbl.Name] = lbl.Value
		}

		value := valueFormatter.Format(smpl.V, r.Unit())
		threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
		threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
		r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)

		tmplData := ruletypes.AlertTemplateData(l, value, threshold)
@@ -411,7 +381,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
		}
		if smpl.IsMissing {
			lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
			lb.Set(labels.NoDataLabel, "true")
		}

		lbs := lb.Labels()
@@ -421,7 +390,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
		if _, ok := alerts[h]; ok {
			r.logger.ErrorContext(ctx, "the alert query returns duplicate records", "rule_id", r.ID(), "alert", alerts[h])
			err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
			return 0, err
			return nil, err
		}

		alerts[h] = &ruletypes.Alert{
@@ -432,13 +401,13 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
			State: model.StatePending,
			Value: smpl.V,
			GeneratorURL: r.GeneratorURL(),
			Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
			Receivers: r.PreferredChannels(),
			Missing: smpl.IsMissing,
			IsRecovering: smpl.IsRecovering,
		}
	}

	r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))

	// alerts[h] is ready, add or update active list now
	for h, a := range alerts {
		// Check whether we already have alerting state for the identifying label set.
@@ -447,12 +416,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {

			alert.Value = a.Value
			alert.Annotations = a.Annotations
			// Update the recovering and missing state of existing alert
			alert.IsRecovering = a.IsRecovering
			alert.Missing = a.Missing
			if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
				alert.Receivers = ruleReceiverMap[v]
			}
			alert.Receivers = r.PreferredChannels()
			continue
		}

@@ -508,30 +472,6 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
				Value: a.Value,
			})
		}

		// We need to change firing alert to recovering if the returned sample meets recovery threshold
		changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
		// We need to change recovering alerts to firing if the returned sample meets target threshold
		changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
		// in any of the above case we need to update the status of alert
		if changeFiringToRecovering || changeRecoveringToFiring {
			state := model.StateRecovering
			if changeRecoveringToFiring {
				state = model.StateFiring
			}
			a.State = state
			r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
			itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
				RuleID: r.ID(),
				RuleName: r.Name(),
				State: state,
				StateChanged: true,
				UnixMilli: ts.UnixMilli(),
				Labels: model.LabelsString(labelsJSON),
				Fingerprint: a.QueryResultLables.Hash(),
				Value: a.Value,
			})
		}
	}

	currentState := r.State()
@@ -559,7 +499,7 @@ func (r *AnomalyRule) String() string {
		PreferredChannels: r.PreferredChannels(),
	}

	byt, err := json.Marshal(ar)
	byt, err := yaml.Marshal(ar)
	if err != nil {
		return fmt.Sprintf("error marshaling alerting rule: %s", err.Error())
	}

@@ -1,268 +0,0 @@
package rules

import (
	"context"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/SigNoz/signoz/ee/query-service/anomaly"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
	responses []*anomaly.GetAnomaliesResponse
	callCount int
}

func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
	if m.callCount >= len(m.responses) {
		return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
	}
	resp := m.responses[m.callCount]
	m.callCount++
	return resp, nil
}

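A note on the mock above: it hands out its scripted responses in order, one per Eval call, and once the script is exhausted it returns an empty result set, which is exactly the "no data" shape the tests below rely on.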
func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
	// Test basic AlertOnAbsent functionality (without AbsentFor grace period)

	baseTime := time.Unix(1700000000, 0)
	evalWindow := 5 * time.Minute
	evalTime := baseTime.Add(5 * time.Minute)

	target := 500.0

	postableRule := ruletypes.PostableRule{
		AlertName: "Test anomaly no data",
		AlertType: ruletypes.AlertTypeMetric,
		RuleType: RuleTypeAnomaly,
		Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
			EvalWindow: ruletypes.Duration(evalWindow),
			Frequency: ruletypes.Duration(1 * time.Minute),
		}},
		RuleCondition: &ruletypes.RuleCondition{
			CompareOp: ruletypes.ValueIsAbove,
			MatchType: ruletypes.AtleastOnce,
			Target: &target,
			CompositeQuery: &v3.CompositeQuery{
				QueryType: v3.QueryTypeBuilder,
				BuilderQueries: map[string]*v3.BuilderQuery{
					"A": {
						QueryName: "A",
						Expression: "A",
						DataSource: v3.DataSourceMetrics,
						Temporality: v3.Unspecified,
					},
				},
			},
			SelectedQuery: "A",
			Seasonality: "daily",
			Thresholds: &ruletypes.RuleThresholdData{
				Kind: ruletypes.BasicThresholdKind,
				Spec: ruletypes.BasicRuleThresholds{{
					Name: "Test anomaly no data",
					TargetValue: &target,
					MatchType: ruletypes.AtleastOnce,
					CompareOp: ruletypes.ValueIsAbove,
				}},
			},
		},
	}

	responseNoData := &anomaly.GetAnomaliesResponse{
		Results: []*v3.Result{
			{
				QueryName: "A",
				AnomalyScores: []*v3.Series{},
			},
		},
	}

	cases := []struct {
		description string
		alertOnAbsent bool
		expectAlerts int
	}{
		{
			description: "AlertOnAbsent=false",
			alertOnAbsent: false,
			expectAlerts: 0,
		},
		{
			description: "AlertOnAbsent=true",
			alertOnAbsent: true,
			expectAlerts: 1,
		},
	}

	logger := instrumentationtest.New().Logger()

	for _, c := range cases {
		t.Run(c.description, func(t *testing.T) {
			postableRule.RuleCondition.AlertOnAbsent = c.alertOnAbsent

			telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
			options := clickhouseReader.NewOptions("primaryNamespace")
			reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)

			rule, err := NewAnomalyRule(
				"test-anomaly-rule",
				valuer.GenerateUUID(),
				&postableRule,
				reader,
				nil,
				logger,
				nil,
			)
			require.NoError(t, err)

			rule.provider = &mockAnomalyProvider{
				responses: []*anomaly.GetAnomaliesResponse{responseNoData},
			}

			alertsFound, err := rule.Eval(context.Background(), evalTime)
			require.NoError(t, err)
			assert.Equal(t, c.expectAlerts, alertsFound)
		})
	}
}

func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
	// Test missing data alert with AbsentFor grace period
	// 1. Call Eval with data at time t1, to populate lastTimestampWithDatapoints
	// 2. Call Eval without data at time t2
	// 3. Alert fires only if t2 - t1 > AbsentFor

	baseTime := time.Unix(1700000000, 0)
	evalWindow := 5 * time.Minute

	// Set target higher than test data so regular threshold alerts don't fire
	target := 500.0

	postableRule := ruletypes.PostableRule{
		AlertName: "Test anomaly no data with AbsentFor",
		AlertType: ruletypes.AlertTypeMetric,
		RuleType: RuleTypeAnomaly,
		Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
			EvalWindow: ruletypes.Duration(evalWindow),
			Frequency: ruletypes.Duration(time.Minute),
		}},
		RuleCondition: &ruletypes.RuleCondition{
			CompareOp: ruletypes.ValueIsAbove,
			MatchType: ruletypes.AtleastOnce,
			AlertOnAbsent: true,
			Target: &target,
			CompositeQuery: &v3.CompositeQuery{
				QueryType: v3.QueryTypeBuilder,
				BuilderQueries: map[string]*v3.BuilderQuery{
					"A": {
						QueryName: "A",
						Expression: "A",
						DataSource: v3.DataSourceMetrics,
						Temporality: v3.Unspecified,
					},
				},
			},
			SelectedQuery: "A",
			Seasonality: "daily",
			Thresholds: &ruletypes.RuleThresholdData{
				Kind: ruletypes.BasicThresholdKind,
				Spec: ruletypes.BasicRuleThresholds{{
					Name: "Test anomaly no data with AbsentFor",
					TargetValue: &target,
					MatchType: ruletypes.AtleastOnce,
					CompareOp: ruletypes.ValueIsAbove,
				}},
			},
		},
	}

	responseNoData := &anomaly.GetAnomaliesResponse{
		Results: []*v3.Result{
			{
				QueryName: "A",
				AnomalyScores: []*v3.Series{},
			},
		},
	}

	cases := []struct {
		description string
		absentFor uint64
		timeBetweenEvals time.Duration
		expectAlertOnEval2 int
	}{
		{
			description: "WithinGracePeriod",
			absentFor: 5,
			timeBetweenEvals: 4 * time.Minute,
			expectAlertOnEval2: 0,
		},
		{
			description: "AfterGracePeriod",
			absentFor: 5,
			timeBetweenEvals: 6 * time.Minute,
			expectAlertOnEval2: 1,
		},
	}

	logger := instrumentationtest.New().Logger()

	for _, c := range cases {
		t.Run(c.description, func(t *testing.T) {
			postableRule.RuleCondition.AbsentFor = c.absentFor

			t1 := baseTime.Add(5 * time.Minute)
			t2 := t1.Add(c.timeBetweenEvals)

			responseWithData := &anomaly.GetAnomaliesResponse{
				Results: []*v3.Result{
					{
						QueryName: "A",
						AnomalyScores: []*v3.Series{
							{
								Labels: map[string]string{"test": "label"},
								Points: []v3.Point{
									{Timestamp: baseTime.UnixMilli(), Value: 1.0},
									{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
								},
							},
						},
					},
				},
			}

			telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
			options := clickhouseReader.NewOptions("primaryNamespace")
			reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)

			rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
			require.NoError(t, err)

			rule.provider = &mockAnomalyProvider{
				responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
			}

			alertsFound1, err := rule.Eval(context.Background(), t1)
			require.NoError(t, err)
			assert.Equal(t, 0, alertsFound1, "First eval with data should not alert")

			alertsFound2, err := rule.Eval(context.Background(), t2)
			require.NoError(t, err)
			assert.Equal(t, c.expectAlertOnEval2, alertsFound2)
		})
	}
}
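The AbsentFor behaviour this deleted test exercises reduces to a single comparison. A minimal, hypothetical distillation (shouldFireNoData and lastDataTS are illustrative names, not identifiers from the codebase; the real tracking lives in the rule's lastTimestampWithDatapoints, as the test comment notes):

// shouldFireNoData mirrors the test's expectation: a no-data alert fires
// only once the gap since the last datapoint exceeds the AbsentFor grace
// period, which the rule condition expresses in minutes.
func shouldFireNoData(now, lastDataTS time.Time, absentForMinutes uint64) bool {
	return now.Sub(lastDataTS) > time.Duration(absentForMinutes)*time.Minute
}

With the test's cases: absentFor=5 and a 4-minute gap yields false (0 alerts), while a 6-minute gap yields true (1 alert).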
@@ -3,14 +3,12 @@ package rules
import (
	"context"
	"fmt"

	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
	baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/google/uuid"
	"go.uber.org/zap"
@@ -22,10 +20,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
	var task baserules.Task

	ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
	evaluation, err := opts.Rule.Evaluation.GetEvaluation()
	if err != nil {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
	}
	if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
		// create a threshold rule
		tr, err := baserules.NewThresholdRule(
@@ -37,8 +31,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
			opts.SLogger,
			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
			baserules.WithSQLStore(opts.SQLStore),
			baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
			baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
		)

		if err != nil {
@@ -47,8 +39,8 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)

		rules = append(rules, tr)

		// create ch rule task for evaluation
		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
		// create ch rule task for evalution
		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

	} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

@@ -61,8 +53,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
			opts.Reader,
			opts.ManagerOpts.Prometheus,
			baserules.WithSQLStore(opts.SQLStore),
			baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
			baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
		)

		if err != nil {
@@ -71,8 +61,8 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)

		rules = append(rules, pr)

		// create promql rule task for evaluation
		task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
		// create promql rule task for evalution
		task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

	} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
		// create anomaly rule
@@ -86,8 +76,6 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
			opts.Cache,
			baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
			baserules.WithSQLStore(opts.SQLStore),
			baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
			baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
		)
		if err != nil {
			return task, err
@@ -95,8 +83,8 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)

		rules = append(rules, ar)

		// create anomaly rule task for evaluation
		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
		// create anomaly rule task for evalution
		task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(opts.Rule.Frequency), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

	} else {
		return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -132,6 +120,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
	if parsedRule.RuleType == ruletypes.RuleTypeThreshold {

		// add special labels for test alerts
		parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
		parsedRule.Labels[labels.RuleSourceLabel] = ""
		parsedRule.Labels[labels.AlertRuleIdLabel] = ""

@@ -146,8 +135,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
			baserules.WithSendAlways(),
			baserules.WithSendUnmatched(),
			baserules.WithSQLStore(opts.SQLStore),
			baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
			baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
		)

		if err != nil {
@@ -168,8 +155,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
			baserules.WithSendAlways(),
			baserules.WithSendUnmatched(),
			baserules.WithSQLStore(opts.SQLStore),
			baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
			baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
		)

		if err != nil {
@@ -189,8 +174,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
			baserules.WithSendAlways(),
			baserules.WithSendUnmatched(),
			baserules.WithSQLStore(opts.SQLStore),
			baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
			baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
		)
		if err != nil {
			zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
@@ -203,12 +186,16 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
	// set timestamp to current utc time
	ts := time.Now().UTC()

	alertsFound, err := rule.Eval(ctx, ts)
	count, err := rule.Eval(ctx, ts)
	if err != nil {
		zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
		return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
	}
	rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)
	alertsFound, ok := count.(int)
	if !ok {
		return 0, basemodel.InternalError(fmt.Errorf("something went wrong"))
	}
	rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), opts.NotifyFunc)

	return alertsFound, nil
}

@@ -1,283 +0,0 @@
package rules

import (
	"context"
	"encoding/json"
	"math"
	"strconv"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/alertmanager"
	alertmanagermock "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertest"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/prometheus"
	"github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
	"github.com/SigNoz/signoz/pkg/query-service/rules"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
	"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	cmock "github.com/srikanthccv/ClickHouse-go-mock"
)

func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
	target := 10.0
	recovery := 5.0

	for _, tc := range rules.TcTestNotiSendUnmatchedThresholdRule {
		t.Run(tc.Name, func(t *testing.T) {
			rule := rules.ThresholdRuleAtLeastOnceValueAbove(target, &recovery)

			// Marshal rule to JSON as TestNotification expects
			ruleBytes, err := json.Marshal(rule)
			require.NoError(t, err)

			orgID := valuer.GenerateUUID()

			// for saving temp alerts that are triggered via TestNotification
			triggeredTestAlerts := []map[*alertmanagertypes.PostableAlert][]string{}

			// Create manager using test factory with hooks
			mgr := rules.NewTestManager(t, &rules.TestManagerOptions{
				AlertmanagerHook: func(am alertmanager.Alertmanager) {
					fAlert := am.(*alertmanagermock.MockAlertmanager)
					// mock set notification config
					fAlert.On("SetNotificationConfig", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
					// for saving temp alerts that are triggered via TestNotification
					if tc.ExpectAlerts > 0 {
						fAlert.On("TestAlert", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
							triggeredTestAlerts = append(triggeredTestAlerts, args.Get(3).(map[*alertmanagertypes.PostableAlert][]string))
						}).Return(nil).Times(tc.ExpectAlerts)
					}
				},
				ManagerOptionsHook: func(opts *rules.ManagerOptions) {
					opts.PrepareTestRuleFunc = TestNotification
				},
				SqlStoreHook: func(store sqlstore.SQLStore) {
					mockStore := store.(*sqlstoretest.Provider)
					// Mock the organizations query that SendAlerts makes
					// Bun generates: SELECT id FROM organizations LIMIT 1 (or SELECT "id" FROM "organizations" LIMIT 1)
					orgRows := mockStore.Mock().NewRows([]string{"id"}).AddRow(orgID.StringValue())
					// Match bun's generated query pattern - bun may quote identifiers
					mockStore.Mock().ExpectQuery("SELECT (.+) FROM (.+)organizations(.+) LIMIT (.+)").WillReturnRows(orgRows)
				},
				TelemetryStoreHook: func(store telemetrystore.TelemetryStore) {
					telemetryStore := store.(*telemetrystoretest.Provider)
					// Set up mock data for telemetry store
					cols := make([]cmock.ColumnType, 0)
					cols = append(cols, cmock.ColumnType{Name: "value", Type: "Float64"})
					cols = append(cols, cmock.ColumnType{Name: "attr", Type: "String"})
					cols = append(cols, cmock.ColumnType{Name: "ts", Type: "DateTime"})

					alertDataRows := cmock.NewRows(cols, tc.Values)

					mock := telemetryStore.Mock()

					// Generate query arguments for the metric query
					evalTime := time.Now().UTC()
					evalWindow := 5 * time.Minute
					evalDelay := time.Duration(0)
					queryArgs := rules.GenerateMetricQueryCHArgs(
						evalTime,
						evalWindow,
						evalDelay,
						"probe_success",
						metrictypes.Unspecified,
					)

					mock.ExpectQuery("*WITH __temporal_aggregation_cte*").
						WithArgs(queryArgs...).
						WillReturnRows(alertDataRows)
				},
			})

			count, apiErr := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
			if apiErr != nil {
				t.Logf("TestNotification error: %v, type: %s", apiErr.Err, apiErr.Typ)
			}
			require.Nil(t, apiErr)
			assert.Equal(t, tc.ExpectAlerts, count)

			if tc.ExpectAlerts > 0 {
				// check if the alert has been triggered
				require.Len(t, triggeredTestAlerts, 1)
				var gotAlerts []*alertmanagertypes.PostableAlert
				for a := range triggeredTestAlerts[0] {
					gotAlerts = append(gotAlerts, a)
				}
				require.Len(t, gotAlerts, tc.ExpectAlerts)
				// check if the alert has triggered with correct threshold value
				if tc.ExpectValue != 0 {
					assert.Equal(t, strconv.FormatFloat(tc.ExpectValue, 'f', -1, 64), gotAlerts[0].Annotations["value"])
				}
			} else {
				// check if no alerts have been triggered
				assert.Empty(t, triggeredTestAlerts)
			}
		})
	}
}

func TestManager_TestNotification_SendUnmatched_PromRule(t *testing.T) {
	target := 10.0

	for _, tc := range rules.TcTestNotificationSendUnmatchedPromRule {
		t.Run(tc.Name, func(t *testing.T) {
			// Capture base time once per test case to ensure consistent timestamps
			baseTime := time.Now().UTC()

			rule := rules.BuildPromAtLeastOnceValueAbove(target, nil)

			// Marshal rule to JSON as TestNotification expects
			ruleBytes, err := json.Marshal(rule)
			require.NoError(t, err)

			orgID := valuer.GenerateUUID()

			// for saving temp alerts that are triggered via TestNotification
			triggeredTestAlerts := []map[*alertmanagertypes.PostableAlert][]string{}

			// Variable to store promProvider for cleanup
			var promProvider *prometheustest.Provider

			// Create manager using test factory with hooks
			mgr := rules.NewTestManager(t, &rules.TestManagerOptions{
				AlertmanagerHook: func(am alertmanager.Alertmanager) {
					mockAM := am.(*alertmanagermock.MockAlertmanager)
					// mock set notification config
					mockAM.On("SetNotificationConfig", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
					// for saving temp alerts that are triggered via TestNotification
					if tc.ExpectAlerts > 0 {
						mockAM.On("TestAlert", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
							triggeredTestAlerts = append(triggeredTestAlerts, args.Get(3).(map[*alertmanagertypes.PostableAlert][]string))
						}).Return(nil).Times(tc.ExpectAlerts)
					}
				},
				SqlStoreHook: func(store sqlstore.SQLStore) {
					mockStore := store.(*sqlstoretest.Provider)
					// Mock the organizations query that SendAlerts makes
					orgRows := mockStore.Mock().NewRows([]string{"id"}).AddRow(orgID.StringValue())
					mockStore.Mock().ExpectQuery("SELECT (.+) FROM (.+)organizations(.+) LIMIT (.+)").WillReturnRows(orgRows)
				},
				TelemetryStoreHook: func(store telemetrystore.TelemetryStore) {
					mockStore := store.(*telemetrystoretest.Provider)

					// Set up Prometheus-specific mock data
					// Fingerprint columns for Prometheus queries
					fingerprintCols := []cmock.ColumnType{
						{Name: "fingerprint", Type: "UInt64"},
						{Name: "any(labels)", Type: "String"},
					}

					// Samples columns for Prometheus queries
					samplesCols := []cmock.ColumnType{
						{Name: "metric_name", Type: "String"},
						{Name: "fingerprint", Type: "UInt64"},
						{Name: "unix_milli", Type: "Int64"},
						{Name: "value", Type: "Float64"},
						{Name: "flags", Type: "UInt32"},
					}

					// Calculate query time range similar to Prometheus rule tests
					// TestNotification uses time.Now().UTC() for evaluation
					// We calculate the query window based on current time to match what the actual evaluation will use
					evalTime := baseTime
					evalWindowMs := int64(5 * 60 * 1000) // 5 minutes in ms
					evalTimeMs := evalTime.UnixMilli()
					queryStart := ((evalTimeMs-2*evalWindowMs)/60000)*60000 + 1 // truncate to minute + 1ms
					queryEnd := (evalTimeMs / 60000) * 60000                    // truncate to minute

					// Create fingerprint data
					fingerprint := uint64(12345)
					labelsJSON := `{"__name__":"test_metric"}`
					fingerprintData := [][]interface{}{
						{fingerprint, labelsJSON},
					}
					fingerprintRows := cmock.NewRows(fingerprintCols, fingerprintData)

					// Create samples data from test case values, calculating timestamps relative to baseTime
					validSamplesData := make([][]interface{}, 0)
					for _, v := range tc.Values {
						// Skip NaN and Inf values in the samples data
						if math.IsNaN(v.Value) || math.IsInf(v.Value, 0) {
							continue
						}
						// Calculate timestamp relative to baseTime
						sampleTimestamp := baseTime.Add(v.Offset).UnixMilli()
						validSamplesData = append(validSamplesData, []interface{}{
							"test_metric",
							fingerprint,
							sampleTimestamp,
							v.Value,
							uint32(0), // flags - 0 means normal value
						})
					}
					samplesRows := cmock.NewRows(samplesCols, validSamplesData)

					mock := mockStore.Mock()

					// Mock the fingerprint query (for Prometheus label matching)
					mock.ExpectQuery("SELECT fingerprint, any").
						WithArgs("test_metric", "__name__", "test_metric").
						WillReturnRows(fingerprintRows)

					// Mock the samples query (for Prometheus metric data)
					mock.ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
						WithArgs(
							"test_metric",
							"test_metric",
							"__name__",
							"test_metric",
							queryStart,
							queryEnd,
						).
						WillReturnRows(samplesRows)

					// Create Prometheus provider for this test
					promProvider = prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, store)
				},
				ManagerOptionsHook: func(opts *rules.ManagerOptions) {
					// Set Prometheus provider for PromQL queries
					if promProvider != nil {
						opts.Prometheus = promProvider
					}
					opts.PrepareTestRuleFunc = TestNotification
				},
			})

			count, apiErr := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
			if apiErr != nil {
				t.Logf("TestNotification error: %v, type: %s", apiErr.Err, apiErr.Typ)
			}
			require.Nil(t, apiErr)
			assert.Equal(t, tc.ExpectAlerts, count)

			if tc.ExpectAlerts > 0 {
				// check if the alert has been triggered
				require.Len(t, triggeredTestAlerts, 1)
				var gotAlerts []*alertmanagertypes.PostableAlert
				for a := range triggeredTestAlerts[0] {
					gotAlerts = append(gotAlerts, a)
				}
				require.Len(t, gotAlerts, tc.ExpectAlerts)
				// check if the alert has triggered with correct threshold value
				if tc.ExpectValue != 0 && !math.IsNaN(tc.ExpectValue) && !math.IsInf(tc.ExpectValue, 0) {
					assert.Equal(t, strconv.FormatFloat(tc.ExpectValue, 'f', -1, 64), gotAlerts[0].Annotations["value"])
				}
			} else {
				// check if no alerts have been triggered
				assert.Empty(t, triggeredTestAlerts)
			}

			promProvider.Close()
		})
	}
}
@@ -30,8 +30,6 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
		return sqlschema.DataTypeBoolean
	case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
		return sqlschema.DataTypeText
	case "BYTEA":
		return sqlschema.DataTypeBytea
	}

	return formatter.Formatter.DataTypeOf(dataType)
@@ -2,7 +1,6 @@ package postgressqlschema

import (
	"context"
	"database/sql"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
@@ -48,45 +47,50 @@ func (provider *provider) Operator() sqlschema.SQLOperator {
}

func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
	columns := []struct {
		ColumnName  string  `bun:"column_name"`
		Nullable    bool    `bun:"nullable"`
		SQLDataType string  `bun:"udt_name"`
		DefaultVal  *string `bun:"column_default"`
	}{}

	err := provider.
	rows, err := provider.
		sqlstore.
		BunDB().
		NewRaw(`
		QueryContext(ctx, `
SELECT
	c.column_name,
	c.is_nullable = 'YES' as nullable,
	c.is_nullable = 'YES',
	c.udt_name,
	c.column_default
FROM
	information_schema.columns AS c
WHERE
	c.table_name = ?`, string(tableName)).
		Scan(ctx, &columns)
	c.table_name = ?`, string(tableName))
	if err != nil {
		return nil, nil, err
	}
	if len(columns) == 0 {
		return nil, nil, sql.ErrNoRows
	}

	sqlschemaColumns := make([]*sqlschema.Column, 0)
	for _, column := range columns {
		columnDefault := ""
		if column.DefaultVal != nil {
			columnDefault = *column.DefaultVal
	defer func() {
		if err := rows.Close(); err != nil {
			provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
		}
	}()

	columns := make([]*sqlschema.Column, 0)
	for rows.Next() {
		var (
			name        string
			sqlDataType string
			nullable    bool
			defaultVal  *string
		)
		if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
			return nil, nil, err
		}

		sqlschemaColumns = append(sqlschemaColumns, &sqlschema.Column{
			Name:     sqlschema.ColumnName(column.ColumnName),
			Nullable: column.Nullable,
			DataType: provider.fmter.DataTypeOf(column.SQLDataType),
		columnDefault := ""
		if defaultVal != nil {
			columnDefault = *defaultVal
		}

		columns = append(columns, &sqlschema.Column{
			Name:     sqlschema.ColumnName(name),
			Nullable: nullable,
			DataType: provider.fmter.DataTypeOf(sqlDataType),
			Default:  columnDefault,
		})
	}
@@ -204,7 +208,7 @@ WHERE

	return &sqlschema.Table{
		Name:                  tableName,
		Columns:               sqlschemaColumns,
		Columns:               columns,
		PrimaryKeyConstraint:  primaryKeyConstraint,
		ForeignKeyConstraints: foreignKeyConstraints,
	}, uniqueConstraints, nil
@@ -1,153 +0,0 @@
package postgressqlstore

import (
	"strings"

	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun/schema"
)

type formatter struct {
	bunf schema.Formatter
}

func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
	return &formatter{bunf: schema.NewFormatter(dialect)}
}

func (f *formatter) JSONExtractString(column, path string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgres(path)...)
	return sql
}

func (f *formatter) JSONType(column, path string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_typeof("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONIsArray(column, path string) []byte {
	var sql []byte
	sql = append(sql, f.JSONType(column, path)...)
	sql = append(sql, " = "...)
	sql = schema.Append(f.bunf, sql, "array")
	return sql
}

func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, []byte(alias)
}

func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_array_elements_text("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), "::text"...)
}

func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
	var sql []byte
	sql = append(sql, "jsonb_each("...)
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
	sql = append(sql, ") AS "...)
	sql = f.bunf.AppendIdent(sql, alias)

	return sql, append([]byte(alias), ".key"...)
}

func (f *formatter) JSONArrayAgg(expression string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_agg("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}

func (f *formatter) JSONArrayLiteral(values ...string) []byte {
	var sql []byte
	sql = append(sql, "jsonb_build_array("...)
	for idx, value := range values {
		if idx > 0 {
			sql = append(sql, ", "...)
		}
		sql = schema.Append(f.bunf, sql, value)
	}
	sql = append(sql, ')')
	return sql
}

func (f *formatter) TextToJsonColumn(column string) []byte {
	var sql []byte
	sql = f.bunf.AppendIdent(sql, column)
	sql = append(sql, "::jsonb"...)
	return sql
}

func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
	return f.convertJSONPathToPostgresWithMode(jsonPath, true)
}

func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
	path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")

	if path == "" {
		return nil
	}

	parts := strings.Split(path, ".")

	var validParts []string
	for _, part := range parts {
		if part != "" {
			validParts = append(validParts, part)
		}
	}

	if len(validParts) == 0 {
		return nil
	}

	var result []byte

	for idx, part := range validParts {
		if idx == len(validParts)-1 {
			if asText {
				result = append(result, "->>"...)
			} else {
				result = append(result, "->"...)
			}
			result = schema.Append(f.bunf, result, part)
			return result
		}

		result = append(result, "->"...)
		result = schema.Append(f.bunf, result, part)
	}

	return result
}

func (f *formatter) LowerExpression(expression string) []byte {
	var sql []byte
	sql = append(sql, "lower("...)
	sql = append(sql, expression...)
	sql = append(sql, ')')
	return sql
}
@@ -1,500 +0,0 @@
package postgressqlstore

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/uptrace/bun/dialect/pgdialect"
)

func TestJSONExtractString(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.field",
			expected: `"data"->>'field'`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.name",
			expected: `"metadata"->'user'->>'name'`,
		},
		{
			name:     "deeply nested path",
			column:   "json_col",
			path:     "$.level1.level2.level3",
			expected: `"json_col"->'level1'->'level2'->>'level3'`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `"json_col"`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `"data"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONExtractString(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONType(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.field",
			expected: `jsonb_typeof("data"->'field')`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.age",
			expected: `jsonb_typeof("metadata"->'user'->'age')`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `jsonb_typeof("json_col")`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `jsonb_typeof("data")`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONType(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONIsArray(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		expected string
	}{
		{
			name:     "simple path",
			column:   "data",
			path:     "$.items",
			expected: `jsonb_typeof("data"->'items') = 'array'`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.user.tags",
			expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
		},
		{
			name:     "root path",
			column:   "json_col",
			path:     "$",
			expected: `jsonb_typeof("json_col") = 'array'`,
		},
		{
			name:     "empty path",
			column:   "data",
			path:     "",
			expected: `jsonb_typeof("data") = 'array'`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONIsArray(tt.column, tt.path))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONArrayElements(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "elem",
			expected: `jsonb_array_elements("data") AS "elem"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "elem",
			expected: `jsonb_array_elements("data") AS "elem"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.items",
			alias:    "item",
			expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
		},
		{
			name:     "deeply nested path",
			column:   "json_col",
			path:     "$.user.tags",
			alias:    "tag",
			expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONArrayOfStrings(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "str",
			expected: `jsonb_array_elements_text("data") AS "str"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "str",
			expected: `jsonb_array_elements_text("data") AS "str"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.strings",
			alias:    "s",
			expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONKeys(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		path     string
		alias    string
		expected string
	}{
		{
			name:     "root path with dollar sign",
			column:   "data",
			path:     "$",
			alias:    "k",
			expected: `jsonb_each("data") AS "k"`,
		},
		{
			name:     "root path empty",
			column:   "data",
			path:     "",
			alias:    "k",
			expected: `jsonb_each("data") AS "k"`,
		},
		{
			name:     "nested path",
			column:   "metadata",
			path:     "$.object",
			alias:    "key",
			expected: `jsonb_each("metadata"->'object') AS "key"`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
			assert.Equal(t, tt.expected, string(got))
		})
	}
}

func TestJSONArrayAgg(t *testing.T) {
	tests := []struct {
		name       string
		expression string
		expected   string
	}{
		{
			name:       "simple column",
			expression: "id",
			expected:   "jsonb_agg(id)",
		},
		{
			name:       "expression with function",
			expression: "DISTINCT name",
			expected:   "jsonb_agg(DISTINCT name)",
		},
		{
			name:       "complex expression",
			expression: "data->>'field'",
			expected:   "jsonb_agg(data->>'field')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONArrayAgg(tt.expression))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestJSONArrayLiteral(t *testing.T) {
	tests := []struct {
		name     string
		values   []string
		expected string
	}{
		{
			name:     "empty array",
			values:   []string{},
			expected: "jsonb_build_array()",
		},
		{
			name:     "single value",
			values:   []string{"value1"},
			expected: "jsonb_build_array('value1')",
		},
		{
			name:     "multiple values",
			values:   []string{"value1", "value2", "value3"},
			expected: "jsonb_build_array('value1', 'value2', 'value3')",
		},
		{
			name:     "values with special characters",
			values:   []string{"test", "with space", "with-dash"},
			expected: "jsonb_build_array('test', 'with space', 'with-dash')",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.JSONArrayLiteral(tt.values...))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
	tests := []struct {
		name     string
		jsonPath string
		asText   bool
		expected string
	}{
		{
			name:     "simple path as text",
			jsonPath: "$.field",
			asText:   true,
			expected: "->>'field'",
		},
		{
			name:     "simple path as json",
			jsonPath: "$.field",
			asText:   false,
			expected: "->'field'",
		},
		{
			name:     "nested path as text",
			jsonPath: "$.user.name",
			asText:   true,
			expected: "->'user'->>'name'",
		},
		{
			name:     "nested path as json",
			jsonPath: "$.user.name",
			asText:   false,
			expected: "->'user'->'name'",
		},
		{
			name:     "deeply nested as text",
			jsonPath: "$.a.b.c.d",
			asText:   true,
			expected: "->'a'->'b'->'c'->>'d'",
		},
		{
			name:     "root path",
			jsonPath: "$",
			asText:   true,
			expected: "",
		},
		{
			name:     "empty path",
			jsonPath: "",
			asText:   true,
			expected: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New()).(*formatter)
			got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestTextToJsonColumn(t *testing.T) {
	tests := []struct {
		name     string
		column   string
		expected string
	}{
		{
			name:     "simple column name",
			column:   "data",
			expected: `"data"::jsonb`,
		},
		{
			name:     "column with underscore",
			column:   "user_data",
			expected: `"user_data"::jsonb`,
		},
		{
			name:     "column with special characters",
			column:   "json-col",
			expected: `"json-col"::jsonb`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.TextToJsonColumn(tt.column))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestLowerExpression(t *testing.T) {
	tests := []struct {
		name     string
		expr     string
		expected string
	}{
		{
			name:     "simple column name",
			expr:     "name",
			expected: "lower(name)",
		},
		{
			name:     "quoted column identifier",
			expr:     `"column_name"`,
			expected: `lower("column_name")`,
		},
		{
			name:     "jsonb text extraction",
			expr:     "data->>'field'",
			expected: "lower(data->>'field')",
		},
		{
			name:     "nested jsonb extraction",
			expr:     "metadata->'user'->>'name'",
			expected: "lower(metadata->'user'->>'name')",
		},
		{
			name:     "jsonb_typeof expression",
			expr:     "jsonb_typeof(data->'field')",
			expected: "lower(jsonb_typeof(data->'field'))",
		},
		{
			name:     "string concatenation",
			expr:     "first_name || ' ' || last_name",
			expected: "lower(first_name || ' ' || last_name)",
		},
		{
			name:     "CAST expression",
			expr:     "CAST(value AS TEXT)",
			expected: "lower(CAST(value AS TEXT))",
		},
		{
			name:     "COALESCE expression",
			expr:     "COALESCE(name, 'default')",
			expected: "lower(COALESCE(name, 'default'))",
		},
		{
			name:     "subquery column",
			expr:     "users.email",
			expected: "lower(users.email)",
		},
		{
			name:     "quoted identifier with special chars",
			expr:     `"user-name"`,
			expected: `lower("user-name")`,
		},
		{
			name:     "jsonb to text cast",
			expr:     "data::text",
			expected: "lower(data::text)",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f := newFormatter(pgdialect.New())
			got := string(f.LowerExpression(tt.expr))
			assert.Equal(t, tt.expected, got)
		})
	}
}
@@ -15,11 +15,10 @@ import (
)

type provider struct {
	settings  factory.ScopedProviderSettings
	sqldb     *sql.DB
	bundb     *sqlstore.BunDB
	dialect   *dialect
	formatter sqlstore.SQLFormatter
	settings factory.ScopedProviderSettings
	sqldb    *sql.DB
	bundb    *sqlstore.BunDB
	dialect  *dialect
}

func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
@@ -56,14 +55,11 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config

	sqldb := stdlib.OpenDBFromPool(pool)

	pgDialect := pgdialect.New()
	bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
	return &provider{
		settings:  settings,
		sqldb:     sqldb,
		bundb:     bunDB,
		dialect:   new(dialect),
		formatter: newFormatter(bunDB.Dialect()),
		settings: settings,
		sqldb:    sqldb,
		bundb:    sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
		dialect:  new(dialect),
	}, nil
}

@@ -79,10 +75,6 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
	return provider.dialect
}

func (provider *provider) Formatter() sqlstore.SQLFormatter {
	return provider.formatter
}

func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
	return provider.bundb.BunDBCtx(ctx)
}
@@ -1,10 +1,10 @@
package zeus

import (
	"fmt"
	neturl "net/url"
	"sync"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/zeus"
)

@@ -24,17 +24,17 @@ func Config() zeus.Config {
	once.Do(func() {
		parsedURL, err := neturl.Parse(url)
		if err != nil {
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus URL"))
			panic(fmt.Errorf("invalid zeus URL: %w", err))
		}

		deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
		if err != nil {
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus deprecated URL"))
			panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
		}

		config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
		if err := config.Validate(); err != nil {
			panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus config"))
			panic(fmt.Errorf("invalid zeus config: %w", err))
		}
	})
@@ -1,484 +0,0 @@
# Persona
You are an expert developer with deep knowledge of Jest, React Testing Library, MSW, and TypeScript, tasked with creating unit tests for this repository.

# Auto-detect TypeScript Usage
Check for TypeScript in the project through tsconfig.json or package.json dependencies.
Adjust syntax based on this detection.
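
A quick sketch of that detection, assuming a plain Node environment and the conventional file locations (`tsconfig.json`, `package.json` at the project root); adapt the paths if the project nests its frontend:

```ts
import * as fs from 'fs';

// Returns true when the project looks like a TypeScript project.
// File names below are the conventional ones, not repo-verified.
function usesTypeScript(root = '.'): boolean {
	if (fs.existsSync(`${root}/tsconfig.json`)) return true;
	const pkgPath = `${root}/package.json`;
	if (!fs.existsSync(pkgPath)) return false;
	const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
	return Boolean(pkg.dependencies?.typescript || pkg.devDependencies?.typescript);
}
```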

# TypeScript Type Safety for Jest Tests
**CRITICAL**: All Jest tests MUST be fully type-safe with proper TypeScript types.

**Type Safety Requirements:**
- Use proper TypeScript interfaces for all mock data
- Type all Jest mock functions with `jest.MockedFunction<T>`
- Use generic types for React components and hooks
- Define proper return types for mock functions
- Use `as const` for literal types when needed
- Avoid `any` type – use proper typing instead

# Unit Testing Focus
Focus on critical functionality (business logic, utility functions, component behavior).
Mock dependencies (API calls, external modules) before imports.
Test multiple data scenarios (valid inputs, invalid inputs, edge cases).
Write maintainable tests with descriptive names grouped in describe blocks (see the sketch below).
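
A minimal sketch of that shape; `formatDuration` is a hypothetical utility used only to illustrate the structure (one describe block, descriptive names, valid/invalid/edge inputs):

```ts
import { formatDuration } from 'utils/formatDuration'; // hypothetical module

describe('formatDuration', () => {
	it('formats a valid millisecond value', () => {
		expect(formatDuration(1500)).toBe('1.5s'); // valid input
	});

	it('returns a placeholder for negative input', () => {
		expect(formatDuration(-1)).toBe('-'); // invalid input
	});

	it('handles zero', () => {
		expect(formatDuration(0)).toBe('0ms'); // edge case
	});
});
```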

# Global vs Local Mocks
**Use Global Mocks for:**
- High-frequency dependencies (20+ test files)
- Core infrastructure (react-router-dom, react-query, antd)
- Standard implementations across the app
- Browser APIs (ResizeObserver, matchMedia, localStorage)
- Utility libraries (date-fns, lodash)

**Use Local Mocks for:**
- Business logic dependencies (5-15 test files)
- Test-specific behavior (different data per test)
- API endpoints with specific responses
- Domain-specific components
- Error scenarios and edge cases

**Global Mock Files Available (from jest.config.ts):**
- `uplot` → `__mocks__/uplotMock.ts`

# Repo-specific Testing Conventions

## Imports
Always import from our harness:
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
```
For API mocks:
```ts
import { server, rest } from 'mocks-server/server';
```
Do not import directly from `@testing-library/react`.

## Router
Use the router built into render:
```ts
render(<Page />, undefined, { initialRoute: '/traces-explorer' });
```
Only mock `useLocation` / `useParams` if the test depends on them.

## Hook Mocks
Pattern:
```ts
import useFoo from 'hooks/useFoo';
jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);
mockUseFoo.mockReturnValue(/* minimal shape */ as any);
```
Prefer helpers (`rqSuccess`, `rqLoading`, `rqError`) for React Query results, as sketched below.
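
A sketch of the helper-based variant; the exact signature of `rqSuccess` lives in the repo's test utilities, so the import path and result shape here are assumptions:

```ts
import useFoo from 'hooks/useFoo';
import { rqSuccess } from 'tests/test-utils'; // assumed export location

jest.mock('hooks/useFoo');
const mockUseFoo = jest.mocked(useFoo);

// rqSuccess is assumed to wrap the payload in a successful React Query
// result shape ({ data, isLoading: false, isError: false, ... }).
mockUseFoo.mockReturnValue(rqSuccess({ items: [] }));
```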

## MSW
Global MSW server runs automatically.
Override per-test:
```ts
server.use(
	rest.get('*/api/v1/foo', (_req, res, ctx) => res(ctx.status(200), ctx.json({ ok: true })))
);
```
Keep large responses in `mocks-server/__mockdata_`.

## Interactions
- Prefer `userEvent` for real user interactions (click, type, select, tab).
- Use `fireEvent` only for low-level/programmatic events not covered by `userEvent` (e.g., scroll, resize, setting `element.scrollTop` for virtualization). Wrap in `act(...)` if needed.
- Always await interactions:
```ts
const user = userEvent.setup({ pointerEventsCheck: 0 });
await user.click(screen.getByRole('button', { name: /save/i }));
```

```ts
// Example: virtualized list scroll (no userEvent helper)
const scroller = container.querySelector('[data-test-id="virtuoso-scroller"]') as HTMLElement;
scroller.scrollTop = targetScrollTop;
act(() => { fireEvent.scroll(scroller); });
```

## Timers
❌ No global fake timers.
✅ Per-test only, for debounce/throttle:
```ts
jest.useFakeTimers();
const user = userEvent.setup({ advanceTimers: (ms) => jest.advanceTimersByTime(ms) });
await user.type(screen.getByRole('textbox'), 'query');
jest.advanceTimersByTime(400);
jest.useRealTimers();
```

## Queries
Prefer accessible queries (`getByRole`, `findByRole`, `getByLabelText`).
Fallback: visible text.
Last resort: `data-testid`.
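
For example, the same save button looked up in that order (selectors illustrative):

```ts
screen.getByRole('button', { name: /save/i }); // 1. accessible query
screen.getByText(/save/i);                     // 2. visible text fallback
screen.getByTestId('save-button');             // 3. last resort
```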

# Example Test (using only configured global mocks)
```ts
import { render, screen, userEvent, waitFor } from 'tests/test-utils';
import { server, rest } from 'mocks-server/server';
import MyComponent from '../MyComponent';

describe('MyComponent', () => {
	it('renders and interacts', async () => {
		const user = userEvent.setup({ pointerEventsCheck: 0 });

		server.use(
			rest.get('*/api/v1/example', (_req, res, ctx) => res(ctx.status(200), ctx.json({ value: 42 })))
		);

		render(<MyComponent />, undefined, { initialRoute: '/foo' });

		expect(await screen.findByText(/value: 42/i)).toBeInTheDocument();
		await user.click(screen.getByRole('button', { name: /refresh/i }));
		await waitFor(() => expect(screen.getByText(/loading/i)).toBeInTheDocument());
	});
});
```

# Anti-patterns
❌ Importing RTL directly
❌ Using global fake timers
❌ Wrapping render in `act(...)`
❌ Mocking infra dependencies locally (router, react-query)
✅ Use our harness (`tests/test-utils`)
✅ Use MSW for API overrides
✅ Use userEvent + await
✅ Pin time only in tests that assert relative dates

# Best Practices
- **Critical Functionality**: Prioritize testing business logic and utilities
- **Dependency Mocking**: Global mocks for infra, local mocks for business logic
- **Data Scenarios**: Always test valid, invalid, and edge cases
- **Descriptive Names**: Make test intent clear
- **Organization**: Group related tests in describe
- **Consistency**: Match repo conventions
- **Edge Cases**: Test null, undefined, unexpected values
- **Limit Scope**: 3–5 focused tests per file
- **Use Helpers**: `rqSuccess`, `makeUser`, etc.
- **No Any**: Enforce type safety
# TypeScript Type Safety Examples

## Proper Mock Typing
```ts
// ✅ GOOD - Properly typed mocks
interface User {
	id: number;
	name: string;
	email: string;
}

interface ApiResponse<T> {
	data: T;
	status: number;
	message: string;
}

// Type the mock functions
const mockFetchUser = jest.fn() as jest.MockedFunction<(id: number) => Promise<ApiResponse<User>>>;
const mockUpdateUser = jest.fn() as jest.MockedFunction<(user: User) => Promise<ApiResponse<User>>>;

// Mock implementation with proper typing
mockFetchUser.mockResolvedValue({
	data: { id: 1, name: 'John Doe', email: 'john@example.com' },
	status: 200,
	message: 'Success'
});

// ❌ BAD - Using any type
const mockFetchUser = jest.fn() as any; // Don't do this
```

## React Component Testing with Types
```ts
// ✅ GOOD - Properly typed component testing
interface ComponentProps {
	title: string;
	data: User[];
	onUserSelect: (user: User) => void;
	isLoading?: boolean;
}

const TestComponent: React.FC<ComponentProps> = ({ title, data, onUserSelect, isLoading = false }) => {
	// Component implementation
};

describe('TestComponent', () => {
	it('should render with proper props', () => {
		// Arrange - Type the props properly
		const mockProps: ComponentProps = {
			title: 'Test Title',
			data: [{ id: 1, name: 'John', email: 'john@example.com' }],
			onUserSelect: jest.fn() as jest.MockedFunction<(user: User) => void>,
			isLoading: false
		};

		// Act
		render(<TestComponent {...mockProps} />);

		// Assert
		expect(screen.getByText('Test Title')).toBeInTheDocument();
	});
});
```

## Hook Testing with Types
```ts
// ✅ GOOD - Properly typed hook testing
interface UseUserDataReturn {
	user: User | null;
	loading: boolean;
	error: string | null;
	refetch: () => void;
}

const useUserData = (id: number): UseUserDataReturn => {
	// Hook implementation
};

describe('useUserData', () => {
	it('should return user data with proper typing', () => {
		// Arrange
		const mockUser: User = { id: 1, name: 'John', email: 'john@example.com' };
		mockFetchUser.mockResolvedValue({
			data: mockUser,
			status: 200,
			message: 'Success'
		});

		// Act
		const { result } = renderHook(() => useUserData(1));

		// Assert
		expect(result.current.user).toEqual(mockUser);
		expect(result.current.loading).toBe(false);
		expect(result.current.error).toBeNull();
	});
});
```

## Global Mock Type Safety
```ts
// ✅ GOOD - Type-safe global mocks
// In __mocks__/routerMock.ts
export const mockUseLocation = (overrides: Partial<Location> = {}): Location => ({
	pathname: '/traces',
	search: '',
	hash: '',
	state: null,
	key: 'test-key',
	...overrides,
});

// In test files
const location = useLocation(); // Properly typed from global mock
expect(location.pathname).toBe('/traces');
```

# TypeScript Configuration for Jest

## Required Jest Configuration
```json
// jest.config.ts
{
	"preset": "ts-jest/presets/js-with-ts-esm",
	"globals": {
		"ts-jest": {
			"useESM": true,
			"isolatedModules": true,
			"tsconfig": "<rootDir>/tsconfig.jest.json"
		}
	},
	"extensionsToTreatAsEsm": [".ts", ".tsx"],
	"moduleFileExtensions": ["ts", "tsx", "js", "json"]
}
```

## TypeScript Jest Configuration
```json
// tsconfig.jest.json
{
	"extends": "./tsconfig.json",
	"compilerOptions": {
		"types": ["jest", "@testing-library/jest-dom"],
		"esModuleInterop": true,
		"allowSyntheticDefaultImports": true,
		"moduleResolution": "node"
	},
	"include": [
		"src/**/*",
		"**/*.test.ts",
		"**/*.test.tsx",
		"__mocks__/**/*"
	]
}
```

## Common Type Safety Patterns

### Mock Function Typing
```ts
// ✅ GOOD - Proper mock function typing
const mockApiCall = jest.fn() as jest.MockedFunction<typeof apiCall>;
const mockEventHandler = jest.fn() as jest.MockedFunction<(event: Event) => void>;

// ❌ BAD - Using any
const mockApiCall = jest.fn() as any;
```

### Generic Mock Typing
```ts
// ✅ GOOD - Generic mock typing
interface MockApiResponse<T> {
	data: T;
	status: number;
}

const mockFetchData = jest.fn() as jest.MockedFunction<
	<T>(endpoint: string) => Promise<MockApiResponse<T>>
>;

// Usage
mockFetchData<User>('/users').mockResolvedValue({
	data: { id: 1, name: 'John' },
	status: 200
});
```

### React Testing Library with Types
```ts
// ✅ GOOD - Typed testing utilities
import { render, screen, RenderResult } from '@testing-library/react';
import { ComponentProps } from 'react';

type TestComponentProps = ComponentProps<typeof TestComponent>;

const renderTestComponent = (props: Partial<TestComponentProps> = {}): RenderResult => {
	const defaultProps: TestComponentProps = {
		title: 'Test',
		data: [],
		onSelect: jest.fn(),
		...props
	};

	return render(<TestComponent {...defaultProps} />);
};
```

### Error Handling with Types
```ts
// ✅ GOOD - Typed error handling
interface ApiError {
	message: string;
	code: number;
	details?: Record<string, unknown>;
}

const mockApiError: ApiError = {
	message: 'API Error',
	code: 500,
	details: { endpoint: '/users' }
};

mockFetchUser.mockRejectedValue(new Error(JSON.stringify(mockApiError)));
```

## Type Safety Checklist
- [ ] All mock functions use `jest.MockedFunction<T>`
- [ ] All mock data has proper interfaces
- [ ] No `any` types in test files
- [ ] Generic types are used where appropriate
- [ ] Error types are properly defined
- [ ] Component props are typed
- [ ] Hook return types are defined
- [ ] API response types are defined
- [ ] Global mocks are type-safe
- [ ] Test utilities are properly typed

# Mock Decision Tree
```
Is it used in 20+ test files?
├─ YES → Use Global Mock
│   ├─ react-router-dom
│   ├─ react-query
│   ├─ antd components
│   └─ browser APIs
│
└─ NO → Is it business logic?
    ├─ YES → Use Local Mock
    │   ├─ API endpoints
    │   ├─ Custom hooks
    │   └─ Domain components
    │
    └─ NO → Is it test-specific?
        ├─ YES → Use Local Mock
        │   ├─ Error scenarios
        │   ├─ Loading states
        │   └─ Specific data
        │
        └─ NO → Consider Global Mock
            └─ If it becomes frequently used
```

# Common Anti-Patterns to Avoid

❌ **Don't mock global dependencies locally:**
```js
// BAD - This is already globally mocked
jest.mock('react-router-dom', () => ({ ... }));
```

❌ **Don't create global mocks for test-specific data:**
```js
// BAD - This should be local
jest.mock('../api/tracesService', () => ({
	getTraces: jest.fn(() => specificTestData)
}));
```

✅ **Do use global mocks for infrastructure:**
```js
// GOOD - Use global mock
import { useLocation } from 'react-router-dom';
```

✅ **Do create local mocks for business logic:**
```js
// GOOD - Local mock for specific test needs
jest.mock('../api/tracesService', () => ({
	getTraces: jest.fn(() => mockTracesData)
}));
```
@@ -1,7 +1,4 @@
node_modules
build
*.typegen.ts
i18-generate-hash.js
src/parser/TraceOperatorParser/**

orval.config.ts
i18-generate-hash.js
@@ -1,8 +1,5 @@
/**
 * ESLint Configuration for SigNoz Frontend
 */
module.exports = {
  ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
  ignorePatterns: ['src/parser/*.ts'],
  env: {
    browser: true,
    es2021: true,
@@ -10,12 +7,16 @@ module.exports = {
    'jest/globals': true,
  },
  extends: [
    'airbnb',
    'airbnb-typescript',
    'eslint:recommended',
    'plugin:react/recommended',
    'plugin:@typescript-eslint/recommended',
    'plugin:react-hooks/recommended',
    'plugin:@typescript-eslint/eslint-recommended',
    'plugin:prettier/recommended',
    'plugin:sonarjs/recommended',
    'plugin:import/errors',
    'plugin:import/warnings',
    'plugin:react/jsx-runtime',
  ],
  parser: '@typescript-eslint/parser',
@@ -24,21 +25,16 @@ module.exports = {
    ecmaFeatures: {
      jsx: true,
    },
    ecmaVersion: 2021,
    ecmaVersion: 12,
    sourceType: 'module',
  },
  plugins: [
    'react', // React-specific rules
    '@typescript-eslint', // TypeScript linting
    'simple-import-sort', // Auto-sort imports
    'react-hooks', // React Hooks rules
    'prettier', // Code formatting
    'jest', // Jest test rules
    'jsx-a11y', // Accessibility rules
    'import', // Import/export linting
    'sonarjs', // Code quality/complexity
    // TODO: Uncomment after running: yarn add -D eslint-plugin-spellcheck
    // 'spellcheck', // Correct spellings
    'react',
    '@typescript-eslint',
    'simple-import-sort',
    'react-hooks',
    'prettier',
    'jest',
  ],
  settings: {
    react: {
@@ -52,109 +48,77 @@ module.exports = {
    },
  },
  rules: {
    // Code quality rules
    'prefer-const': 'error', // Enforces const for variables never reassigned
    'no-var': 'error', // Disallows var, enforces let/const
    'no-else-return': ['error', { allowElseIf: false }], // Reduces nesting by disallowing else after return
    'no-cond-assign': 'error', // Prevents accidental assignment in conditions (if (x = 1) instead of if (x === 1))
    'no-debugger': 'error', // Disallows debugger statements in production code
    curly: 'error', // Requires curly braces for all control statements
    eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
    'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error

    // TypeScript rules
    '@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions
    '@typescript-eslint/no-unused-vars': [
      // Disallows unused variables/args
      'error',
      {
        argsIgnorePattern: '^_', // Allows unused args prefixed with _ (e.g., _unusedParam)
        varsIgnorePattern: '^_', // Allows unused vars prefixed with _ (e.g., _unusedVar)
      },
    ],
    '@typescript-eslint/no-explicit-any': 'warn', // Warns when using 'any' type (consider upgrading to error)
    // TODO: Change to 'error' after fixing ~80 empty function placeholders in providers/contexts
    '@typescript-eslint/no-empty-function': 'off', // Disallows empty function bodies
    '@typescript-eslint/no-var-requires': 'error', // Disallows require() in TypeScript (use import instead)
    '@typescript-eslint/ban-ts-comment': 'off', // Allows @ts-ignore comments (sometimes needed for third-party libs)
    'no-empty-function': 'off', // Disabled in favor of TypeScript version above

    // React rules
    'react/jsx-filename-extension': [
      'error',
      {
        extensions: ['.tsx', '.jsx'], // Warns if JSX is used in non-.jsx/.tsx files
        extensions: ['.tsx', '.js', '.jsx'],
      },
    ],
    'react/prop-types': 'off', // Disabled - using TypeScript instead
    'react/jsx-props-no-spreading': 'off', // Allows {...props} spreading (common in HOCs, forms, wrappers)
    'react/no-array-index-key': 'error', // Prevents using array index as key (causes bugs when list changes)

    // Accessibility rules
    'jsx-a11y/label-has-associated-control': [
    'react/prop-types': 'off',
    '@typescript-eslint/explicit-function-return-type': 'error',
    '@typescript-eslint/no-var-requires': 'error',
    'react/no-array-index-key': 'error',
    'linebreak-style': [
      'error',
      {
        required: {
          some: ['nesting', 'id'], // Labels must either wrap inputs or use htmlFor/id
        },
      },
      process.env.platform === 'win32' ? 'windows' : 'unix',
    ],
    '@typescript-eslint/default-param-last': 'off',

    // React Hooks rules
    'react-hooks/rules-of-hooks': 'error', // Enforces Rules of Hooks (only call at top level)
    'react-hooks/exhaustive-deps': 'warn', // Warns about missing dependencies in useEffect/useMemo/useCallback
    // simple sort error
    'simple-import-sort/imports': 'error',
    'simple-import-sort/exports': 'error',

    // Import/export rules
    // hooks
    'react-hooks/rules-of-hooks': 'error',
    'react-hooks/exhaustive-deps': 'error',

    // airbnb
    'no-underscore-dangle': 'off',
    'no-console': 'off',
    'import/prefer-default-export': 'off',
    'import/extensions': [
      'error',
      'ignorePackages',
      {
        js: 'never', // Disallows .js extension in imports
        jsx: 'never', // Disallows .jsx extension in imports
        ts: 'never', // Disallows .ts extension in imports
        tsx: 'never', // Disallows .tsx extension in imports
        js: 'never',
        jsx: 'never',
        ts: 'never',
        tsx: 'never',
      },
    ],
    'import/no-extraneous-dependencies': ['error', { devDependencies: true }], // Prevents importing packages not in package.json
    // 'import/no-cycle': 'warn', // TODO: Enable later to detect circular dependencies

    // Import sorting rules
    'simple-import-sort/imports': [
    'import/no-extraneous-dependencies': ['error', { devDependencies: true }],
    'no-plusplus': 'off',
    'jsx-a11y/label-has-associated-control': [
      'error',
      {
        groups: [
          ['^react', '^@?\\w'], // React first, then external packages
          ['^@/'], // Absolute imports with @ alias
          ['^\\u0000'], // Side effect imports (import './file')
          ['^\\.'], // Relative imports
          ['^.+\\.s?css$'], // Style imports
        ],
        required: {
          some: ['nesting', 'id'],
        },
      },
    ],
    'simple-import-sort/exports': 'error', // Auto-sorts exports
    'jsx-a11y/label-has-for': [
      'error',
      {
        required: {
          some: ['nesting', 'id'],
        },
      },
    ],
    '@typescript-eslint/no-unused-vars': 'error',
    'func-style': ['error', 'declaration', { allowArrowFunctions: true }],
    'arrow-body-style': ['error', 'as-needed'],

    // Prettier - code formatting
    // eslint rules need to remove
    '@typescript-eslint/no-shadow': 'off',
    'import/no-cycle': 'off',
    // https://typescript-eslint.io/rules/consistent-return/ check the warning for details
    'consistent-return': 'off',
    'prettier/prettier': [
      'error',
      {},
      {
        usePrettierrc: true, // Uses .prettierrc.json for formatting rules
        usePrettierrc: true,
      },
    ],

    // SonarJS - code quality and complexity
    'sonarjs/no-duplicate-string': 'off', // Disabled - can be noisy (enable periodically to check)
  },
  overrides: [
    {
      files: ['src/api/generated/**/*.ts'],
      rules: {
        '@typescript-eslint/explicit-function-return-type': 'off',
        '@typescript-eslint/explicit-module-boundary-types': 'off',
        'no-nested-ternary': 'off',
        '@typescript-eslint/no-unused-vars': 'warn',
        'sonarjs/no-duplicate-string': 'off',
      },
    },
  ],
};

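For a quick feel of what the newly documented rules enforce, a few illustrative snippets (not taken from the codebase):

```js
// 'no-else-return' with { allowElseIf: false }: return early, drop the else.
function statusLabel(code) {
  if (code >= 500) {
    return 'server error';
  }
  return 'ok';
}

// eqeqeq with { null: 'ignore' }: strict equality everywhere, but the
// loose null check stays legal and matches both null and undefined.
const isMissing = (value) => value == null;

// '@typescript-eslint/no-unused-vars' with argsIgnorePattern '^_':
// intentionally unused parameters pass when prefixed with _.
const onEvent = (_event, payload) => payload;
```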
@@ -10,6 +10,4 @@ public/
**/*.json

# Ignore all files in parser folder:
src/parser/**

src/TraceOperator/parser/**
src/parser/**
@@ -4,14 +4,5 @@
  "tabWidth": 1,
  "singleQuote": true,
  "jsxSingleQuote": false,
  "semi": true,
  "printWidth": 80,
  "bracketSpacing": true,
  "jsxBracketSameLine": false,
  "arrowParens": "always",
  "endOfLine": "lf",
  "quoteProps": "as-needed",
  "proseWrap": "preserve",
  "htmlWhitespaceSensitivity": "css",
  "embeddedLanguageFormatting": "auto"
  "semi": true
}

8
frontend/.vscode/settings.json
vendored
@@ -1,8 +0,0 @@
{
  "editor.formatOnSave": true,
  "editor.defaultFormatter": "esbenp.prettier-vscode",
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": "explicit"
  },
  "prettier.requireConfig": true
}
@@ -34,18 +34,12 @@ Embrace the spirit of collaboration and contribute to the success of our open-so
### Linting and Setup

- It is crucial to refrain from disabling ESLint and TypeScript errors within the project. If there is a specific rule that needs to be disabled, provide a clear and justified explanation for doing so. Maintaining the integrity of the linting and type-checking processes ensures code quality and consistency throughout the codebase.
- In our project, we rely on several essential ESLint plugins and configurations:
- In our project, we rely on several essential ESLint plugins, namely:
  - [plugin:@typescript-eslint](https://typescript-eslint.io/rules/)
  - [airbnb styleguide](https://github.com/airbnb/javascript)
  - [plugin:sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs)

  - [eslint:recommended](https://eslint.org/docs/latest/rules/) - Core ESLint rules for JavaScript best practices
  - [plugin:@typescript-eslint](https://typescript-eslint.io/rules/) - TypeScript-specific linting rules
  - [plugin:react](https://github.com/jsx-eslint/eslint-plugin-react) - React best practices and patterns
  - [plugin:react-hooks](https://www.npmjs.com/package/eslint-plugin-react-hooks) - Rules of Hooks enforcement
  - [plugin:sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs) - Code quality and complexity analysis
  - [plugin:prettier](https://github.com/prettier/eslint-plugin-prettier) - Code formatting via Prettier
  - [simple-import-sort](https://github.com/lydell/eslint-plugin-simple-import-sort) - Automatic import organization
  - [plugin:jsx-a11y](https://github.com/jsx-eslint/eslint-plugin-jsx-a11y) - Accessibility rules for JSX elements

To ensure compliance with our coding standards and best practices, we encourage you to refer to the documentation of these plugins. Familiarizing yourself with the ESLint rules they provide will help maintain code quality and consistency throughout the project.
To ensure compliance with our coding standards and best practices, we encourage you to refer to the documentation of these plugins. Familiarizing yourself with the ESLint rules they provide will help maintain code quality and consistency throughout the project.

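Per the first bullet in this section, a disable that does slip through should be scoped to a single line and carry its justification. A hypothetical example:

```js
const usageText = 'usage: signoz-tool [options]'; // illustrative string
// eslint-disable-next-line no-console -- intentional: CLI entry point prints usage help
console.log(usageText);
```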
### Naming Conventions

@@ -1,51 +0,0 @@
/* eslint-disable @typescript-eslint/no-unused-vars */

// Mock for uplot library used in tests
export interface MockUPlotInstance {
  setData: jest.Mock;
  setSize: jest.Mock;
  destroy: jest.Mock;
  redraw: jest.Mock;
  setSeries: jest.Mock;
}

export interface MockUPlotPaths {
  spline: jest.Mock;
  bars: jest.Mock;
}

// Create mock instance methods
const createMockUPlotInstance = (): MockUPlotInstance => ({
  setData: jest.fn(),
  setSize: jest.fn(),
  destroy: jest.fn(),
  redraw: jest.fn(),
  setSeries: jest.fn(),
});

// Create mock paths
const mockPaths: MockUPlotPaths = {
  spline: jest.fn(),
  bars: jest.fn(),
};

// Mock static methods
const mockTzDate = jest.fn(
  (date: Date, _timezone: string) => new Date(date.getTime()),
);

// Mock uPlot constructor - this needs to be a proper constructor function
function MockUPlot(
  _options: unknown,
  _data: unknown,
  _target: HTMLElement,
): MockUPlotInstance {
  return createMockUPlotInstance();
}

// Add static methods to the constructor
MockUPlot.tzDate = mockTzDate;
MockUPlot.paths = mockPaths;

// Export the constructor as default
export default MockUPlot;
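Since this mock's default export mimics uPlot's constructor (plus the `tzDate`/`paths` statics), a test could have exercised it roughly like this - a sketch only, relying on the `'^uplot$'` mapping visible in the jest.config diff further down:

```js
import uPlot from 'uplot'; // redirected to the mock by moduleNameMapper

it('records chart calls without a real canvas', () => {
  const chart = new uPlot({}, [[]], document.createElement('div'));
  chart.setSize({ width: 400, height: 200 });
  expect(chart.setSize).toHaveBeenCalledWith({ width: 400, height: 200 });
});
```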
@@ -1,29 +0,0 @@
// Mock for useSafeNavigate hook to avoid React Router version conflicts in tests
interface SafeNavigateOptions {
  replace?: boolean;
  state?: unknown;
}

interface SafeNavigateTo {
  pathname?: string;
  search?: string;
  hash?: string;
}

type SafeNavigateToType = string | SafeNavigateTo;

interface UseSafeNavigateReturn {
  safeNavigate: jest.MockedFunction<
    (to: SafeNavigateToType, options?: SafeNavigateOptions) => void
  >;
}

export const useSafeNavigate = (): UseSafeNavigateReturn => ({
  safeNavigate: jest.fn(
    (to: SafeNavigateToType, options?: SafeNavigateOptions) => {
      console.log(`Mock safeNavigate called with:`, to, options);
    },
  ) as jest.MockedFunction<
    (to: SafeNavigateToType, options?: SafeNavigateOptions) => void
  >,
});
@@ -3,6 +3,5 @@ BUNDLE_ANALYSER="true"
FRONTEND_API_ENDPOINT="http://localhost:8080/"
PYLON_APP_ID="pylon-app-id"
APPCUES_APP_ID="appcess-app-id"
PYLON_IDENTITY_SECRET="pylon-identity-secret"

CI="1"
@@ -1,9 +1,6 @@
import type { Config } from '@jest/types';

const USE_SAFE_NAVIGATE_MOCK_PATH = '<rootDir>/__mocks__/useSafeNavigate.ts';

const config: Config.InitialOptions = {
  silent: true,
  clearMocks: true,
  coverageDirectory: 'coverage',
  coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
@@ -13,10 +10,6 @@ const config: Config.InitialOptions = {
  moduleNameMapper: {
    '\\.(css|less|scss)$': '<rootDir>/__mocks__/cssMock.ts',
    '\\.md$': '<rootDir>/__mocks__/cssMock.ts',
    '^uplot$': '<rootDir>/__mocks__/uplotMock.ts',
    '^hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
    '^src/hooks/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
    '^.*/useSafeNavigate$': USE_SAFE_NAVIGATE_MOCK_PATH,
  },
  globals: {
  extensionsToTreatAsEsm: ['.ts'],

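The removed `useSafeNavigate` entries above are regex keys: Jest tests every import specifier against `moduleNameMapper` before normal resolution, so each path variant of the hook landed on the same shared mock. A minimal sketch of the mechanism, with the paths taken from the diff above:

```js
// Sketch: the first matching regex rewrites where an import resolves.
module.exports = {
  moduleNameMapper: {
    // any specifier ending in "/useSafeNavigate" -> the shared mock
    '^.*/useSafeNavigate$': '<rootDir>/__mocks__/useSafeNavigate.ts',
  },
};
```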
@@ -1,97 +0,0 @@
/**
 * When making changes to this file, remove this the file name from .eslintignore and tsconfig.json
 * The reason this is required because of the moduleResolution being "node". Changing this is a more detailed effort.
 * So, until then, we will keep this file ignored for eslint and typescript.
 */

import { defineConfig } from 'orval';

export default defineConfig({
  signoz: {
    input: {
      target: '../docs/api/openapi.yml',
    },
    output: {
      target: './src/api/generated/services',
      client: 'react-query',
      httpClient: 'axios',
      mode: 'tags-split',
      prettier: true,
      headers: true,
      clean: true,
      override: {
        query: {
          useQuery: true,
          useMutation: true,
          useInvalidate: true,
          signal: true,
          useOperationIdAsQueryKey: true,
        },
        useDates: true,
        useNamedParameters: true,
        enumGenerationType: 'enum',
        mutator: {
          path: './src/api/index.ts',
          name: 'GeneratedAPIInstance',
        },

        jsDoc: {
          filter: (schema) => {
            const allowlist = [
              'type',
              'format',
              'maxLength',
              'minLength',
              'description',
              'minimum',
              'maximum',
              'exclusiveMinimum',
              'exclusiveMaximum',
              'pattern',
              'nullable',
              'enum',
            ];
            return Object.entries(schema || {})
              .filter(([key]) => allowlist.includes(key))
              .map(([key, value]: [string, any]) => ({
                key,
                value,
              }))
              .sort((a, b) => a.key.length - b.key.length);
          },
        },

        components: {
          schemas: {
            suffix: 'DTO',
          },
          responses: {
            suffix: 'Response',
          },
          parameters: {
            suffix: 'Params',
          },
          requestBodies: {
            suffix: 'Body',
          },
        },

        // info is of type InfoObject from openapi spec
        header: (info: { title: string; version: string }): string[] => [
          `! Do not edit manually`,
          `* The file has been auto-generated using Orval for SigNoz`,
          `* regenerate with 'yarn generate:api'`,
          ...(info.title ? [info.title] : []),
          ...(info.version ? [`OpenAPI spec version: ${info.version}`] : []),
        ],

        // @ts-expect-error
        // propertySortOrder, urlEncodeParameters, aliasCombinedTypes
        // are valid options in the document without types
        propertySortOrder: 'Alphabetical',
        urlEncodeParameters: true,
        aliasCombinedTypes: true,
      },
    },
  },
});
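For context, the `header` callback in the removed config returns the banner lines Orval writes at the top of each generated file. With a hypothetical spec title and version it would produce:

```js
// Illustrative result of header({ title: 'SigNoz API', version: '1.0' }):
[
  '! Do not edit manually',
  '* The file has been auto-generated using Orval for SigNoz',
  "* regenerate with 'yarn generate:api'",
  'SigNoz API', // included only when the spec has a title
  'OpenAPI spec version: 1.0', // included only when the spec has a version
];
```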
@@ -14,12 +14,11 @@
    "jest": "jest",
    "jest:coverage": "jest --coverage",
    "jest:watch": "jest --watch",
    "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
    "postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
    "husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
    "commitlint": "commitlint --edit $1",
    "test": "jest",
    "test:changedsince": "jest --changedSince=main --coverage --silent",
    "generate:api": "orval --config ./orval.config.ts && sh scripts/post-types-generation.sh && prettier --write src/api/generated && (eslint --fix src/api/generated || true)"
    "test:changedsince": "jest --changedSince=main --coverage --silent"
  },
  "engines": {
    "node": ">=16.15.0"
@@ -39,25 +38,18 @@
    "@mdx-js/loader": "2.3.0",
    "@mdx-js/react": "2.3.0",
    "@monaco-editor/react": "^4.3.1",
    "@playwright/test": "1.55.1",
    "@playwright/test": "1.54.1",
    "@radix-ui/react-tabs": "1.0.4",
    "@radix-ui/react-tooltip": "1.0.7",
    "@sentry/react": "8.41.0",
    "@sentry/webpack-plugin": "2.22.6",
    "@signozhq/badge": "0.0.2",
    "@signozhq/button": "0.0.2",
    "@signozhq/calendar": "0.0.0",
    "@signozhq/callout": "0.0.2",
    "@signozhq/checkbox": "0.0.2",
    "@signozhq/combobox": "0.0.2",
    "@signozhq/command": "0.0.0",
    "@signozhq/design-tokens": "2.1.1",
    "@signozhq/design-tokens": "1.1.4",
    "@signozhq/input": "0.0.2",
    "@signozhq/popover": "0.0.0",
    "@signozhq/resizable": "0.0.0",
    "@signozhq/sonner": "0.1.0",
    "@signozhq/table": "0.3.7",
    "@signozhq/tooltip": "0.0.2",
    "@signozhq/table": "0.3.4",
    "@tanstack/react-table": "8.20.6",
    "@tanstack/react-virtual": "3.11.2",
    "@uiw/codemirror-theme-copilot": "4.23.11",
@@ -73,7 +65,7 @@
    "antd": "5.11.0",
    "antd-table-saveas-excel": "2.2.1",
    "antlr4": "4.13.2",
    "axios": "1.12.0",
    "axios": "1.8.2",
    "babel-eslint": "^10.1.0",
    "babel-jest": "^29.6.4",
    "babel-loader": "9.1.3",
@@ -87,7 +79,6 @@
    "color": "^4.2.1",
    "color-alpha": "1.1.3",
    "cross-env": "^7.0.3",
    "crypto-js": "4.2.0",
    "css-loader": "5.0.0",
    "css-minimizer-webpack-plugin": "5.0.1",
    "d3-hierarchy": "3.1.2",
@@ -105,9 +96,9 @@
    "i18next": "^21.6.12",
    "i18next-browser-languagedetector": "^6.1.3",
    "i18next-http-backend": "^1.3.2",
    "immer": "11.1.3",
    "jest": "^27.5.1",
    "js-base64": "^3.7.2",
    "kbar": "0.1.0-beta.48",
    "less": "^4.1.2",
    "less-loader": "^10.2.0",
    "lodash-es": "^4.17.21",
@@ -117,7 +108,7 @@
    "overlayscrollbars": "^2.8.1",
    "overlayscrollbars-react": "^0.5.6",
    "papaparse": "5.4.1",
    "posthog-js": "1.298.0",
    "posthog-js": "1.215.5",
    "rc-tween-one": "3.0.6",
    "react": "18.2.0",
    "react-addons-update": "15.6.3",
@@ -144,7 +135,6 @@
    "redux": "^4.0.5",
    "redux-thunk": "^2.3.0",
    "rehype-raw": "7.0.0",
    "rrule": "2.8.1",
    "stream": "^0.0.2",
    "style-loader": "1.3.0",
    "styled-components": "^5.3.11",
@@ -154,6 +144,7 @@
    "tsconfig-paths-webpack-plugin": "^3.5.1",
    "typescript": "^4.0.5",
    "uplot": "1.6.31",
    "userpilot": "1.3.9",
    "uuid": "^8.3.2",
    "web-vitals": "^0.2.4",
    "webpack": "5.94.0",
@@ -190,7 +181,6 @@
    "@types/color": "^3.0.3",
    "@types/compression-webpack-plugin": "^9.0.0",
    "@types/copy-webpack-plugin": "^8.0.1",
    "@types/crypto-js": "4.2.2",
    "@types/dompurify": "^2.4.0",
    "@types/event-source-polyfill": "^1.0.0",
    "@types/fontfaceobserver": "2.1.0",
@@ -222,11 +212,16 @@
    "compression-webpack-plugin": "9.0.0",
    "copy-webpack-plugin": "^11.0.0",
    "eslint": "^7.32.0",
    "eslint-config-airbnb": "^19.0.4",
    "eslint-config-airbnb-typescript": "^16.1.4",
    "eslint-config-prettier": "^8.3.0",
    "eslint-config-standard": "^16.0.3",
    "eslint-plugin-import": "^2.28.1",
    "eslint-plugin-jest": "^26.9.0",
    "eslint-plugin-jsx-a11y": "^6.5.1",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-prettier": "^4.0.0",
    "eslint-plugin-promise": "^5.1.0",
    "eslint-plugin-react": "^7.24.0",
    "eslint-plugin-react-hooks": "^4.3.0",
    "eslint-plugin-simple-import-sort": "^7.0.0",
@@ -240,7 +235,6 @@
    "lint-staged": "^12.5.0",
    "msw": "1.3.2",
    "npm-run-all": "latest",
    "orval": "7.18.0",
    "portfinder-sync": "^0.0.2",
    "postcss": "8.4.38",
    "prettier": "2.2.1",
@@ -279,9 +273,6 @@
    "serialize-javascript": "6.0.2",
    "prismjs": "1.30.0",
    "got": "11.8.5",
    "form-data": "4.0.4",
    "brace-expansion": "^2.0.2",
    "on-headers": "^1.1.0",
    "tmp": "0.2.4"
    "form-data": "4.0.4"
  }
}

@@ -1,81 +0,0 @@

<svg
width="32"
height="32"
viewBox="0 0 32 32"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M19.11 16.8483L14.0369 17.7304C14.0369 17.7304 12.3481 22.0324 12.2437 22.3235C12.1392 22.6146 12.1437 23.0746 12.6037 23.0746C13.1881 23.0746 16.1546 23.0591 16.1546 23.0591C16.1546 23.0591 15.4346 26.5322 15.3924 26.8433C15.3502 27.1544 15.6835 27.4277 15.9768 27.1144C16.2701 26.8011 20.3121 21.6058 20.4877 21.3525C20.801 20.8992 20.4988 20.4814 20.1433 20.4614C19.7877 20.4414 17.4056 20.4925 17.4056 20.4925L19.11 16.8483Z"
fill="#FECA18"
/>
<path
|
||||
d="M17.7589 17.4527C17.7589 17.4527 16.6279 19.9548 16.5856 20.097C16.4612 20.5192 17.0078 20.6903 17.1634 20.3481C17.3189 20.0037 18.6655 17.2194 18.6655 17.2194L17.7589 17.4527Z"
fill="#FDB900"
/>
<path
|
||||
d="M12.8859 22.2592C13.1836 22.2503 15.7968 22.2436 16.0146 22.2281C16.4213 22.1969 16.4835 22.7591 16.0146 22.7591C15.528 22.7591 12.9637 22.768 12.8081 22.7747C12.4481 22.7925 12.3992 22.2747 12.8859 22.2592Z"
fill="#FDB900"
/>
<path
|
||||
d="M14.9813 17.1127C14.9813 17.1127 13.6481 20.4592 13.5725 20.7103C13.2592 21.7591 14.3858 21.728 14.6836 21.1325C14.8302 20.837 16.2635 17.8016 16.3257 17.2527C16.3879 16.7061 14.9813 17.1127 14.9813 17.1127Z"
fill="#FFE36A"
/>
<path
|
||||
d="M15.3347 21.0148C15.1436 20.8837 14.9125 20.9992 14.7725 21.2192C14.6325 21.4392 14.428 21.797 14.7414 21.9858C15.0236 22.1547 15.2724 21.8147 15.3835 21.657C15.4924 21.4992 15.6324 21.217 15.3347 21.0148Z"
fill="#FFE36A"
/>
<path
|
||||
d="M17.6301 21.7326C17.1212 21.6237 16.9568 22.0459 16.8479 22.5459C16.739 23.0459 16.3302 24.6636 16.2546 25.0635C16.1457 25.6257 16.6612 25.7057 16.8812 25.188C17.0457 24.8013 17.759 23.057 17.8501 22.7792C17.9901 22.3592 18.1456 21.8437 17.6301 21.7326Z"
fill="#FFE36A"
/>
<path
|
||||
d="M25.7585 12.0573C25.7585 12.0573 26.3363 4.28441 19.69 3.2978C13.7147 2.41118 12.5415 8.08421 12.5415 8.08421C12.5415 8.08421 10.2838 7.55757 8.66166 9.00639C7.05064 10.4463 7.17508 12.1507 7.17508 12.1507C7.17508 12.1507 3.20195 11.524 2.79531 14.935C2.41533 18.1215 7.11286 17.3282 7.11286 17.3282L29.4183 15.686C29.4183 15.686 29.9472 14.0106 28.2606 12.7462C27.2585 11.9929 25.7585 12.0573 25.7585 12.0573Z"
fill="#E4EAEE"
/>
<path
|
||||
d="M13.7347 13.8196C13.9213 13.7574 14.9857 14.6662 18.0522 14.6951C22.2653 14.7373 25.4563 12.2552 25.4563 12.2552C25.4563 12.2552 25.5629 13.0108 25.2274 13.5485C24.8096 14.2151 24.163 14.3418 24.163 14.3418C24.163 14.3418 25.3318 15.1551 26.9362 15.0307C28.3828 14.9173 29.4294 14.4262 29.4294 14.4262C29.4294 14.4262 29.5694 15.1395 29.4916 15.8351C29.3361 17.2217 28.5228 17.7861 27.6251 17.8883C26.9651 17.9638 21.3276 17.9905 19.0122 18.0127C16.9256 18.0327 6.29285 18.2994 5.20624 18.1794C4.07963 18.0549 3.22412 17.3772 2.91303 16.4061C2.67526 15.6706 2.78859 15.1973 2.78859 15.1973C2.78859 15.1973 4.85959 15.7462 6.02175 15.6151C7.48167 15.4484 8.46162 14.5307 8.46162 14.5307C8.46162 14.5307 9.33713 15.0307 11.277 14.864C12.9458 14.7173 13.7347 13.8196 13.7347 13.8196Z"
fill="url(#paint0_radial_811_5475)"
/>
<path
|
||||
d="M24.8653 18.2661C24.6386 18.1394 23.832 18.8927 23.3787 19.346C23.1009 19.6238 22.2165 20.3504 22.0965 21.0504C21.7988 22.7703 23.7698 23.1614 24.552 22.3637C25.143 21.7615 25.0364 20.586 25.0364 20.1904C25.0386 19.6416 25.1475 18.4216 24.8653 18.2661Z"
fill="#52C0EE"
/>
<path
|
||||
d="M8.67058 19.1904C8.45504 19.0659 7.68174 19.7948 7.24621 20.237C6.97956 20.5058 6.13294 21.2103 6.01294 21.8947C5.71962 23.5723 7.5973 23.9657 8.34837 23.1924C8.91501 22.608 8.82168 21.4591 8.82391 21.0725C8.82613 20.537 8.93723 19.3459 8.67058 19.1904Z"
fill="#52C0EE"
/>
<path
|
||||
d="M12.2548 24.1634C12.0126 24.0723 11.1971 24.8145 10.7438 25.2678C10.466 25.5456 9.64386 26.2566 9.52386 26.9566C9.2261 28.6765 11.1349 29.0832 11.9171 28.2854C12.5081 27.6832 12.4104 26.5077 12.4015 26.1122C12.3793 25.0812 12.4215 24.2256 12.2548 24.1634Z"
fill="#52C0EE"
/>
<path
|
||||
d="M23.5054 20.4926C23.1943 20.3304 22.8165 20.3993 22.5765 20.8992C22.3365 21.3992 22.5343 21.797 22.7854 21.9103C23.0743 22.0436 23.432 21.9214 23.672 21.5147C23.912 21.1081 23.7654 20.6281 23.5054 20.4926Z"
fill="#B2E6FE"
/>
<path
|
||||
d="M10.6682 26.4279C10.3482 26.3168 9.99715 26.4345 9.83716 26.9478C9.67717 27.4611 9.92382 27.8122 10.1794 27.8878C10.4749 27.9744 10.7993 27.8078 10.9727 27.3834C11.1438 26.9589 10.9349 26.5212 10.6682 26.4279Z"
fill="#B2E6FE"
/>
<path
|
||||
d="M7.12613 21.3258C6.78837 21.2325 6.43283 21.3769 6.30395 21.9169C6.17507 22.4569 6.45061 22.8035 6.71948 22.8613C7.03058 22.9302 7.35278 22.7369 7.50389 22.288C7.65277 21.8436 7.41056 21.4036 7.12613 21.3258Z"
fill="#B2E6FE"
/>
<defs>
<radialGradient
id="paint0_radial_811_5475"
cx="0"
cy="0"
r="1"
gradientTransform="matrix(0.18837 -6.53799 9.79456 0.282554 16.401 18.5051)"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.1934" stopColor="#FFE366" />
<stop offset="0.3305" stopColor="#EDDD82" />
<stop offset="0.5709" stopColor="#D0D4AD" />
<stop offset="0.7589" stopColor="#BFCFC7" />
<stop offset="0.8699" stopColor="#B8CDD1" />
</radialGradient>
</defs>
</svg>
Before Width: | Height: | Size: 5.4 KiB |
@@ -1,3 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
<image width="100" height="100" href="data:image/webp;base64,UklGRqQEAABXRUJQVlA4IJgEAACQJQCdASrhAOEAPm02mUikIyKhJVZ4CIANiWVu4XPw+dx/zz8Y9kq6J+Kv5T8nN4H6MM2Hod6xffvuy+jnoX8wD9Zekj5gP2S/YDsAegB+ovWUeg9/GfPP/cn4dP2/9JnVbPMU2BWP/6ShYsB/INjyXNw88GLWEjqTk7r3B1Jyd0Q60mjXFTuiHUwvdoU9QiTOwXp8O6KdYSPcXtgV/YIjrUu+9aTZYQwhSBOiHuLpesMKhiEeAz91s6AFmAINNYJeBpyuf9KScX17NiZWEjlmJ+PsBWx4Be/O9Vng220j2g97IwDP4CHt7ScFK+ikuWNp4z1G60XIv5OXis+Sp/e/QkOjbE0qKut3VrL3hQfSVuuLp2XQBvxPw8eyiMbE05kQ6k5O6Ml23DSy92Ix+s9tw0svcgAA/Jba8JQ+0uNQuBewKWqirnyDRkjzUXXOeoNxj0dA3QpklBe8QAAMu8geavhwzfTJ2OUqOrwpg8PXz6sktEQLDnTlNclju/V9f4Ny3CgDYwBvljjNaFA8PR18BGAyzmVuHsa0hgskCkgbT6nZi/Vr9AXTc/UZtUS8Z3pKg4hMjWCXdMx0JCsu6v+8ghZmHf/y0VcGbDiugOy3WH36R3zSOIJh1NjhcNYlKfWilZD5ohEfCaTn/D4u3kgC/kLeLF06X5oe/zKySrWWEPQe965rTf4pcEGBZQGkauwtkLPJWVcHoNzJshb6gdVqs0hye7fI5nFTsbJaZmqz8kXeMSEV4zhApVaDyl3Q2P/ATfChzCP87kOBxeVrhSkNkMWS/e1j58dVzpyTubHOt04Z2NKpTlEjBganrkKIYvtuYWGoJWEmtMVtM9TuzNWa3jKX3kWc9zWmQHHawveaHnJn4HJXQwbTC4ZIlxa5el1b8GgqUXzOeF9LIWA3eb7i34/Q3GXZcbs5DLsukl03KclLo5hW5uf04JjGQR5NXHd8as68c3K9Kze3147QuIn1ytP7uE9us639xDuAiHDPr9JiVn62mv80Q2ErNLD+d6VYdhm6J2PCLY5/9mHTExTnIBzRtNoErP6qmMiCXmSM4sX7s14u3yhaE+dVjX1xtxUWtFjH0GWCN/1JNmjtElHq+FjqPWse7tIMvkKm7xFBgoM8yUJGxY+HviWNkOHipLPh7RJ3KehnEZTPIhR5GHlhlATLx+QUQ61+WwhffiT45KPRiK9v8Cd+1YShd6ZGqFSqsxt4FuMSvpMUB+Rd51pUUPuwXHzpXcyz6BXkWw0Mi3jbfH3kxet2Bsqzu0XWmJ2Oj097qP4ayirQzJewKHo8LS0cFG73Lvox2tmD7ZhuRRVmGjyz62sUMVDSzIUN2BHghed2IdUHVnAkof5DrdZIx+umeHp2XSityHdbTywo7r1+ve5+vPk8hhxhISBjbK+qikfPVnhJ8R/L7KUUqchh3AGKVmRGVgXh09MDaEiW/qqPYzP+MBiKfM5eTcHuGLymyFvT2em/o58zIcp7nKtDr90BsurnuOA0XaH3ZPDpSFAUN3HL9Zj7mpQtcFMfpCaC1VMv0l2UvY8xo/6qYAG6UEbsogAAAAA=" />
</svg>
Before Width: | Height: | Size: 1.7 KiB |
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>
Before Width: | Height: | Size: 3.0 KiB |
@@ -1,16 +0,0 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 92.2 65" style="enable-background:new 0 0 92.2 65;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="65" width="92.2" x="-43.7" y="-98">
</sliceSourceBounds>
</sfw>
</metadata>
<path class="st0" d="M66.5,0H52.4l25.7,65h14.1L66.5,0z M25.7,0L0,65h14.4l5.3-13.6h26.9L51.8,65h14.4L40.5,0C40.5,0,25.7,0,25.7,0z
M24.3,39.3l8.8-22.8l8.8,22.8H24.3z">
</path>
</svg>
Before Width: | Height: | Size: 714 B |
Before Width: | Height: | Size: 20 KiB |
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>
Before Width: | Height: | Size: 1.3 KiB |
@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Claude</title><path d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z" fill="#D97757" fill-rule="nonzero"></path></svg>
Before Width: | Height: | Size: 1.7 KiB |
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>
Before Width: | Height: | Size: 1.7 KiB |
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><g transform="translate(25 25)"><rect width="60" height="55" fill="#fcd34d" rx="6"/><path stroke="#d97706" stroke-linecap="round" stroke-linejoin="round" stroke-width="4" d="m10 40 15-15 15 8 15-23"/><rect width="35" height="55" x="65" fill="#6ee7b7" rx="6"/><rect width="20" height="6" x="73" y="10" fill="#059669" rx="3"/><rect width="15" height="6" x="73" y="22" fill="#059669" opacity=".7" rx="3"/><rect width="18" height="6" x="73" y="34" fill="#059669" opacity=".7" rx="3"/><rect width="100" height="40" y="60" fill="#a5b4fc" rx="6"/><rect width="80" height="8" x="10" y="70" fill="#4f46e5" rx="4"/><rect width="50" height="8" x="20" y="83" fill="#6366f1" rx="4"/></g></svg>
Before Width: | Height: | Size: 826 B |
@@ -1 +0,0 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>DeepSeek</title><path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z" fill="#4D6BFE"></path></svg>
Before Width: | Height: | Size: 2.1 KiB |
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#b31aab" d="m33.172 61.48.176 7.325 7.797 4.785-.176-7.324Zm19.031 30.504-.176-7.18-6.84-4.206c-.085-.059-.203-.145-.289-.203l.176 7.207Zm-24.355 9.688L10.012 90.715l-.438-18.367 8.758-3.746-.172-7.352-13.969 5.969c-1.074.46-1.714 1.441-1.687 2.566l.523 22.055c.032 1.125.73 2.25 1.836 2.941l21.383 13.149c.992.605 2.215.78 3.203.46a.8.8 0 0 0 .29-.113l13.124-5.593-7.129-4.383Zm0 0"/><path fill="#d163ce" d="M85.488 61.047c-.031-1.328-.843-2.625-2.125-3.43L57.38 41.672l-.813.344.176 7.726 20.57 12.63.493 20.648 7.86 4.812.433-.172ZM54.383 97.289 30.262 82.47l-.582-24.883 11-4.7-.203-8.562-17.082 7.293c-1.25.547-2.008 1.672-1.977 3l.668 29.18c.031 1.324.844 2.625 2.125 3.402l28.281 17.387c1.164.723 2.563.894 3.754.547.117-.028.234-.086.348-.145l16.703-7.12-8.32-5.102Zm0 0"/><path fill="#e13eaf" d="M122.234 40.633 85.98 18.343c-1.335-.808-2.937-1.038-4.304-.605-.145.028-.262.086-.406.145l-35.383 15.11c-1.426.605-2.297 1.902-2.27 3.429l.903 37.371c.03 1.527.96 2.996 2.445 3.89l36.254 22.262c1.336.805 2.937 1.035 4.277.606.145-.031.262-.09.406-.145l35.383-15.11c1.426-.605 2.297-1.933 2.27-3.433l-.875-37.367c-.028-1.473-.961-2.969-2.446-3.863M85.398 91.64 53.891 72.293l-.79-32.496 30.727-13.121 31.512 19.347.785 32.497Zm0 0"/></svg>
Before Width: | Height: | Size: 1.3 KiB |