mirror of
https://github.com/SigNoz/signoz.git
synced 2026-04-06 12:10:29 +01:00
Compare commits
99 Commits
chore/use-
...
nv/4172
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bb725ea4d9 | ||
|
|
d677973d56 | ||
|
|
b5b89bb678 | ||
|
|
b5eab118dd | ||
|
|
98d0bdfe49 | ||
|
|
a71006b662 | ||
|
|
769e36ec84 | ||
|
|
65bedcca3f | ||
|
|
1e7ddb0dbe | ||
|
|
f39e2183a3 | ||
|
|
43c9367ab5 | ||
|
|
596cb8adbb | ||
|
|
58a2737717 | ||
|
|
8263aed441 | ||
|
|
d3eb56f3da | ||
|
|
6e0c905977 | ||
|
|
8ee5b5f08e | ||
|
|
1325b0a1b3 | ||
|
|
76dc3b743b | ||
|
|
dfe0a9d147 | ||
|
|
f24c3d8e24 | ||
|
|
7dba4d7b64 | ||
|
|
2299cedeab | ||
|
|
6812d68d4d | ||
|
|
667a1e6d5d | ||
|
|
31306a57d8 | ||
|
|
839b3e29ee | ||
|
|
20ab7d4908 | ||
|
|
d5b72f9f0c | ||
|
|
9a3efa7704 | ||
|
|
fd45b4fad1 | ||
|
|
cea88bf51e | ||
|
|
6ead1cd52a | ||
|
|
7cb700428c | ||
|
|
a2dc410be6 | ||
|
|
66ce48434d | ||
|
|
7d3612c10a | ||
|
|
c82cd32f61 | ||
|
|
a3980e084c | ||
|
|
4319dd9cef | ||
|
|
92a5e9b9c9 | ||
|
|
408a914129 | ||
|
|
0e304b1d40 | ||
|
|
58a9be24d3 | ||
|
|
adf439fcf1 | ||
|
|
a1a54c4bb2 | ||
|
|
3c1961d3fc | ||
|
|
c3efa0660b | ||
|
|
183dd09082 | ||
|
|
a351373c49 | ||
|
|
8e7653b90d | ||
|
|
5c40d6b68b | ||
|
|
31115df41c | ||
|
|
869c3dccb2 | ||
|
|
c5d7a7ef8c | ||
|
|
544b87b254 | ||
|
|
e885fb98e5 | ||
|
|
be227eec43 | ||
|
|
13263c1f25 | ||
|
|
ccbf410d15 | ||
|
|
03b98ff824 | ||
|
|
2cdba0d11c | ||
|
|
84d2885530 | ||
|
|
b82dcc6138 | ||
|
|
a14d5847b9 | ||
|
|
d184746142 | ||
|
|
c335e17e1d | ||
|
|
433dd0b2d0 | ||
|
|
05e97e246a | ||
|
|
bddfe30f6c | ||
|
|
7a01a5250d | ||
|
|
09c98c830d | ||
|
|
0fbb90cc91 | ||
|
|
15f0787610 | ||
|
|
22ebc7732c | ||
|
|
cff18edf6e | ||
|
|
cb49c0bf3b | ||
|
|
1cb6f94d21 | ||
|
|
68155f374b | ||
|
|
696524509f | ||
|
|
705cdab38c | ||
|
|
ae9b881413 | ||
|
|
05f4e15d07 | ||
|
|
1653c6d725 | ||
|
|
070b4b7061 | ||
|
|
7f4c06edd6 | ||
|
|
6bed20b5b9 | ||
|
|
033bd3c9b8 | ||
|
|
d4c9a923fd | ||
|
|
387dcb529f | ||
|
|
7a4da7bcc5 | ||
|
|
b152fae3fa | ||
|
|
2ed766726c | ||
|
|
8767f6a57d | ||
|
|
22d8c7599b | ||
|
|
1019264272 | ||
|
|
c950d7e784 | ||
|
|
1e279e6193 | ||
|
|
d3a278c43e |
4
.github/workflows/integrationci.yaml
vendored
4
.github/workflows/integrationci.yaml
vendored
@@ -56,7 +56,6 @@ jobs:
|
||||
- postgres
|
||||
- sqlite
|
||||
sqlite-mode:
|
||||
- delete
|
||||
- wal
|
||||
clickhouse-version:
|
||||
- 25.5.6
|
||||
@@ -65,9 +64,6 @@ jobs:
|
||||
- v0.142.0
|
||||
postgres-version:
|
||||
- 15
|
||||
exclude:
|
||||
- sqlstore-provider: postgres
|
||||
sqlite-mode: wal
|
||||
if: |
|
||||
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
|
||||
|
||||
1
frontend/public/Logos/hasura.svg
Normal file
1
frontend/public/Logos/hasura.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="#1eb4d4" viewBox="0 0 24 24"><title>Hasura</title><path d="M23.558 8.172c.707-2.152.282-6.447-1.09-8.032a.42.42 0 0 0-.664.051l-1.69 2.59a1.32 1.32 0 0 1-1.737.276C16.544 1.885 14.354 1.204 12 1.204s-4.544.68-6.378 1.853a1.326 1.326 0 0 1-1.736-.276L2.196.191A.42.42 0 0 0 1.532.14C.16 1.728-.265 6.023.442 8.172c.236.716.3 1.472.16 2.207-.137.73-.276 1.61-.276 2.223C.326 18.898 5.553 24 11.997 24c6.447 0 11.671-5.105 11.671-11.398 0-.613-.138-1.494-.276-2.223a4.47 4.47 0 0 1 .166-2.207m-11.56 13.284c-4.984 0-9.036-3.96-9.036-8.827q0-.239.014-.473c.18-3.316 2.243-6.15 5.16-7.5 1.17-.546 2.481-.848 3.864-.848s2.69.302 3.864.85c2.917 1.351 4.98 4.187 5.16 7.501q.013.236.014.473c-.003 4.864-4.057 8.824-9.04 8.824m3.915-5.43-2.31-3.91-1.98-3.26a.26.26 0 0 0-.223-.125H9.508a.26.26 0 0 0-.227.13.25.25 0 0 0 .003.254l1.895 3.109-2.542 3.787a.25.25 0 0 0-.011.259.26.26 0 0 0 .23.132h1.905a.26.26 0 0 0 .218-.116l1.375-2.096 1.233 2.088a.26.26 0 0 0 .224.127h1.878c.094 0 .18-.049.224-.127a.24.24 0 0 0 0-.251z"/></svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
1
frontend/public/Logos/n8n.svg
Normal file
1
frontend/public/Logos/n8n.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="#ea4b71" viewBox="0 0 24 24"><title>n8n</title><path d="M21.474 5.684a2.53 2.53 0 0 0-2.447 1.895H16.13a2.526 2.526 0 0 0-2.492 2.11l-.103.624a1.26 1.26 0 0 1-1.246 1.055h-1.001a2.527 2.527 0 0 0-4.893 0H4.973a2.527 2.527 0 1 0 0 1.264h1.422a2.527 2.527 0 0 0 4.894 0h1a1.26 1.26 0 0 1 1.247 1.055l.103.623a2.526 2.526 0 0 0 2.492 2.111h.37a2.527 2.527 0 1 0 0-1.263h-.37a1.26 1.26 0 0 1-1.246-1.056l-.103-.623A2.52 2.52 0 0 0 13.96 12a2.52 2.52 0 0 0 .82-1.48l.104-.622a1.26 1.26 0 0 1 1.246-1.056h2.896a2.527 2.527 0 1 0 2.447-3.158m0 1.263a1.263 1.263 0 0 1 1.263 1.263 1.263 1.263 0 0 1-1.263 1.264A1.263 1.263 0 0 1 20.21 8.21a1.263 1.263 0 0 1 1.264-1.263m-18.948 3.79A1.263 1.263 0 0 1 3.79 12a1.263 1.263 0 0 1-1.264 1.263A1.263 1.263 0 0 1 1.263 12a1.263 1.263 0 0 1 1.263-1.263m6.316 0A1.263 1.263 0 0 1 10.105 12a1.263 1.263 0 0 1-1.263 1.263A1.263 1.263 0 0 1 7.58 12a1.263 1.263 0 0 1 1.263-1.263m10.106 3.79a1.263 1.263 0 0 1 1.263 1.263 1.263 1.263 0 0 1-1.263 1.263 1.263 1.263 0 0 1-1.264-1.263 1.263 1.263 0 0 1 1.263-1.264"/></svg>
|
||||
|
After Width: | Height: | Size: 1.1 KiB |
1
frontend/public/Logos/qwen.svg
Normal file
1
frontend/public/Logos/qwen.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 5.8 KiB |
@@ -18,7 +18,7 @@ import AppLayout from 'container/AppLayout';
|
||||
import Hex from 'crypto-js/enc-hex';
|
||||
import HmacSHA256 from 'crypto-js/hmac-sha256';
|
||||
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { useThemeConfig } from 'hooks/useDarkMode';
|
||||
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
|
||||
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
|
||||
import { NotificationProvider } from 'hooks/useNotifications';
|
||||
import { ResourceProvider } from 'hooks/useResourceAttribute';
|
||||
@@ -212,6 +212,12 @@ function App(): JSX.Element {
|
||||
activeLicenseFetchError,
|
||||
]);
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
useEffect(() => {
|
||||
window.Pylon?.('setTheme', isDarkMode ? 'dark' : 'light');
|
||||
}, [isDarkMode]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
pathname === ROUTES.ONBOARDING ||
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
"tags": [
|
||||
"quickstart"
|
||||
],
|
||||
"module": "apm",
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"apm",
|
||||
"application performance monitoring",
|
||||
@@ -22,6 +22,28 @@
|
||||
"imgUrl": "/Logos/quickstart.svg",
|
||||
"link": "/docs/cloud/quickstart/"
|
||||
},
|
||||
{
|
||||
"dataSource": "signoz-mcp-server",
|
||||
"label": "SigNoz MCP Server",
|
||||
"tags": [
|
||||
"quickstart"
|
||||
],
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"agent",
|
||||
"ai",
|
||||
"mcp",
|
||||
"mcp server",
|
||||
"model context protocol",
|
||||
"quickstart",
|
||||
"signoz",
|
||||
"signoz mcp",
|
||||
"signoz mcp server",
|
||||
"setup"
|
||||
],
|
||||
"imgUrl": "/Logos/signoz-brand-logo.svg",
|
||||
"link": "/docs/ai/signoz-mcp-server/"
|
||||
},
|
||||
{
|
||||
"dataSource": "migrate-from-datadog",
|
||||
"label": "From Datadog",
|
||||
@@ -1524,18 +1546,24 @@
|
||||
"link": "/docs/userguide/collect_docker_logs/"
|
||||
},
|
||||
{
|
||||
"dataSource": "vercel-logs",
|
||||
"label": "Vercel logs",
|
||||
"dataSource": "vercel",
|
||||
"label": "Vercel",
|
||||
"imgUrl": "/Logos/vercel.svg",
|
||||
"tags": [
|
||||
"apm/traces",
|
||||
"logs"
|
||||
],
|
||||
"module": "logs",
|
||||
"module": "home",
|
||||
"relatedSearchKeywords": [
|
||||
"collect vercel logs",
|
||||
"logging",
|
||||
"logs",
|
||||
"opentelemetry drains",
|
||||
"trace drain",
|
||||
"traces",
|
||||
"tracing",
|
||||
"vercel",
|
||||
"vercel drains",
|
||||
"vercel functions logs",
|
||||
"vercel log forwarding",
|
||||
"vercel log monitoring",
|
||||
@@ -1545,10 +1573,12 @@
|
||||
"vercel observability",
|
||||
"vercel opentelemetry integration",
|
||||
"vercel to otel",
|
||||
"vercel trace drain",
|
||||
"vercel traces",
|
||||
"vercel-logs"
|
||||
],
|
||||
"id": "vercel-logs",
|
||||
"link": "/docs/userguide/vercel_logs_to_signoz/"
|
||||
"link": "/docs/userguide/vercel-to-signoz/"
|
||||
},
|
||||
{
|
||||
"dataSource": "heroku-logs",
|
||||
@@ -4029,6 +4059,57 @@
|
||||
],
|
||||
"link": "/docs/pydantic-ai-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "qwen-observability",
|
||||
"label": "Qwen",
|
||||
"imgUrl": "/Logos/qwen.svg",
|
||||
"tags": [
|
||||
"LLM Monitoring"
|
||||
],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"alibaba cloud",
|
||||
"dashscope",
|
||||
"llm",
|
||||
"llm monitoring",
|
||||
"monitoring",
|
||||
"observability",
|
||||
"otel qwen",
|
||||
"qwen",
|
||||
"qwen logs",
|
||||
"qwen metrics",
|
||||
"qwen monitoring",
|
||||
"qwen observability",
|
||||
"qwen response time",
|
||||
"qwen traces"
|
||||
],
|
||||
"id": "qwen-observability",
|
||||
"link": "/docs/qwen-observability/"
|
||||
},
|
||||
{
|
||||
"dataSource": "n8n-cloud",
|
||||
"label": "n8n Cloud",
|
||||
"imgUrl": "/Logos/n8n.svg",
|
||||
"tags": [
|
||||
"LLM Monitoring"
|
||||
],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"llm monitoring",
|
||||
"monitoring",
|
||||
"n8n",
|
||||
"n8n cloud",
|
||||
"n8n monitoring",
|
||||
"n8n observability",
|
||||
"n8n traces",
|
||||
"observability",
|
||||
"otel n8n",
|
||||
"workflow monitoring",
|
||||
"workflow traces"
|
||||
],
|
||||
"id": "n8n-cloud",
|
||||
"link": "/docs/n8n-monitoring/"
|
||||
},
|
||||
{
|
||||
"dataSource": "mastra-monitoring",
|
||||
"label": "Mastra",
|
||||
@@ -5158,6 +5239,31 @@
|
||||
"id": "microsoft-sql-server",
|
||||
"link": "/docs/integrations/sql-server/"
|
||||
},
|
||||
{
|
||||
"dataSource": "hasura",
|
||||
"label": "Hasura",
|
||||
"imgUrl": "/Logos/hasura.svg",
|
||||
"tags": [
|
||||
"database"
|
||||
],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": [
|
||||
"database",
|
||||
"graphql",
|
||||
"graphql engine",
|
||||
"hasura",
|
||||
"hasura graphql",
|
||||
"hasura logs",
|
||||
"hasura metrics",
|
||||
"hasura monitoring",
|
||||
"hasura observability",
|
||||
"hasura traces",
|
||||
"opentelemetry hasura",
|
||||
"telemetry"
|
||||
],
|
||||
"id": "hasura",
|
||||
"link": "/docs/integrations/opentelemetry-hasura/"
|
||||
},
|
||||
{
|
||||
"dataSource": "supabase",
|
||||
"label": "Supabase",
|
||||
|
||||
37
go.mod
37
go.mod
@@ -1,6 +1,6 @@
|
||||
module github.com/SigNoz/signoz
|
||||
|
||||
go 1.25.0
|
||||
go 1.25.7
|
||||
|
||||
require (
|
||||
dario.cat/mergo v1.0.2
|
||||
@@ -19,6 +19,7 @@ require (
|
||||
github.com/go-co-op/gocron v1.30.1
|
||||
github.com/go-openapi/runtime v0.29.2
|
||||
github.com/go-openapi/strfmt v0.25.0
|
||||
github.com/go-playground/validator/v10 v10.27.0
|
||||
github.com/go-redis/redismock/v9 v9.2.0
|
||||
github.com/go-viper/mapstructure/v2 v2.5.0
|
||||
github.com/gojek/heimdall/v7 v7.0.3
|
||||
@@ -27,8 +28,8 @@ require (
|
||||
github.com/gorilla/handlers v1.5.1
|
||||
github.com/gorilla/mux v1.8.1
|
||||
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
|
||||
github.com/huandu/go-sqlbuilder v1.35.0
|
||||
github.com/jackc/pgx/v5 v5.7.6
|
||||
github.com/huandu/go-sqlbuilder v1.39.1
|
||||
github.com/jackc/pgx/v5 v5.8.0
|
||||
github.com/json-iterator/go v1.1.13-0.20220915233716-71ac16282d12
|
||||
github.com/knadh/koanf v1.5.0
|
||||
github.com/knadh/koanf/v2 v2.3.2
|
||||
@@ -38,6 +39,7 @@ require (
|
||||
github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
|
||||
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20251027165255-0f8f255e5f6c
|
||||
github.com/opentracing/opentracing-go v1.2.0
|
||||
github.com/perses/perses v0.53.1
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/prometheus/alertmanager v0.31.0
|
||||
github.com/prometheus/client_golang v1.23.2
|
||||
@@ -75,18 +77,18 @@ require (
|
||||
go.opentelemetry.io/otel/trace v1.40.0
|
||||
go.uber.org/multierr v1.11.0
|
||||
go.uber.org/zap v1.27.1
|
||||
golang.org/x/crypto v0.47.0
|
||||
golang.org/x/crypto v0.48.0
|
||||
golang.org/x/exp v0.0.0-20260112195511-716be5621a96
|
||||
golang.org/x/net v0.49.0
|
||||
golang.org/x/oauth2 v0.34.0
|
||||
golang.org/x/net v0.50.0
|
||||
golang.org/x/oauth2 v0.35.0
|
||||
golang.org/x/sync v0.19.0
|
||||
golang.org/x/text v0.33.0
|
||||
golang.org/x/text v0.34.0
|
||||
gonum.org/v1/gonum v0.17.0
|
||||
google.golang.org/api v0.265.0
|
||||
google.golang.org/protobuf v1.36.11
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
k8s.io/apimachinery v0.35.0
|
||||
k8s.io/apimachinery v0.35.2
|
||||
modernc.org/sqlite v1.40.1
|
||||
)
|
||||
|
||||
@@ -125,12 +127,14 @@ require (
|
||||
github.com/go-openapi/swag/yamlutils v0.25.4 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.27.0 // indirect
|
||||
github.com/goccy/go-yaml v1.19.2 // indirect
|
||||
github.com/hashicorp/go-metrics v0.5.4 // indirect
|
||||
github.com/huandu/go-clone v1.7.3 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/muhlemmer/gu v0.3.1 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/perses/common v0.30.2 // indirect
|
||||
github.com/prometheus/client_golang/exp v0.0.0-20260108101519-fb0838f53562 // indirect
|
||||
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
@@ -139,6 +143,8 @@ require (
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.3.0 // indirect
|
||||
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
|
||||
github.com/zitadel/oidc/v3 v3.45.4 // indirect
|
||||
github.com/zitadel/schema v1.3.2 // indirect
|
||||
go.opentelemetry.io/collector/client v1.50.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configoptional v1.50.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configretry v1.50.0 // indirect
|
||||
@@ -208,7 +214,7 @@ require (
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/golang/snappy v1.0.0 // indirect
|
||||
github.com/google/btree v1.1.3 // indirect
|
||||
github.com/google/cel-go v0.26.1 // indirect
|
||||
github.com/google/cel-go v0.27.0 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.11 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.16.0 // indirect
|
||||
@@ -226,7 +232,7 @@ require (
|
||||
github.com/hashicorp/golang-lru v1.0.2 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/memberlist v0.5.4 // indirect
|
||||
github.com/huandu/xstrings v1.4.0 // indirect
|
||||
github.com/huandu/xstrings v1.5.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
@@ -298,7 +304,6 @@ require (
|
||||
github.com/spf13/cast v1.10.0 // indirect
|
||||
github.com/spf13/pflag v1.0.10 // indirect
|
||||
github.com/spf13/viper v1.20.1 // indirect
|
||||
github.com/stoewer/go-strcase v1.3.0 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/swaggest/openapi-go v0.2.60
|
||||
@@ -376,15 +381,15 @@ require (
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/mock v0.6.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/mod v0.32.0 // indirect
|
||||
golang.org/x/sys v0.40.0 // indirect
|
||||
golang.org/x/mod v0.33.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/tools v0.41.0 // indirect
|
||||
golang.org/x/tools v0.42.0 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20260203192932-546029d2fa20 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260128011058-8636f8732409
|
||||
google.golang.org/grpc v1.78.0 // indirect
|
||||
gopkg.in/telebot.v3 v3.3.8 // indirect
|
||||
k8s.io/client-go v0.35.0 // indirect
|
||||
k8s.io/client-go v0.35.2 // indirect
|
||||
k8s.io/klog/v2 v2.130.1 // indirect
|
||||
k8s.io/utils v0.0.0-20251002143259-bc988d571ff4 // indirect
|
||||
)
|
||||
|
||||
81
go.sum
81
go.sum
@@ -489,8 +489,8 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.1.3 h1:CVpQJjYgC4VbzxeGVHfvZrv1ctoYCAI8vbl07Fcxlyg=
|
||||
github.com/google/btree v1.1.3/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
|
||||
github.com/google/cel-go v0.26.1 h1:iPbVVEdkhTX++hpe3lzSk7D3G3QSYqLGoHOcEio+UXQ=
|
||||
github.com/google/cel-go v0.26.1/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PUIaryMM=
|
||||
github.com/google/cel-go v0.27.0 h1:e7ih85+4qVrBuqQWTW4FKSqZYokVuc3HnhH5keboFTo=
|
||||
github.com/google/cel-go v0.27.0/go.mod h1:tTJ11FWqnhw5KKpnWpvW9CJC3Y9GK4EIS0WXnBbebzw=
|
||||
github.com/google/gnostic-models v0.7.0 h1:qwTtogB15McXDaNqTZdzPJRHvaVJlAl+HVQnLmJEJxo=
|
||||
github.com/google/gnostic-models v0.7.0/go.mod h1:whL5G0m6dmc5cPxKc5bdKdEN3UjI7OUGxBlw57miDrQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
@@ -654,12 +654,15 @@ github.com/hetznercloud/hcloud-go/v2 v2.36.0 h1:HlLL/aaVXUulqe+rsjoJmrxKhPi1MflL
|
||||
github.com/hetznercloud/hcloud-go/v2 v2.36.0/go.mod h1:MnN/QJEa/RYNQiiVoJjNHPntM7Z1wlYPgJ2HA40/cDE=
|
||||
github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs=
|
||||
github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E=
|
||||
github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U=
|
||||
github.com/huandu/go-assert v1.1.6 h1:oaAfYxq9KNDi9qswn/6aE0EydfxSa+tWZC1KabNitYs=
|
||||
github.com/huandu/go-assert v1.1.6/go.mod h1:JuIfbmYG9ykwvuxoJ3V8TB5QP+3+ajIA54Y44TmkMxs=
|
||||
github.com/huandu/go-sqlbuilder v1.35.0 h1:ESvxFHN8vxCTudY1Vq63zYpU5yJBESn19sf6k4v2T5Q=
|
||||
github.com/huandu/go-sqlbuilder v1.35.0/go.mod h1:mS0GAtrtW+XL6nM2/gXHRJax2RwSW1TraavWDFAc1JA=
|
||||
github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU=
|
||||
github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||
github.com/huandu/go-clone v1.7.3 h1:rtQODA+ABThEn6J5LBTppJfKmZy/FwfpMUWa8d01TTQ=
|
||||
github.com/huandu/go-clone v1.7.3/go.mod h1:ReGivhG6op3GYr+UY3lS6mxjKp7MIGTknuU5TbTVaXE=
|
||||
github.com/huandu/go-sqlbuilder v1.39.1 h1:uUaj41yLNTQBe7ojNF6Im1RPbHCN4zCjMRySTEC2ooI=
|
||||
github.com/huandu/go-sqlbuilder v1.39.1/go.mod h1:zdONH67liL+/TvoUMwnZP/sUYGSSvHh9psLe/HpXn8E=
|
||||
github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI=
|
||||
github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
|
||||
github.com/iancoleman/orderedmap v0.3.0 h1:5cbR2grmZR/DiVt+VJopEhtVs9YGInGIxAoMJn+Ichc=
|
||||
github.com/iancoleman/orderedmap v0.3.0/go.mod h1:XuLcCUkdL5owUCQeF2Ue9uuw1EptkJDkXXS7VoV7XGE=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
@@ -672,8 +675,8 @@ github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsI
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgx/v5 v5.7.6 h1:rWQc5FwZSPX58r1OQmkuaNicxdmExaEz5A2DO2hUuTk=
|
||||
github.com/jackc/pgx/v5 v5.7.6/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M=
|
||||
github.com/jackc/pgx/v5 v5.8.0 h1:TYPDoleBBme0xGSAX3/+NujXXtpZn9HBONkQC7IEZSo=
|
||||
github.com/jackc/pgx/v5 v5.8.0/go.mod h1:QVeDInX2m9VyzvNeiCJVjCkNFqzsNb43204HshNSZKw=
|
||||
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
|
||||
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
|
||||
github.com/jessevdk/go-flags v1.6.1 h1:Cvu5U8UGrLay1rZfv/zP7iLpSHGUZ/Ou68T0iX1bBK4=
|
||||
@@ -818,6 +821,8 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
|
||||
github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee h1:W5t00kpgFdJifH4BDsTlE89Zl93FEloxaWZfGcifgq8=
|
||||
github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/muhlemmer/gu v0.3.1 h1:7EAqmFrW7n3hETvuAdmFmn4hS8W+z3LgKtrnow+YzNM=
|
||||
github.com/muhlemmer/gu v0.3.1/go.mod h1:YHtHR+gxM+bKEIIs7Hmi9sPT3ZDUvTN/i88wQpZkrdM=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
@@ -827,9 +832,11 @@ github.com/natefinch/wrap v0.2.0 h1:IXzc/pw5KqxJv55gV0lSOcKHYuEZPGbQrOOXr/bamRk=
|
||||
github.com/natefinch/wrap v0.2.0/go.mod h1:6gMHlAl12DwYEfKP3TkuykYUfLSEAvHw67itm4/KAS8=
|
||||
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
|
||||
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
|
||||
github.com/nexucis/lamenv v0.5.2 h1:tK/u3XGhCq9qIoVNcXsK9LZb8fKopm0A5weqSRvHd7M=
|
||||
github.com/nexucis/lamenv v0.5.2/go.mod h1:HusJm6ltmmT7FMG8A750mOLuME6SHCsr2iFYxp5fFi0=
|
||||
github.com/npillmayer/nestext v0.1.3/go.mod h1:h2lrijH8jpicr25dFY+oAJLyzlya6jhnuG+zWp9L0Uk=
|
||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY=
|
||||
github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc=
|
||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
||||
github.com/oklog/run v1.2.0 h1:O8x3yXwah4A73hJdlrwo/2X6J62gE5qTMusH0dvz60E=
|
||||
github.com/oklog/run v1.2.0/go.mod h1:mgDbKRSwPhJfesJ4PntqFUbKQRZ50NgmZTSPlFA0YFk=
|
||||
@@ -891,6 +898,10 @@ github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCko
|
||||
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4/go.mod h1:2gIqNv+qfxSVS7cM2xJQKtLSTLUE9V8t9Stt+h56mCY=
|
||||
github.com/perses/common v0.30.2 h1:RAiVxUpX76lTCb4X7pfcXSvYdXQmZwKi4oDKAEO//u0=
|
||||
github.com/perses/common v0.30.2/go.mod h1:DFtur1QPah2/ChXbKKhw7djYdwNgz27s5fPKpiK0Xao=
|
||||
github.com/perses/perses v0.53.1 h1:9VY/6p9QWrZwPSV7qiwTMSOsgcB37Lb1AXKT0ORXc6I=
|
||||
github.com/perses/perses v0.53.1/go.mod h1:ro8fsgBkHYOdrL/MV+fdP9mflKzYCy/+gcbxiaReI/A=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4/v4 v4.1.23 h1:oJE7T90aYBGtFNrI8+KbETnPymobAhzRrR8Mu8n1yfU=
|
||||
github.com/pierrec/lz4/v4 v4.1.23/go.mod h1:EoQMVJgeeEOMsCqCzqFm2O0cJvljX2nGZjcRIPL34O4=
|
||||
@@ -1049,8 +1060,6 @@ github.com/srikanthccv/ClickHouse-go-mock v0.13.0 h1:/b7DQphGkh29ocNtLh4DGmQxQYA
|
||||
github.com/srikanthccv/ClickHouse-go-mock v0.13.0/go.mod h1:LiiyBUdXNwB/1DE9rgK/8q9qjVYsTzg6WXQ/3mU3TeY=
|
||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1 h1:Y/PcAgM7DPYMNqum0MLv4n1mF9ieuevzcCIZYQfm3Ts=
|
||||
github.com/stackitcloud/stackit-sdk-go/core v0.21.1/go.mod h1:osMglDby4csGZ5sIfhNyYq1bS1TxIdPY88+skE/kkmI=
|
||||
github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs=
|
||||
github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
@@ -1150,6 +1159,10 @@ github.com/zeebo/assert v1.3.1 h1:vukIABvugfNMZMQO1ABsyQDJDTVQbn+LWSMy1ol1h6A=
|
||||
github.com/zeebo/assert v1.3.1/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
|
||||
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
|
||||
github.com/zitadel/oidc/v3 v3.45.4 h1:GKyWaPRVQ8sCu9XgJ3NgNGtG52FzwVJpzXjIUG2+YrI=
|
||||
github.com/zitadel/oidc/v3 v3.45.4/go.mod h1:XALmFXS9/kSom9B6uWin1yJ2WTI/E4Ti5aXJdewAVEs=
|
||||
github.com/zitadel/schema v1.3.2 h1:gfJvt7dOMfTmxzhscZ9KkapKo3Nei3B6cAxjav+lyjI=
|
||||
github.com/zitadel/schema v1.3.2/go.mod h1:IZmdfF9Wu62Zu6tJJTH3UsArevs3Y4smfJIj3L8fzxw=
|
||||
go.etcd.io/etcd/api/v3 v3.5.4/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.4/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
||||
go.etcd.io/etcd/client/v2 v2.305.4/go.mod h1:Ud+VUwIi9/uQHOMA+4ekToJ12lTxlv0zB/+DHwTGEbU=
|
||||
@@ -1385,8 +1398,8 @@ golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm
|
||||
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.47.0 h1:V6e3FRj+n4dbpw86FJ8Fv7XVOql7TEwpHapKoMJ/GO8=
|
||||
golang.org/x/crypto v0.47.0/go.mod h1:ff3Y9VzzKbwSSEzWqJsJVBnWmRwRSHt/6Op5n9bQc4A=
|
||||
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
|
||||
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
@@ -1424,8 +1437,8 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
|
||||
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
@@ -1474,8 +1487,8 @@ golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su
|
||||
golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
|
||||
golang.org/x/net v0.49.0 h1:eeHFmOGUTtaaPSGNmjBKpbng9MulQsJURQUAfUwY++o=
|
||||
golang.org/x/net v0.49.0/go.mod h1:/ysNB2EvaqvesRkuLAyjI1ycPZlQHM3q01F02UY/MV8=
|
||||
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
|
||||
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@@ -1496,8 +1509,8 @@ golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ
|
||||
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
|
||||
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
|
||||
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
|
||||
golang.org/x/oauth2 v0.34.0 h1:hqK/t4AKgbqWkdkcAeI8XLmbK+4m4G5YeQRrmiotGlw=
|
||||
golang.org/x/oauth2 v0.34.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/oauth2 v0.35.0 h1:Mv2mzuHuZuY2+bkyWXIHMfhNdJAdwW3FuWeCPYN5GVQ=
|
||||
golang.org/x/oauth2 v0.35.0/go.mod h1:lzm5WQJQwKZ3nwavOZ3IS5Aulzxi68dUSgRHujetwEA=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -1598,12 +1611,12 @@ golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.40.0 h1:DBZZqJ2Rkml6QMQsZywtnjnnGvHza6BTfYFWY9kjEWQ=
|
||||
golang.org/x/sys v0.40.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.39.0 h1:RclSuaJf32jOqZz74CkPA9qFuVTX7vhLlpfj/IGWlqY=
|
||||
golang.org/x/term v0.39.0/go.mod h1:yxzUCTP/U+FzoxfdKmLaA0RV1WgE0VY7hXBwKtY/4ww=
|
||||
golang.org/x/term v0.40.0 h1:36e4zGLqU4yhjlmxEaagx2KuYbJq3EwY8K943ZsHcvg=
|
||||
golang.org/x/term v0.40.0/go.mod h1:w2P8uVp06p2iyKKuvXIm7N/y0UCRt3UfJTfZ7oOpglM=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
@@ -1614,8 +1627,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
|
||||
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
@@ -1678,8 +1691,8 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
|
||||
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
|
||||
golang.org/x/tools/godoc v0.1.0-deprecated h1:o+aZ1BOj6Hsx/GBdJO/s815sqftjSnrZZwyYTHODvtk=
|
||||
golang.org/x/tools/godoc v0.1.0-deprecated/go.mod h1:qM63CriJ961IHWmnWa9CjZnBndniPt4a3CK0PVB9bIg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -1915,12 +1928,12 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
k8s.io/api v0.35.0 h1:iBAU5LTyBI9vw3L5glmat1njFK34srdLmktWwLTprlY=
|
||||
k8s.io/api v0.35.0/go.mod h1:AQ0SNTzm4ZAczM03QH42c7l3bih1TbAXYo0DkF8ktnA=
|
||||
k8s.io/apimachinery v0.35.0 h1:Z2L3IHvPVv/MJ7xRxHEtk6GoJElaAqDCCU0S6ncYok8=
|
||||
k8s.io/apimachinery v0.35.0/go.mod h1:jQCgFZFR1F4Ik7hvr2g84RTJSZegBc8yHgFWKn//hns=
|
||||
k8s.io/client-go v0.35.0 h1:IAW0ifFbfQQwQmga0UdoH0yvdqrbwMdq9vIFEhRpxBE=
|
||||
k8s.io/client-go v0.35.0/go.mod h1:q2E5AAyqcbeLGPdoRB+Nxe3KYTfPce1Dnu1myQdqz9o=
|
||||
k8s.io/api v0.35.2 h1:tW7mWc2RpxW7HS4CoRXhtYHSzme1PN1UjGHJ1bdrtdw=
|
||||
k8s.io/api v0.35.2/go.mod h1:7AJfqGoAZcwSFhOjcGM7WV05QxMMgUaChNfLTXDRE60=
|
||||
k8s.io/apimachinery v0.35.2 h1:NqsM/mmZA7sHW02JZ9RTtk3wInRgbVxL8MPfzSANAK8=
|
||||
k8s.io/apimachinery v0.35.2/go.mod h1:jQCgFZFR1F4Ik7hvr2g84RTJSZegBc8yHgFWKn//hns=
|
||||
k8s.io/client-go v0.35.2 h1:YUfPefdGJA4aljDdayAXkc98DnPkIetMl4PrKX97W9o=
|
||||
k8s.io/client-go v0.35.2/go.mod h1:4QqEwh4oQpeK8AaefZ0jwTFJw/9kIjdQi0jpKeYvz7g=
|
||||
k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk=
|
||||
k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE=
|
||||
k8s.io/kube-openapi v0.0.0-20250910181357-589584f1c912 h1:Y3gxNAuB0OBLImH611+UDZcmKS3g6CthxToOb37KgwE=
|
||||
|
||||
@@ -279,7 +279,6 @@ func (store *store) SoftDeleteUser(ctx context.Context, orgID string, id string)
|
||||
_, err = tx.NewUpdate().
|
||||
Model(new(types.User)).
|
||||
Set("status = ?", types.UserStatusDeleted).
|
||||
Set("deleted_at = ?", now).
|
||||
Set("updated_at = ?", now).
|
||||
Where("org_id = ?", orgID).
|
||||
Where("id = ?", id).
|
||||
|
||||
@@ -124,8 +124,12 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any
|
||||
continue
|
||||
}
|
||||
|
||||
stepInterval, err := req.StepIntervalForQuery(name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
funcs := []qbtypes.Function{{Name: qbtypes.FunctionNameFillZero}}
|
||||
funcs = q.prepareFillZeroArgsWithStep(funcs, req, req.StepIntervalForQuery(name))
|
||||
funcs = q.prepareFillZeroArgsWithStep(funcs, req, stepInterval)
|
||||
// empty time series if it doesn't exist
|
||||
tsData, ok := typedResults[name].Value.(*qbtypes.TimeSeriesData)
|
||||
if !ok {
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||
@@ -73,22 +72,12 @@ func newProvider(
|
||||
traceFieldMapper := telemetrytraces.NewFieldMapper()
|
||||
traceConditionBuilder := telemetrytraces.NewConditionBuilder(traceFieldMapper)
|
||||
|
||||
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
|
||||
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
|
||||
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
|
||||
settings,
|
||||
resourceFilterFieldMapper,
|
||||
resourceFilterConditionBuilder,
|
||||
telemetryMetadataStore,
|
||||
)
|
||||
|
||||
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
|
||||
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
|
||||
settings,
|
||||
telemetryMetadataStore,
|
||||
traceFieldMapper,
|
||||
traceConditionBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
traceAggExprRewriter,
|
||||
telemetryStore,
|
||||
)
|
||||
@@ -99,22 +88,13 @@ func newProvider(
|
||||
telemetryMetadataStore,
|
||||
traceFieldMapper,
|
||||
traceConditionBuilder,
|
||||
traceStmtBuilder, // Pass the regular trace statement builder
|
||||
resourceFilterStmtBuilder, // Pass the resource filter statement builder
|
||||
traceStmtBuilder, // Pass the regular trace statement builder
|
||||
traceAggExprRewriter,
|
||||
)
|
||||
|
||||
// Create log statement builder
|
||||
logFieldMapper := telemetrylogs.NewFieldMapper()
|
||||
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
|
||||
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
|
||||
settings,
|
||||
resourceFilterFieldMapper,
|
||||
resourceFilterConditionBuilder,
|
||||
telemetryMetadataStore,
|
||||
telemetrylogs.DefaultFullTextColumn,
|
||||
telemetrylogs.GetBodyJSONKey,
|
||||
)
|
||||
logAggExprRewriter := querybuilder.NewAggExprRewriter(
|
||||
settings,
|
||||
telemetrylogs.DefaultFullTextColumn,
|
||||
@@ -127,7 +107,6 @@ func newProvider(
|
||||
telemetryMetadataStore,
|
||||
logFieldMapper,
|
||||
logConditionBuilder,
|
||||
logResourceFilterStmtBuilder,
|
||||
logAggExprRewriter,
|
||||
telemetrylogs.DefaultFullTextColumn,
|
||||
telemetrylogs.GetBodyJSONKey,
|
||||
|
||||
@@ -14,10 +14,11 @@ func ApplyHavingClause(result []*v3.Result, queryRangeParams *v3.QueryRangeParam
|
||||
builderQueries := queryRangeParams.CompositeQuery.BuilderQueries
|
||||
|
||||
// apply having clause for metrics and formula
|
||||
if builderQueries != nil &&
|
||||
(builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics ||
|
||||
builderQueries[result.QueryName].QueryName != builderQueries[result.QueryName].Expression) {
|
||||
havingClause := builderQueries[result.QueryName].Having
|
||||
builderQuery := builderQueries[result.QueryName]
|
||||
if builderQuery != nil &&
|
||||
(builderQuery.DataSource == v3.DataSourceMetrics ||
|
||||
builderQuery.QueryName != builderQuery.Expression) {
|
||||
havingClause := builderQuery.Having
|
||||
|
||||
for i := 0; i < len(result.Series); i++ {
|
||||
for j := 0; j < len(result.Series[i].Points); j++ {
|
||||
|
||||
@@ -312,6 +312,72 @@ func TestApplyHavingCaluse(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "query not in builder queries should not panic",
|
||||
results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
Series: []*v3.Series{
|
||||
{
|
||||
Points: []v3.Point{
|
||||
{Value: 1.0},
|
||||
{Value: 2.0},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
params: &v3.QueryRangeParamsV3{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{},
|
||||
},
|
||||
},
|
||||
want: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
Series: []*v3.Series{
|
||||
{
|
||||
Points: []v3.Point{
|
||||
{Value: 1.0},
|
||||
{Value: 2.0},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "nil builder queries should not panic",
|
||||
results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
Series: []*v3.Series{
|
||||
{
|
||||
Points: []v3.Point{
|
||||
{Value: 1.0},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
params: &v3.QueryRangeParamsV3{
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
BuilderQueries: nil,
|
||||
},
|
||||
},
|
||||
want: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
Series: []*v3.Series{
|
||||
{
|
||||
Points: []v3.Point{
|
||||
{Value: 1.0},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
@@ -66,19 +65,8 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
|
||||
}
|
||||
metadataStore.KeysMap = keysMap
|
||||
|
||||
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
|
||||
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
|
||||
|
||||
logFieldMapper := telemetrylogs.NewFieldMapper()
|
||||
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
|
||||
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
|
||||
providerSettings,
|
||||
resourceFilterFieldMapper,
|
||||
resourceFilterConditionBuilder,
|
||||
metadataStore,
|
||||
telemetrylogs.DefaultFullTextColumn,
|
||||
telemetrylogs.GetBodyJSONKey,
|
||||
)
|
||||
logAggExprRewriter := querybuilder.NewAggExprRewriter(
|
||||
providerSettings,
|
||||
telemetrylogs.DefaultFullTextColumn,
|
||||
@@ -91,7 +79,6 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
|
||||
metadataStore,
|
||||
logFieldMapper,
|
||||
logConditionBuilder,
|
||||
logResourceFilterStmtBuilder,
|
||||
logAggExprRewriter,
|
||||
telemetrylogs.DefaultFullTextColumn,
|
||||
telemetrylogs.GetBodyJSONKey,
|
||||
@@ -127,22 +114,12 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM
|
||||
traceFieldMapper := telemetrytraces.NewFieldMapper()
|
||||
traceConditionBuilder := telemetrytraces.NewConditionBuilder(traceFieldMapper)
|
||||
|
||||
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
|
||||
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
|
||||
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
|
||||
providerSettings,
|
||||
resourceFilterFieldMapper,
|
||||
resourceFilterConditionBuilder,
|
||||
metadataStore,
|
||||
)
|
||||
|
||||
traceAggExprRewriter := querybuilder.NewAggExprRewriter(providerSettings, nil, traceFieldMapper, traceConditionBuilder, nil)
|
||||
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
|
||||
providerSettings,
|
||||
metadataStore,
|
||||
traceFieldMapper,
|
||||
traceConditionBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
traceAggExprRewriter,
|
||||
telemetryStore,
|
||||
)
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
package resourcefilter
|
||||
|
||||
const (
|
||||
TracesDBName = "signoz_traces"
|
||||
TraceResourceV3TableName = "distributed_traces_v3_resource"
|
||||
LogsDBName = "signoz_logs"
|
||||
LogsResourceV2TableName = "distributed_logs_v2_resource"
|
||||
)
|
||||
@@ -104,8 +104,15 @@ func extractCHOriginFieldFromQuery(query string) (string, error) {
|
||||
return "", errors.NewInternalf(errors.CodeInternal, "failed to parse origin field from query: %s", err.Error())
|
||||
}
|
||||
|
||||
if len(stmts) == 0 {
|
||||
return "", errors.NewInternalf(errors.CodeInternal, "no statements found in query")
|
||||
}
|
||||
|
||||
// Get the first statement which should be a SELECT
|
||||
selectStmt := stmts[0].(*parser.SelectQuery)
|
||||
selectStmt, ok := stmts[0].(*parser.SelectQuery)
|
||||
if !ok {
|
||||
return "", errors.NewInternalf(errors.CodeInternal, "expected SELECT query, got %T", stmts[0])
|
||||
}
|
||||
|
||||
// If query has multiple select items, return blank string as we don't expect multiple select items
|
||||
if len(selectStmt.SelectItems) > 1 {
|
||||
|
||||
@@ -2,6 +2,7 @@ package queryparser
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"strings"
|
||||
|
||||
@@ -23,7 +24,15 @@ func New(settings factory.ProviderSettings) QueryParser {
|
||||
}
|
||||
}
|
||||
|
||||
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType qbtypes.QueryType, query string) (*queryfilterextractor.FilterResult, error) {
|
||||
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType qbtypes.QueryType, query string) (result *queryfilterextractor.FilterResult, err error) {
|
||||
// the third-party clickhouse sql parser can panic on certain inputs, recover gracefully
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
result = nil
|
||||
err = errors.NewInternalf(errors.CodeInternal, "failed to analyze query filter: %s", fmt.Sprint(r))
|
||||
}
|
||||
}()
|
||||
|
||||
var extractorType queryfilterextractor.ExtractorType
|
||||
switch queryType {
|
||||
case qbtypes.QueryTypePromQL:
|
||||
|
||||
@@ -194,6 +194,7 @@ func NewSQLMigrationProviderFactories(
|
||||
sqlmigration.NewAddServiceAccountFactory(sqlstore, sqlschema),
|
||||
sqlmigration.NewDeprecateAPIKeyFactory(sqlstore, sqlschema),
|
||||
sqlmigration.NewServiceAccountAuthzactory(sqlstore),
|
||||
sqlmigration.NewDropUserDeletedAtFactory(sqlstore, sqlschema),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
84
pkg/sqlmigration/076_drop_user_deleted_at.go
Normal file
84
pkg/sqlmigration/076_drop_user_deleted_at.go
Normal file
@@ -0,0 +1,84 @@
|
||||
package sqlmigration
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun"
|
||||
"github.com/uptrace/bun/migrate"
|
||||
)
|
||||
|
||||
type dropUserDeletedAt struct {
|
||||
sqlstore sqlstore.SQLStore
|
||||
sqlschema sqlschema.SQLSchema
|
||||
}
|
||||
|
||||
func NewDropUserDeletedAtFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("drop_user_deleted_at"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
|
||||
return &dropUserDeletedAt{sqlstore: sqlstore, sqlschema: sqlschema}, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (migration *dropUserDeletedAt) Register(migrations *migrate.Migrations) error {
|
||||
if err := migrations.Register(migration.Up, migration.Down); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *dropUserDeletedAt) Up(ctx context.Context, db *bun.DB) error {
|
||||
tx, err := db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defer func() {
|
||||
_ = tx.Rollback()
|
||||
}()
|
||||
|
||||
table, _, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletedAtColumn := &sqlschema.Column{
|
||||
Name: sqlschema.ColumnName("deleted_at"),
|
||||
DataType: sqlschema.DataTypeTimestamp,
|
||||
Nullable: false,
|
||||
}
|
||||
|
||||
sqls := [][]byte{}
|
||||
|
||||
dropIndexSQLs := migration.sqlschema.Operator().DropIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"org_id", "email", "deleted_at"}})
|
||||
sqls = append(sqls, dropIndexSQLs...)
|
||||
|
||||
dropSQLs := migration.sqlschema.Operator().DropColumn(table, deletedAtColumn)
|
||||
sqls = append(sqls, dropSQLs...)
|
||||
|
||||
indexSQLs := migration.sqlschema.Operator().CreateIndex(
|
||||
&sqlschema.PartialUniqueIndex{
|
||||
TableName: "users",
|
||||
ColumnNames: []sqlschema.ColumnName{"email", "org_id"},
|
||||
Where: "status != 'deleted'",
|
||||
})
|
||||
sqls = append(sqls, indexSQLs...)
|
||||
|
||||
for _, sql := range sqls {
|
||||
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (migration *dropUserDeletedAt) Down(context.Context, *bun.DB) error {
|
||||
return nil
|
||||
}
|
||||
@@ -7,7 +7,6 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
@@ -833,25 +832,13 @@ func buildJSONTestStatementBuilder(t *testing.T) *logQueryStatementBuilder {
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
resourceFilterFM := resourcefilter.NewFieldMapper()
|
||||
resourceFilterCB := resourcefilter.NewConditionBuilder(resourceFilterFM)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
resourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
resourceFilterFM,
|
||||
resourceFilterCB,
|
||||
mockMetadataStore,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
@@ -33,13 +34,22 @@ func NewLogQueryStatementBuilder(
|
||||
metadataStore telemetrytypes.MetadataStore,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||
aggExprRewriter qbtypes.AggExprRewriter,
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *logQueryStatementBuilder {
|
||||
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
|
||||
|
||||
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.LogAggregation](
|
||||
settings,
|
||||
DBName,
|
||||
LogsResourceV2TableName,
|
||||
telemetrytypes.SignalLogs,
|
||||
metadataStore,
|
||||
fullTextColumn,
|
||||
jsonKeyToKey,
|
||||
)
|
||||
|
||||
return &logQueryStatementBuilder{
|
||||
logger: logsSettings.Logger(),
|
||||
metadataStore: metadataStore,
|
||||
|
||||
@@ -8,35 +8,12 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation] {
|
||||
fm := resourcefilter.NewFieldMapper()
|
||||
cb := resourcefilter.NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
keysMap := buildCompleteFieldKeyMap(time.Now())
|
||||
for _, keys := range keysMap {
|
||||
for _, key := range keys {
|
||||
key.Signal = telemetrytypes.SignalLogs
|
||||
}
|
||||
}
|
||||
mockMetadataStore.KeysMap = keysMap
|
||||
|
||||
return resourcefilter.NewLogResourceFilterStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
fm,
|
||||
cb,
|
||||
mockMetadataStore,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
}
|
||||
|
||||
func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
|
||||
// Create a test release time
|
||||
@@ -225,14 +202,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -349,14 +323,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -492,14 +463,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -569,14 +537,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -665,14 +630,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -890,14 +852,11 @@ func TestAdjustKey(t *testing.T) {
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -1045,13 +1004,11 @@ func TestStmtBuilderBodyField(t *testing.T) {
|
||||
mockMetadataStore.KeysMap[field.Name] = append(mockMetadataStore.KeysMap[field.Name], &f)
|
||||
}
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
@@ -1135,13 +1092,11 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
|
||||
mockMetadataStore.KeysMap[field.Name] = append(mockMetadataStore.KeysMap[field.Name], &f)
|
||||
}
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
|
||||
@@ -8,6 +8,7 @@ const (
|
||||
TagAttributesV2LocalTableName = "tag_attributes_v2"
|
||||
LogAttributeKeysTblName = "distributed_logs_attribute_keys"
|
||||
LogResourceKeysTblName = "distributed_logs_resource_keys"
|
||||
LogsResourceV2TableName = "distributed_logs_v2_resource"
|
||||
PathTypesTableName = "distributed_json_path_types"
|
||||
PromotedPathsTableName = "distributed_json_promoted_paths"
|
||||
SkipIndexTableName = "system.data_skipping_indices"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
package resourcefilter
|
||||
package telemetryresourcefilter
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -1,4 +1,4 @@
|
||||
package resourcefilter
|
||||
package telemetryresourcefilter
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -1,4 +1,4 @@
|
||||
package resourcefilter
|
||||
package telemetryresourcefilter
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -1,11 +1,10 @@
|
||||
package resourcefilter
|
||||
package telemetryresourcefilter
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
@@ -13,30 +12,11 @@ import (
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
)
|
||||
|
||||
var (
|
||||
ErrUnsupportedSignal = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported signal type")
|
||||
)
|
||||
|
||||
// Configuration for different signal types.
|
||||
type signalConfig struct {
|
||||
dbName string
|
||||
tableName string
|
||||
}
|
||||
|
||||
var signalConfigs = map[telemetrytypes.Signal]signalConfig{
|
||||
telemetrytypes.SignalTraces: {
|
||||
dbName: TracesDBName,
|
||||
tableName: TraceResourceV3TableName,
|
||||
},
|
||||
telemetrytypes.SignalLogs: {
|
||||
dbName: LogsDBName,
|
||||
tableName: LogsResourceV2TableName,
|
||||
},
|
||||
}
|
||||
|
||||
// Generic resource filter statement builder.
|
||||
// resourceFilterStatementBuilder builds resource fingerprint filter CTEs.
|
||||
type resourceFilterStatementBuilder[T any] struct {
|
||||
logger *slog.Logger
|
||||
dbName string
|
||||
tableName string
|
||||
fieldMapper qbtypes.FieldMapper
|
||||
conditionBuilder qbtypes.ConditionBuilder
|
||||
metadataStore telemetrytypes.MetadataStore
|
||||
@@ -52,38 +32,26 @@ var (
|
||||
_ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*resourceFilterStatementBuilder[qbtypes.LogAggregation])(nil)
|
||||
)
|
||||
|
||||
// NewTraceResourceFilterStatementBuilder creates a new trace resource filter statement builder.
|
||||
func NewTraceResourceFilterStatementBuilder(
|
||||
func New[T any](
|
||||
settings factory.ProviderSettings,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
metadataStore telemetrytypes.MetadataStore,
|
||||
) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] {
|
||||
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
|
||||
return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{
|
||||
logger: set.Logger(),
|
||||
fieldMapper: fieldMapper,
|
||||
conditionBuilder: conditionBuilder,
|
||||
metadataStore: metadataStore,
|
||||
signal: telemetrytypes.SignalTraces,
|
||||
}
|
||||
}
|
||||
|
||||
func NewLogResourceFilterStatementBuilder(
|
||||
settings factory.ProviderSettings,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
dbName string,
|
||||
tableName string,
|
||||
signal telemetrytypes.Signal,
|
||||
metadataStore telemetrytypes.MetadataStore,
|
||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
|
||||
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
|
||||
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
|
||||
) *resourceFilterStatementBuilder[T] {
|
||||
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetryresourcefilter")
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
return &resourceFilterStatementBuilder[T]{
|
||||
logger: set.Logger(),
|
||||
fieldMapper: fieldMapper,
|
||||
conditionBuilder: conditionBuilder,
|
||||
dbName: dbName,
|
||||
tableName: tableName,
|
||||
fieldMapper: fm,
|
||||
conditionBuilder: cb,
|
||||
metadataStore: metadataStore,
|
||||
signal: telemetrytypes.SignalLogs,
|
||||
signal: signal,
|
||||
fullTextColumn: fullTextColumn,
|
||||
jsonKeyToKey: jsonKeyToKey,
|
||||
}
|
||||
@@ -120,14 +88,9 @@ func (b *resourceFilterStatementBuilder[T]) Build(
|
||||
query qbtypes.QueryBuilderQuery[T],
|
||||
variables map[string]qbtypes.VariableItem,
|
||||
) (*qbtypes.Statement, error) {
|
||||
config, exists := signalConfigs[b.signal]
|
||||
if !exists {
|
||||
return nil, errors.WrapInvalidInputf(ErrUnsupportedSignal, errors.CodeInvalidInput, "unsupported signal: %s", b.signal)
|
||||
}
|
||||
|
||||
q := sqlbuilder.NewSelectBuilder()
|
||||
q.Select("fingerprint")
|
||||
q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName))
|
||||
q.From(fmt.Sprintf("%s.%s", b.dbName, b.tableName))
|
||||
|
||||
keySelectors := b.getKeySelectors(query)
|
||||
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
|
||||
@@ -1,4 +1,4 @@
|
||||
package resourcefilter
|
||||
package telemetryresourcefilter
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -367,16 +367,17 @@ func TestResourceFilterStatementBuilder_Traces(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildTestFieldKeyMap(telemetrytypes.SignalTraces)
|
||||
|
||||
builder := NewTraceResourceFilterStatementBuilder(
|
||||
builder := New[qbtypes.TraceAggregation](
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
fm,
|
||||
cb,
|
||||
"signoz_traces",
|
||||
"distributed_traces_v3_resource",
|
||||
telemetrytypes.SignalTraces,
|
||||
mockMetadataStore,
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
|
||||
for _, c := range cases {
|
||||
@@ -583,15 +584,14 @@ func TestResourceFilterStatementBuilder_Logs(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildTestFieldKeyMap(telemetrytypes.SignalLogs)
|
||||
|
||||
builder := NewLogResourceFilterStatementBuilder(
|
||||
builder := New[qbtypes.LogAggregation](
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
fm,
|
||||
cb,
|
||||
"signoz_logs",
|
||||
"distributed_logs_v2_resource",
|
||||
telemetrytypes.SignalLogs,
|
||||
mockMetadataStore,
|
||||
nil,
|
||||
nil,
|
||||
@@ -645,16 +645,17 @@ func TestResourceFilterStatementBuilder_Variables(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildTestFieldKeyMap(telemetrytypes.SignalTraces)
|
||||
|
||||
builder := NewTraceResourceFilterStatementBuilder(
|
||||
builder := New[qbtypes.TraceAggregation](
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
fm,
|
||||
cb,
|
||||
"signoz_traces",
|
||||
"distributed_traces_v3_resource",
|
||||
telemetrytypes.SignalTraces,
|
||||
mockMetadataStore,
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
|
||||
for _, c := range cases {
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
@@ -38,11 +39,21 @@ func NewTraceQueryStatementBuilder(
|
||||
metadataStore telemetrytypes.MetadataStore,
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||
aggExprRewriter qbtypes.AggExprRewriter,
|
||||
telemetryStore telemetrystore.TelemetryStore,
|
||||
) *traceQueryStatementBuilder {
|
||||
tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
|
||||
|
||||
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.TraceAggregation](
|
||||
settings,
|
||||
DBName,
|
||||
TracesResourceV3TableName,
|
||||
telemetrytypes.SignalTraces,
|
||||
metadataStore,
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
|
||||
return &traceQueryStatementBuilder{
|
||||
logger: tracesSettings.Logger(),
|
||||
metadataStore: metadataStore,
|
||||
|
||||
@@ -8,27 +8,12 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.TraceAggregation] {
|
||||
fm := resourcefilter.NewFieldMapper()
|
||||
cb := resourcefilter.NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
|
||||
return resourcefilter.NewTraceResourceFilterStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
fm,
|
||||
cb,
|
||||
mockMetadataStore,
|
||||
)
|
||||
}
|
||||
|
||||
func TestStatementBuilder(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
@@ -372,14 +357,11 @@ func TestStatementBuilder(t *testing.T) {
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -668,14 +650,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -778,14 +757,11 @@ func TestStatementBuilderListQueryWithCorruptData(t *testing.T) {
|
||||
}
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -931,14 +907,11 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -1147,14 +1120,11 @@ func TestAdjustKey(t *testing.T) {
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -1422,14 +1392,11 @@ func TestAdjustKeys(t *testing.T) {
|
||||
cb := NewConditionBuilder(fm)
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
|
||||
@@ -9,4 +9,5 @@ const (
|
||||
TopLevelOperationsTableName = "distributed_top_level_operations"
|
||||
TraceSummaryTableName = "distributed_trace_summary"
|
||||
SpanAttributesKeysTblName = "distributed_span_attributes_keys"
|
||||
TracesResourceV3TableName = "distributed_traces_v3_resource"
|
||||
)
|
||||
|
||||
@@ -392,13 +392,11 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
traceStmtBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -409,7 +407,6 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
|
||||
fm,
|
||||
cb,
|
||||
traceStmtBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
)
|
||||
|
||||
@@ -508,13 +505,11 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
traceStmtBuilder := NewTraceQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil,
|
||||
)
|
||||
@@ -525,7 +520,6 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
|
||||
fm,
|
||||
cb,
|
||||
traceStmtBuilder,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -29,10 +30,20 @@ func NewTraceOperatorStatementBuilder(
|
||||
fieldMapper qbtypes.FieldMapper,
|
||||
conditionBuilder qbtypes.ConditionBuilder,
|
||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||
aggExprRewriter qbtypes.AggExprRewriter,
|
||||
) *traceOperatorStatementBuilder {
|
||||
tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
|
||||
|
||||
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.TraceAggregation](
|
||||
settings,
|
||||
DBName,
|
||||
TracesResourceV3TableName,
|
||||
telemetrytypes.SignalTraces,
|
||||
metadataStore,
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
|
||||
return &traceOperatorStatementBuilder{
|
||||
logger: tracesSettings.Logger(),
|
||||
metadataStore: metadataStore,
|
||||
|
||||
@@ -7,7 +7,6 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||
@@ -35,15 +34,6 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
}}
|
||||
|
||||
resourceFilterFM := resourcefilter.NewFieldMapper()
|
||||
resourceFilterCB := resourcefilter.NewConditionBuilder(resourceFilterFM)
|
||||
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
resourceFilterFM,
|
||||
resourceFilterCB,
|
||||
mockMetadataStore,
|
||||
)
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
|
||||
statementBuilder := NewTraceQueryStatementBuilder(
|
||||
@@ -51,7 +41,6 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
nil, // telemetryStore is nil - optimization won't happen but code path is tested
|
||||
)
|
||||
|
||||
366
pkg/types/dashboardtypes/dashboard_v2.go
Normal file
366
pkg/types/dashboardtypes/dashboard_v2.go
Normal file
@@ -0,0 +1,366 @@
|
||||
package dashboardtypes
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"slices"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
qb "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/go-playground/validator/v10"
|
||||
v1 "github.com/perses/perses/pkg/model/api/v1"
|
||||
"github.com/perses/perses/pkg/model/api/v1/common"
|
||||
"github.com/uptrace/bun"
|
||||
)
|
||||
|
||||
type StorableDashboardDataV2 = v1.Dashboard
|
||||
|
||||
type StorableDashboardV2 struct {
|
||||
bun.BaseModel `bun:"table:dashboard,alias:dashboard"`
|
||||
|
||||
types.Identifiable
|
||||
// TimeAuditable is not embedded here — CreatedAt/UpdatedAt live in
|
||||
// Data.Metadata (Perses's ProjectMetadata) to avoid duplication.
|
||||
types.UserAuditable
|
||||
Data StorableDashboardDataV2 `bun:"data,type:text,notnull"`
|
||||
Locked bool `bun:"locked,notnull,default:false"`
|
||||
OrgID valuer.UUID `bun:"org_id,notnull"`
|
||||
Image string `bun:"image"`
|
||||
Tags []string `bun:"tags,array"`
|
||||
UploadedGrafana bool `bun:"uploaded_grafana,notnull,default:false"`
|
||||
Version string `bun:"version"`
|
||||
}
|
||||
|
||||
type DashboardV2 struct {
|
||||
// TimeAuditable is not embedded here — CreatedAt/UpdatedAt live in
|
||||
// Data.Metadata (Perses's ProjectMetadata) to avoid duplication.
|
||||
types.UserAuditable
|
||||
|
||||
ID string `json:"id"`
|
||||
Data StorableDashboardDataV2 `json:"data"`
|
||||
Locked bool `json:"locked"`
|
||||
OrgID valuer.UUID `json:"org_id"`
|
||||
Image string `json:"image,omitempty"`
|
||||
Tags []string `json:"tags,omitempty"`
|
||||
UploadedGrafana bool `json:"uploadedGrafana,omitempty"`
|
||||
Version string `json:"version,omitempty"`
|
||||
}
|
||||
|
||||
type (
|
||||
GettableDashboardV2 = DashboardV2
|
||||
|
||||
UpdatableDashboardV2 = StorableDashboardDataV2
|
||||
|
||||
PostableDashboardV2 = StorableDashboardDataV2
|
||||
|
||||
ListableDashboardV2 []*GettableDashboardV2
|
||||
)
|
||||
|
||||
func NewStorableDashboardV2FromDashboardV2(dashboard *DashboardV2) (*StorableDashboardV2, error) {
|
||||
dashboardID, err := valuer.NewUUID(dashboard.ID)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "id is not a valid uuid")
|
||||
}
|
||||
|
||||
return &StorableDashboardV2{
|
||||
Identifiable: types.Identifiable{
|
||||
ID: dashboardID,
|
||||
},
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: dashboard.CreatedBy,
|
||||
UpdatedBy: dashboard.UpdatedBy,
|
||||
},
|
||||
OrgID: dashboard.OrgID,
|
||||
Data: dashboard.Data,
|
||||
Locked: dashboard.Locked,
|
||||
Image: dashboard.Image,
|
||||
Tags: dashboard.Tags,
|
||||
UploadedGrafana: dashboard.UploadedGrafana,
|
||||
Version: dashboard.Version,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func NewDashboardV2(orgID valuer.UUID, createdBy string, data StorableDashboardDataV2) (*DashboardV2, error) {
|
||||
currentTime := time.Now()
|
||||
data.Metadata.CreatedAt = currentTime
|
||||
data.Metadata.UpdatedAt = currentTime
|
||||
|
||||
return &DashboardV2{
|
||||
ID: valuer.GenerateUUID().StringValue(),
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: createdBy,
|
||||
UpdatedBy: createdBy,
|
||||
},
|
||||
OrgID: orgID,
|
||||
Data: data,
|
||||
Locked: false,
|
||||
Version: "v6",
|
||||
}, nil
|
||||
}
|
||||
|
||||
func NewDashboardV2FromStorableDashboard(storableDashboard *StorableDashboardV2) *DashboardV2 {
|
||||
return &DashboardV2{
|
||||
ID: storableDashboard.ID.StringValue(),
|
||||
UserAuditable: types.UserAuditable{
|
||||
CreatedBy: storableDashboard.CreatedBy,
|
||||
UpdatedBy: storableDashboard.UpdatedBy,
|
||||
},
|
||||
OrgID: storableDashboard.OrgID,
|
||||
Data: storableDashboard.Data,
|
||||
Locked: storableDashboard.Locked,
|
||||
Image: storableDashboard.Image,
|
||||
Tags: storableDashboard.Tags,
|
||||
UploadedGrafana: storableDashboard.UploadedGrafana,
|
||||
Version: storableDashboard.Version,
|
||||
}
|
||||
}
|
||||
|
||||
func NewDashboardsV2FromStorableDashboards(storableDashboards []*StorableDashboardV2) []*DashboardV2 {
|
||||
dashboards := make([]*DashboardV2, len(storableDashboards))
|
||||
for idx, storableDashboard := range storableDashboards {
|
||||
dashboards[idx] = NewDashboardV2FromStorableDashboard(storableDashboard)
|
||||
}
|
||||
return dashboards
|
||||
}
|
||||
|
||||
func NewGettableDashboardsV2FromDashboards(dashboards []*DashboardV2) ([]*GettableDashboardV2, error) {
|
||||
gettableDashboards := make([]*GettableDashboardV2, len(dashboards))
|
||||
for idx, d := range dashboards {
|
||||
gettableDashboard, err := NewGettableDashboardV2FromDashboard(d)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
gettableDashboards[idx] = gettableDashboard
|
||||
}
|
||||
return gettableDashboards, nil
|
||||
}
|
||||
|
||||
func NewGettableDashboardV2FromDashboard(dashboard *DashboardV2) (*GettableDashboardV2, error) {
|
||||
return &GettableDashboardV2{
|
||||
ID: dashboard.ID,
|
||||
UserAuditable: dashboard.UserAuditable,
|
||||
OrgID: dashboard.OrgID,
|
||||
Data: dashboard.Data,
|
||||
Locked: dashboard.Locked,
|
||||
Image: dashboard.Image,
|
||||
Tags: dashboard.Tags,
|
||||
UploadedGrafana: dashboard.UploadedGrafana,
|
||||
Version: dashboard.Version,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (dashboard *DashboardV2) Update(ctx context.Context, updatableDashboard UpdatableDashboardV2, updatedBy string) error {
|
||||
if dashboard.Locked {
|
||||
return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "cannot update a locked dashboard, please unlock the dashboard to update")
|
||||
}
|
||||
dashboard.UpdatedBy = updatedBy
|
||||
updatableDashboard.Metadata.UpdatedAt = time.Now()
|
||||
dashboard.Data = updatableDashboard
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dashboard *DashboardV2) LockUnlock(lock bool, role types.Role, updatedBy string) error {
|
||||
if dashboard.CreatedBy != updatedBy && role != types.RoleAdmin {
|
||||
return errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "you are not authorized to lock/unlock this dashboard")
|
||||
}
|
||||
dashboard.Locked = lock
|
||||
dashboard.UpdatedBy = updatedBy
|
||||
dashboard.Data.Metadata.UpdatedAt = time.Now()
|
||||
return nil
|
||||
}
|
||||
|
||||
// ValidateDashboardV2JSON validates a dashboard v2 JSON by unmarshalling into typed structs
|
||||
// and then validating plugin kinds and specs.
|
||||
func ValidateDashboardV2JSON(data []byte) error {
|
||||
var d StorableDashboardDataV2
|
||||
if err := json.Unmarshal(data, &d); err != nil {
|
||||
return err
|
||||
}
|
||||
return validateDashboardV2(d)
|
||||
}
|
||||
|
||||
// Plugin kind → spec type factory. Each value is a pointer to the zero value of the
|
||||
// expected spec struct. validatePluginSpec marshals plugin.Spec back to JSON and
|
||||
// unmarshals into the typed struct to catch field-level errors.
|
||||
var (
|
||||
panelPluginSpecs = map[string]func() any{
|
||||
PanelKindTimeSeries: func() any { return new(TimeSeriesPanelSpec) },
|
||||
PanelKindBarChart: func() any { return new(BarChartPanelSpec) },
|
||||
PanelKindNumber: func() any { return new(NumberPanelSpec) },
|
||||
PanelKindPieChart: func() any { return new(PieChartPanelSpec) },
|
||||
PanelKindTable: func() any { return new(TablePanelSpec) },
|
||||
PanelKindHistogram: func() any { return new(HistogramPanelSpec) },
|
||||
PanelKindList: func() any { return new(ListPanelSpec) },
|
||||
}
|
||||
queryPluginSpecs = map[string]func() any{
|
||||
QueryKindBuilder: func() any { return new(BuilderQuerySpec) },
|
||||
QueryKindComposite: func() any { return new(CompositeQuerySpec) },
|
||||
QueryKindFormula: func() any { return new(FormulaSpec) },
|
||||
QueryKindPromQL: func() any { return new(PromQLQuerySpec) },
|
||||
QueryKindClickHouseSQL: func() any { return new(ClickHouseSQLQuerySpec) },
|
||||
QueryKindTraceOperator: func() any { return new(TraceOperatorSpec) },
|
||||
}
|
||||
variablePluginSpecs = map[string]func() any{
|
||||
VariableKindDynamic: func() any { return new(DynamicVariableSpec) },
|
||||
VariableKindQuery: func() any { return new(QueryVariableSpec) },
|
||||
VariableKindCustom: func() any { return new(CustomVariableSpec) },
|
||||
VariableKindTextbox: func() any { return new(TextboxVariableSpec) },
|
||||
}
|
||||
datasourcePluginSpecs = map[string]func() any{
|
||||
DatasourceKindSigNoz: func() any { return new(struct{}) },
|
||||
}
|
||||
|
||||
// allowedQueryKinds maps each panel plugin kind to the query plugin
|
||||
// kinds it supports. Composite sub-query types are mapped to these
|
||||
// same kind strings via compositeSubQueryTypeToPluginKind.
|
||||
allowedQueryKinds = map[string][]string{
|
||||
PanelKindTimeSeries: {QueryKindBuilder, QueryKindComposite, QueryKindFormula, QueryKindTraceOperator, QueryKindPromQL, QueryKindClickHouseSQL},
|
||||
PanelKindBarChart: {QueryKindBuilder, QueryKindComposite, QueryKindFormula, QueryKindTraceOperator, QueryKindPromQL, QueryKindClickHouseSQL},
|
||||
PanelKindNumber: {QueryKindBuilder, QueryKindComposite, QueryKindFormula, QueryKindTraceOperator, QueryKindPromQL, QueryKindClickHouseSQL},
|
||||
PanelKindHistogram: {QueryKindBuilder, QueryKindComposite, QueryKindFormula, QueryKindTraceOperator, QueryKindPromQL, QueryKindClickHouseSQL},
|
||||
PanelKindPieChart: {QueryKindBuilder, QueryKindComposite, QueryKindFormula, QueryKindTraceOperator, QueryKindClickHouseSQL},
|
||||
PanelKindTable: {QueryKindBuilder, QueryKindComposite, QueryKindFormula, QueryKindTraceOperator, QueryKindClickHouseSQL},
|
||||
PanelKindList: {QueryKindBuilder},
|
||||
}
|
||||
|
||||
// compositeSubQueryTypeToPluginKind maps CompositeQuery sub-query type
|
||||
// strings to the equivalent top-level query plugin kind for validation.
|
||||
compositeSubQueryTypeToPluginKind = map[string]string{
|
||||
qb.QueryTypeBuilder.StringValue(): QueryKindBuilder,
|
||||
qb.QueryTypeFormula.StringValue(): QueryKindFormula,
|
||||
qb.QueryTypeTraceOperator.StringValue(): QueryKindTraceOperator,
|
||||
qb.QueryTypePromQL.StringValue(): QueryKindPromQL,
|
||||
qb.QueryTypeClickHouseSQL.StringValue(): QueryKindClickHouseSQL,
|
||||
}
|
||||
)
|
||||
|
||||
func validateDashboardV2(d StorableDashboardDataV2) error {
|
||||
// Validate datasource plugins.
|
||||
for name, ds := range d.Spec.Datasources {
|
||||
if err := validatePlugin(ds.Plugin, datasourcePluginSpecs, fmt.Sprintf("spec.datasources.%s.plugin", name)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Validate variable plugins (only ListVariables have plugins; TextVariables do not).
|
||||
for i, v := range d.Spec.Variables {
|
||||
plugin, err := extractPluginFromVariable(v)
|
||||
if err != nil {
|
||||
return errors.WrapInvalidInputf(err, ErrCodeDashboardInvalidInput, "spec.variables[%d]", i)
|
||||
}
|
||||
if plugin == nil {
|
||||
continue
|
||||
}
|
||||
if err := validatePlugin(*plugin, variablePluginSpecs, fmt.Sprintf("spec.variables[%d].spec.plugin", i)); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Validate panel and query plugins.
|
||||
for key, panel := range d.Spec.Panels {
|
||||
if panel == nil {
|
||||
continue
|
||||
}
|
||||
path := fmt.Sprintf("spec.panels.%s", key)
|
||||
if err := validatePlugin(panel.Spec.Plugin, panelPluginSpecs, path+".spec.plugin"); err != nil {
|
||||
return err
|
||||
}
|
||||
allowed := allowedQueryKinds[panel.Spec.Plugin.Kind]
|
||||
for qi, query := range panel.Spec.Queries {
|
||||
queryPath := fmt.Sprintf("%s.spec.queries[%d].spec.plugin", path, qi)
|
||||
if err := validatePlugin(query.Spec.Plugin, queryPluginSpecs, queryPath); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := validateQueryAllowedForPanel(query.Spec.Plugin, allowed, panel.Spec.Plugin.Kind, queryPath); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func validatePlugin(plugin common.Plugin, specs map[string]func() any, path string) error {
|
||||
if plugin.Kind == "" {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "%s: plugin kind is required", path)
|
||||
}
|
||||
factory, ok := specs[plugin.Kind]
|
||||
if !ok {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "%s: unknown plugin kind %q", path, plugin.Kind)
|
||||
}
|
||||
if plugin.Spec == nil {
|
||||
return nil
|
||||
}
|
||||
// Re-marshal the spec and unmarshal into the typed struct.
|
||||
specJSON, err := json.Marshal(plugin.Spec)
|
||||
if err != nil {
|
||||
return errors.WrapInvalidInputf(err, ErrCodeDashboardInvalidInput, "%s.spec", path)
|
||||
}
|
||||
target := factory()
|
||||
if err := json.Unmarshal(specJSON, target); err != nil {
|
||||
return errors.WrapInvalidInputf(err, ErrCodeDashboardInvalidInput, "%s.spec", path)
|
||||
}
|
||||
if err := validator.New().Struct(target); err != nil {
|
||||
return errors.WrapInvalidInputf(err, ErrCodeDashboardInvalidInput, "%s.spec", path)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateQueryAllowedForPanel checks that the query plugin kind is permitted
|
||||
// for the given panel. For composite queries it recurses into sub-queries.
|
||||
func validateQueryAllowedForPanel(plugin common.Plugin, allowed []string, panelKind string, path string) error {
|
||||
if !slices.Contains(allowed, plugin.Kind) {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput,
|
||||
"%s: query kind %q is not supported by panel kind %q", path, plugin.Kind, panelKind)
|
||||
}
|
||||
|
||||
// For composite queries, validate each sub-query type.
|
||||
if plugin.Kind == QueryKindComposite && plugin.Spec != nil {
|
||||
specJSON, err := json.Marshal(plugin.Spec)
|
||||
if err != nil {
|
||||
return errors.WrapInvalidInputf(err, ErrCodeDashboardInvalidInput, "%s.spec", path)
|
||||
}
|
||||
var composite struct {
|
||||
Queries []struct {
|
||||
Type string `json:"type"`
|
||||
} `json:"queries"`
|
||||
}
|
||||
if err := json.Unmarshal(specJSON, &composite); err != nil {
|
||||
return errors.WrapInvalidInputf(err, ErrCodeDashboardInvalidInput, "%s.spec", path)
|
||||
}
|
||||
for si, sub := range composite.Queries {
|
||||
pluginKind, ok := compositeSubQueryTypeToPluginKind[sub.Type]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if !slices.Contains(allowed, pluginKind) {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput,
|
||||
"%s.spec.queries[%d]: sub-query type %q is not supported by panel kind %q",
|
||||
path, si, sub.Type, panelKind)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// extractPluginFromVariable extracts the plugin from a variable.
|
||||
// Returns nil if the variable has no plugin (e.g. TextVariable).
|
||||
func extractPluginFromVariable(v any) (*common.Plugin, error) {
|
||||
data, err := json.Marshal(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var raw struct {
|
||||
Spec struct {
|
||||
Plugin *common.Plugin `json:"plugin,omitempty"`
|
||||
} `json:"spec"`
|
||||
}
|
||||
if err := json.Unmarshal(data, &raw); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return raw.Spec.Plugin, nil
|
||||
}
|
||||
661
pkg/types/dashboardtypes/dashboard_v2_test.go
Normal file
661
pkg/types/dashboardtypes/dashboard_v2_test.go
Normal file
@@ -0,0 +1,661 @@
|
||||
package dashboardtypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestValidateBigExample(t *testing.T) {
|
||||
data, err := os.ReadFile("testdata/perses.json")
|
||||
if err != nil {
|
||||
t.Fatalf("reading example file: %v", err)
|
||||
}
|
||||
if err := ValidateDashboardV2JSON(data); err != nil {
|
||||
t.Fatalf("expected valid dashboard, got error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateDashboardWithSections(t *testing.T) {
|
||||
data, err := os.ReadFile("testdata/perses_with_sections.json")
|
||||
if err != nil {
|
||||
t.Fatalf("reading example file: %v", err)
|
||||
}
|
||||
if err := ValidateDashboardV2JSON(data); err != nil {
|
||||
t.Fatalf("expected valid dashboard, got error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidateNotAJSON(t *testing.T) {
|
||||
if err := ValidateDashboardV2JSON([]byte("not json")); err == nil {
|
||||
t.Fatal("expected error for invalid JSON")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidateEmptyObject(t *testing.T) {
|
||||
if err := ValidateDashboardV2JSON([]byte("{}")); err == nil {
|
||||
t.Fatal("expected error for empty object missing kind")
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateEmptySpec(t *testing.T) {
|
||||
// no variables no panels
|
||||
data := []byte(`{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {"name": "test"},
|
||||
"spec": {}
|
||||
}`)
|
||||
if err := ValidateDashboardV2JSON(data); err != nil {
|
||||
t.Fatalf("expected valid, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidateOnlyVariables(t *testing.T) {
|
||||
data := []byte(`{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {"name": "test", "project": "signoz"},
|
||||
"spec": {
|
||||
"variables": [
|
||||
{
|
||||
"kind": "ListVariable",
|
||||
"spec": {
|
||||
"name": "service",
|
||||
"allowAllValue": true,
|
||||
"allowMultiple": false,
|
||||
"plugin": {
|
||||
"kind": "SigNozDynamicVariable",
|
||||
"spec": {"name": "service.name", "source": "Metrics"}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "TextVariable",
|
||||
"spec": {
|
||||
"name": "mytext",
|
||||
"value": "default",
|
||||
"plugin": {
|
||||
"kind": "SigNozTextboxVariable",
|
||||
"spec": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"layouts": []
|
||||
}
|
||||
}`)
|
||||
if err := ValidateDashboardV2JSON(data); err != nil {
|
||||
t.Fatalf("expected valid, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidateWrongKindAtTop(t *testing.T) {
|
||||
data := []byte(`{"kind": 123}`)
|
||||
if err := ValidateDashboardV2JSON(data); err == nil {
|
||||
t.Fatal("expected error for wrong type on kind field")
|
||||
}
|
||||
}
|
||||
|
||||
// TestInvalidateUnknownPluginKind verifies that an unknown plugin kind at any
// of the four plugin sites (panel, query, variable, datasource) fails
// validation, and that the error message names the offending kind.
func TestInvalidateUnknownPluginKind(t *testing.T) {
	tests := []struct {
		name        string
		data        string
		wantContain string // substring the validation error must contain
	}{
		{
			name: "unknown panel plugin",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {"kind": "NonExistentPanel", "spec": {}}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "NonExistentPanel",
		},
		{
			name: "unknown query plugin",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {"kind": "SigNozTimeSeriesPanel", "spec": {}},
								"queries": [{
									"kind": "TimeSeriesQuery",
									"spec": {
										"plugin": {"kind": "FakeQueryPlugin", "spec": {}}
									}
								}]
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "FakeQueryPlugin",
		},
		{
			name: "unknown variable plugin",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"variables": [{
						"kind": "ListVariable",
						"spec": {
							"name": "v1",
							"allowAllValue": false,
							"allowMultiple": false,
							"plugin": {"kind": "FakeVariable", "spec": {}}
						}
					}],
					"layouts": []
				}
			}`,
			wantContain: "FakeVariable",
		},
		{
			name: "unknown datasource plugin",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"datasources": {
						"ds1": {
							"default": true,
							"plugin": {"kind": "FakeDatasource", "spec": {}}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "FakeDatasource",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := ValidateDashboardV2JSON([]byte(tt.data))
			if err == nil {
				t.Fatalf("expected error containing %q, got nil", tt.wantContain)
			}
			// The error must identify the unknown kind, not just fail generically.
			if !strings.Contains(err.Error(), tt.wantContain) {
				t.Fatalf("error should mention %q, got: %v", tt.wantContain, err)
			}
		})
	}
}
|
||||
|
||||
func TestInvalidateOneInvalidPanel(t *testing.T) {
|
||||
data := []byte(`{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {"name": "test", "project": "signoz"},
|
||||
"spec": {
|
||||
"panels": {
|
||||
"good": {
|
||||
"kind": "Panel",
|
||||
"spec": {"plugin": {"kind": "SigNozNumberPanel", "spec": {}}}
|
||||
},
|
||||
"bad": {
|
||||
"kind": "Panel",
|
||||
"spec": {"plugin": {"kind": "FakePanel", "spec": {}}}
|
||||
}
|
||||
},
|
||||
"layouts": []
|
||||
}
|
||||
}`)
|
||||
err := ValidateDashboardV2JSON(data)
|
||||
if err == nil {
|
||||
t.Fatal("expected error for invalid panel plugin kind")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "FakePanel") {
|
||||
t.Fatalf("error should mention FakePanel, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
// TestInvalidateWrongFieldTypeInPluginSpec verifies that a type mismatch on a
// field inside a plugin spec (panel, query, or variable) fails validation.
// When wantContain is empty only the presence of an error is asserted, not
// its wording.
func TestInvalidateWrongFieldTypeInPluginSpec(t *testing.T) {
	tests := []struct {
		name        string
		data        string
		wantContain string // optional substring to look for in the error
	}{
		{
			name: "wrong type on panel plugin field",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {
									"kind": "SigNozTimeSeriesPanel",
									"spec": {"visualization": {"fillSpans": "notabool"}}
								}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "fillSpans",
		},
		{
			name: "wrong type on query plugin field",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {"kind": "SigNozTimeSeriesPanel", "spec": {}},
								"queries": [{
									"kind": "TimeSeriesQuery",
									"spec": {
										"plugin": {
											"kind": "SigNozPromQLQuery",
											"spec": {"name": "A", "query": 123}
										}
									}
								}]
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "",
		},
		{
			name: "wrong type on variable plugin field",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"variables": [{
						"kind": "ListVariable",
						"spec": {
							"name": "v",
							"allowAllValue": false,
							"allowMultiple": false,
							"plugin": {
								"kind": "SigNozDynamicVariable",
								"spec": {"name": 123, "source": "Metrics"}
							}
						}
					}],
					"layouts": []
				}
			}`,
			wantContain: "",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := ValidateDashboardV2JSON([]byte(tt.data))
			if err == nil {
				t.Fatal("expected validation error")
			}
			// Only check the message when the case pins a specific substring.
			if tt.wantContain != "" && !strings.Contains(err.Error(), tt.wantContain) {
				t.Fatalf("error should mention %q, got: %v", tt.wantContain, err)
			}
		})
	}
}
|
||||
|
||||
// TestInvalidateBadPanelSpecValues verifies that out-of-range enum values
// inside otherwise well-formed panel specs are rejected: timePreference,
// legend position, threshold format, comparison operator, and decimal
// precision each have a closed set of allowed values.
func TestInvalidateBadPanelSpecValues(t *testing.T) {
	tests := []struct {
		name        string
		data        string
		wantContain string // substring the validation error must contain
	}{
		{
			// "last2Hr" is not one of the supported TimePreference values.
			name: "bad time preference",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {
									"kind": "SigNozTimeSeriesPanel",
									"spec": {"visualization": {"timePreference": "last2Hr"}}
								}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "timePreference",
		},
		{
			// Legend position allows only "bottom" or "right".
			name: "bad legend position",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {
									"kind": "SigNozBarChartPanel",
									"spec": {"legend": {"position": "top"}}
								}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "legend position",
		},
		{
			// Threshold format allows only "Text" or "Background".
			name: "bad threshold format",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {
									"kind": "SigNozNumberPanel",
									"spec": {"thresholds": [{"value": 100, "operator": ">", "color": "Red", "format": "Color"}]}
								}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "threshold format",
		},
		{
			// "!=" is not in the supported comparison-operator set.
			name: "bad comparison operator",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {
									"kind": "SigNozNumberPanel",
									"spec": {"thresholds": [{"value": 100, "operator": "!=", "color": "Red", "format": "Text"}]}
								}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "comparison operator",
		},
		{
			// 9 is outside the allowed precision options (0-4 or "full").
			name: "bad precision",
			data: `{
				"kind": "Dashboard",
				"metadata": {"name": "test", "project": "signoz"},
				"spec": {
					"panels": {
						"p1": {
							"kind": "Panel",
							"spec": {
								"plugin": {
									"kind": "SigNozTimeSeriesPanel",
									"spec": {"formatting": {"decimalPrecision": 9}}
								}
							}
						}
					},
					"layouts": []
				}
			}`,
			wantContain: "precision",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := ValidateDashboardV2JSON([]byte(tt.data))
			if err == nil {
				t.Fatalf("expected error containing %q, got nil", tt.wantContain)
			}
			if !strings.Contains(err.Error(), tt.wantContain) {
				t.Fatalf("error should mention %q, got: %v", tt.wantContain, err)
			}
		})
	}
}
|
||||
|
||||
// TestValidateRequiredFields verifies that missing or empty required fields
// in variable and panel plugin specs are rejected, and that the error names
// the offending struct field.
func TestValidateRequiredFields(t *testing.T) {
	// wrapVariable embeds a variable plugin of the given kind/spec into a
	// minimal but otherwise valid dashboard document.
	wrapVariable := func(pluginKind, pluginSpec string) string {
		return `{
			"kind": "Dashboard",
			"metadata": {"name": "test", "project": "signoz"},
			"spec": {
				"variables": [{
					"kind": "ListVariable",
					"spec": {
						"name": "v",
						"allowAllValue": false,
						"allowMultiple": false,
						"plugin": {"kind": "` + pluginKind + `", "spec": ` + pluginSpec + `}
					}
				}],
				"layouts": []
			}
		}`
	}
	// wrapPanel embeds a panel plugin of the given kind/spec into a minimal
	// but otherwise valid dashboard document.
	wrapPanel := func(panelKind, panelSpec string) string {
		return `{
			"kind": "Dashboard",
			"metadata": {"name": "test", "project": "signoz"},
			"spec": {
				"panels": {
					"p1": {
						"kind": "Panel",
						"spec": {
							"plugin": {"kind": "` + panelKind + `", "spec": ` + panelSpec + `}
						}
					}
				},
				"layouts": []
			}
		}`
	}

	tests := []struct {
		name        string
		data        string
		wantContain string // Go field name expected in the validation error
	}{
		{
			name:        "DynamicVariable missing name",
			data:        wrapVariable("SigNozDynamicVariable", `{"source": "Metrics"}`),
			wantContain: "Name",
		},
		{
			name:        "DynamicVariable missing source",
			data:        wrapVariable("SigNozDynamicVariable", `{"name": "http.method"}`),
			wantContain: "Source",
		},
		{
			name:        "QueryVariable missing queryValue",
			data:        wrapVariable("SigNozQueryVariable", `{}`),
			wantContain: "QueryValue",
		},
		{
			name:        "CustomVariable missing customValue",
			data:        wrapVariable("SigNozCustomVariable", `{}`),
			wantContain: "CustomValue",
		},
		{
			name:        "ThresholdWithLabel missing color",
			data:        wrapPanel("SigNozTimeSeriesPanel", `{"thresholds": [{"value": 100, "label": "high", "color": ""}]}`),
			wantContain: "Color",
		},
		{
			name:        "ThresholdWithLabel missing label",
			data:        wrapPanel("SigNozTimeSeriesPanel", `{"thresholds": [{"value": 100, "color": "Red", "label": ""}]}`),
			wantContain: "Label",
		},
		{
			name:        "ComparisonThreshold missing color",
			data:        wrapPanel("SigNozNumberPanel", `{"thresholds": [{"value": 100, "operator": ">", "format": "Text", "color": ""}]}`),
			wantContain: "Color",
		},
		{
			name:        "TableThreshold missing columnName",
			data:        wrapPanel("SigNozTablePanel", `{"thresholds": [{"value": 100, "operator": ">", "format": "Text", "color": "Red", "columnName": ""}]}`),
			wantContain: "ColumnName",
		},
		{
			name:        "LogField missing name",
			data:        wrapPanel("SigNozListPanel", `{"selectedLogFields": [{"name": "", "type": "log", "dataType": "string"}]}`),
			wantContain: "Name",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := ValidateDashboardV2JSON([]byte(tt.data))
			if err == nil {
				t.Fatalf("expected error containing %q, got nil", tt.wantContain)
			}
			if !strings.Contains(err.Error(), tt.wantContain) {
				t.Fatalf("error should mention %q, got: %v", tt.wantContain, err)
			}
		})
	}
}
|
||||
|
||||
// TestTimeSeriesPanelDefaults verifies that a TimeSeries panel with an empty
// plugin spec yields the documented defaults via the Value() accessors:
// DecimalPrecision 2, LineInterpolation "spline", LineStyle "solid",
// FillMode "solid", and SpanGaps true.
func TestTimeSeriesPanelDefaults(t *testing.T) {
	data := []byte(`{
		"kind": "Dashboard",
		"metadata": {"name": "test", "project": "signoz"},
		"spec": {
			"panels": {
				"p1": {
					"kind": "Panel",
					"spec": {
						"plugin": {
							"kind": "SigNozTimeSeriesPanel",
							"spec": {}
						}
					}
				}
			},
			"layouts": []
		}
	}`)
	var d StorableDashboardDataV2
	if err := json.Unmarshal(data, &d); err != nil {
		t.Fatalf("unmarshal failed: %v", err)
	}

	// Re-marshal the dynamically-typed plugin spec and decode it into the
	// concrete TimeSeriesPanelSpec so the typed defaults can be inspected.
	specJSON, _ := json.Marshal(d.Spec.Panels["p1"].Spec.Plugin.Spec)
	var spec TimeSeriesPanelSpec
	if err := json.Unmarshal(specJSON, &spec); err != nil {
		t.Fatalf("unmarshal spec failed: %v", err)
	}

	if spec.Formatting.DecimalPrecision.Value() != 2 {
		t.Fatalf("expected DecimalPrecision default 2, got %v", spec.Formatting.DecimalPrecision.Value())
	}
	if spec.ChartAppearance.LineInterpolation.Value() != "spline" {
		t.Fatalf("expected LineInterpolation default spline, got %v", spec.ChartAppearance.LineInterpolation.Value())
	}
	if spec.ChartAppearance.LineStyle.Value() != "solid" {
		t.Fatalf("expected LineStyle default solid, got %v", spec.ChartAppearance.LineStyle.Value())
	}
	if spec.ChartAppearance.FillMode.Value() != "solid" {
		t.Fatalf("expected FillMode default solid, got %v", spec.ChartAppearance.FillMode.Value())
	}
	// SpanGaps.Value() returns any; comparing against an untyped bool works.
	if spec.ChartAppearance.SpanGaps.Value() != true {
		t.Fatalf("expected SpanGaps default true, got %v", spec.ChartAppearance.SpanGaps.Value())
	}
}
|
||||
|
||||
func TestPanelTypeQueryTypeCompatibility(t *testing.T) {
|
||||
mkQuery := func(panelKind, queryKind, querySpec string) []byte {
|
||||
return []byte(`{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {"name": "test", "project": "signoz"},
|
||||
"spec": {
|
||||
"panels": {"p1": {"kind": "Panel", "spec": {
|
||||
"plugin": {"kind": "` + panelKind + `", "spec": {}},
|
||||
"queries": [{"kind": "TimeSeriesQuery", "spec": {"plugin": {"kind": "` + queryKind + `", "spec": ` + querySpec + `}}}]
|
||||
}}},
|
||||
"layouts": []
|
||||
}
|
||||
}`)
|
||||
}
|
||||
mkComposite := func(panelKind, subType, subSpec string) []byte {
|
||||
return []byte(`{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {"name": "test", "project": "signoz"},
|
||||
"spec": {
|
||||
"panels": {"p1": {"kind": "Panel", "spec": {
|
||||
"plugin": {"kind": "` + panelKind + `", "spec": {}},
|
||||
"queries": [{"kind": "TimeSeriesQuery", "spec": {"plugin": {"kind": "SigNozCompositeQuery", "spec": {
|
||||
"queries": [{"type": "` + subType + `", "spec": ` + subSpec + `}]
|
||||
}}}}]
|
||||
}}},
|
||||
"layouts": []
|
||||
}
|
||||
}`)
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
name string
|
||||
data []byte
|
||||
wantErr bool
|
||||
}{
|
||||
// Top-level: allowed
|
||||
{"TimeSeries+PromQL", mkQuery("SigNozTimeSeriesPanel", "SigNozPromQLQuery", `{"name":"A","query":"up"}`), false},
|
||||
{"Table+ClickHouse", mkQuery("SigNozTablePanel", "SigNozClickHouseSQL", `{"name":"A","query":"SELECT 1"}`), false},
|
||||
{"List+Builder", mkQuery("SigNozListPanel", "SigNozBuilderQuery", `{"name":"A","signal":"logs"}`), false},
|
||||
// Top-level: rejected
|
||||
{"Table+PromQL", mkQuery("SigNozTablePanel", "SigNozPromQLQuery", `{"name":"A","query":"up"}`), true},
|
||||
{"List+ClickHouse", mkQuery("SigNozListPanel", "SigNozClickHouseSQL", `{"name":"A","query":"SELECT 1"}`), true},
|
||||
{"List+PromQL", mkQuery("SigNozListPanel", "SigNozPromQLQuery", `{"name":"A","query":"up"}`), true},
|
||||
{"List+Composite", mkQuery("SigNozListPanel", "SigNozCompositeQuery", `{"queries":[]}`), true},
|
||||
{"List+Formula", mkQuery("SigNozListPanel", "SigNozFormula", `{"name":"F1","expression":"A+B"}`), true},
|
||||
// Composite sub-queries
|
||||
{"Table+Composite(promql)", mkComposite("SigNozTablePanel", "promql", `{"name":"A","query":"up"}`), true},
|
||||
{"Table+Composite(clickhouse)", mkComposite("SigNozTablePanel", "clickhouse_sql", `{"name":"A","query":"SELECT 1"}`), false},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
err := ValidateDashboardV2JSON(tc.data)
|
||||
if tc.wantErr && err == nil {
|
||||
t.Fatalf("%s: expected error, got nil", tc.name)
|
||||
}
|
||||
if !tc.wantErr && err != nil {
|
||||
t.Fatalf("%s: expected valid, got: %v", tc.name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
--- new file: pkg/types/dashboardtypes/perses_plugin_types.go (541 lines) ---
@@ -0,0 +1,541 @@
|
||||
package dashboardtypes
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
qb "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
|
||||
// ══════════════════════════════════════════════
// SigNoz variable plugin specs
// ══════════════════════════════════════════════

// VariablePluginKind identifies which variable plugin a dashboard variable
// uses. Declared as a type alias so plain strings remain assignable.
type VariablePluginKind = string

// The four supported variable plugin kinds.
const (
	VariableKindDynamic VariablePluginKind = "SigNozDynamicVariable"
	VariableKindQuery   VariablePluginKind = "SigNozQueryVariable"
	VariableKindCustom  VariablePluginKind = "SigNozCustomVariable"
	VariableKindTextbox VariablePluginKind = "SigNozTextboxVariable"
)
|
||||
|
||||
// DynamicVariableSpec configures a variable whose values are fetched
// dynamically from a telemetry source.
type DynamicVariableSpec struct {
	// Name is the name of the attribute being fetched dynamically from the
	// source. This could be extended to a richer selector in the future.
	Name string `json:"name" validate:"required"`
	// Source is the telemetry source to fetch values from (e.g. "Metrics" in
	// the tests elsewhere in this package).
	Source string `json:"source" validate:"required"`
}

// QueryVariableSpec configures a variable whose values come from a query.
type QueryVariableSpec struct {
	// QueryValue is the query used to produce the variable's values.
	QueryValue string `json:"queryValue" validate:"required"`
}

// CustomVariableSpec configures a variable with a user-supplied value list.
type CustomVariableSpec struct {
	// CustomValue holds the user-provided value(s) for this variable.
	CustomValue string `json:"customValue" validate:"required"`
}

// TextboxVariableSpec configures a free-text variable; it carries no
// plugin-level options of its own.
type TextboxVariableSpec struct{}
|
||||
|
||||
// ══════════════════════════════════════════════
// SigNoz query plugin specs — aliased from querybuildertypesv5
// ══════════════════════════════════════════════

// QueryPluginKind identifies which query plugin a panel query uses. Declared
// as a type alias so plain strings remain assignable.
type QueryPluginKind = string

// The supported query plugin kinds.
const (
	QueryKindBuilder       QueryPluginKind = "SigNozBuilderQuery"
	QueryKindComposite     QueryPluginKind = "SigNozCompositeQuery"
	QueryKindFormula       QueryPluginKind = "SigNozFormula"
	QueryKindPromQL        QueryPluginKind = "SigNozPromQLQuery"
	QueryKindClickHouseSQL QueryPluginKind = "SigNozClickHouseSQL"
	QueryKindTraceOperator QueryPluginKind = "SigNozTraceOperator"
)
|
||||
|
||||
// Query spec types are aliases into querybuildertypesv5 so dashboard code
// reuses the query builder's JSON shapes and validation without duplication.
type (
	CompositeQuerySpec     = qb.CompositeQuery
	QueryEnvelope          = qb.QueryEnvelope
	FormulaSpec            = qb.QueryBuilderFormula
	PromQLQuerySpec        = qb.PromQuery
	ClickHouseSQLQuerySpec = qb.ClickHouseQuery
	TraceOperatorSpec      = qb.QueryBuilderTraceOperator
)
|
||||
|
||||
// BuilderQuerySpec dispatches to MetricBuilderQuerySpec, LogBuilderQuerySpec,
// or TraceBuilderQuerySpec based on the signal field.

// Per-signal builder query aliases, instantiating the generic query builder
// type for each aggregation kind.
type (
	MetricBuilderQuerySpec = qb.QueryBuilderQuery[qb.MetricAggregation]
	LogBuilderQuerySpec    = qb.QueryBuilderQuery[qb.LogAggregation]
	TraceBuilderQuerySpec  = qb.QueryBuilderQuery[qb.TraceAggregation]
)

// BuilderQuerySpec holds the signal-specific builder query decoded by its
// custom UnmarshalJSON. Spec contains one of the three alias types above,
// stored by value.
type BuilderQuerySpec struct {
	Spec any
}
|
||||
|
||||
func (b *BuilderQuerySpec) UnmarshalJSON(data []byte) error {
|
||||
var peek struct {
|
||||
Signal string `json:"signal"`
|
||||
}
|
||||
if err := json.Unmarshal(data, &peek); err != nil {
|
||||
return err
|
||||
}
|
||||
switch peek.Signal {
|
||||
case "metrics":
|
||||
var spec MetricBuilderQuerySpec
|
||||
if err := json.Unmarshal(data, &spec); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Spec = spec
|
||||
case "logs":
|
||||
var spec LogBuilderQuerySpec
|
||||
if err := json.Unmarshal(data, &spec); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Spec = spec
|
||||
case "traces":
|
||||
var spec TraceBuilderQuerySpec
|
||||
if err := json.Unmarshal(data, &spec); err != nil {
|
||||
return err
|
||||
}
|
||||
b.Spec = spec
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid signal %q: must be metrics, logs, or traces", peek.Signal)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ══════════════════════════════════════════════
// SigNoz panel plugin specs
// ══════════════════════════════════════════════

// PanelPluginKind identifies which panel plugin a dashboard panel uses.
// Declared as a type alias so plain strings remain assignable.
type PanelPluginKind = string

// The supported panel plugin kinds.
const (
	PanelKindTimeSeries PanelPluginKind = "SigNozTimeSeriesPanel"
	PanelKindBarChart   PanelPluginKind = "SigNozBarChartPanel"
	PanelKindNumber     PanelPluginKind = "SigNozNumberPanel"
	PanelKindPieChart   PanelPluginKind = "SigNozPieChartPanel"
	PanelKindTable      PanelPluginKind = "SigNozTablePanel"
	PanelKindHistogram  PanelPluginKind = "SigNozHistogramPanel"
	PanelKindList       PanelPluginKind = "SigNozListPanel"
)

// DatasourcePluginKind identifies a dashboard datasource plugin.
type DatasourcePluginKind = string

const (
	// DatasourceKindSigNoz is currently the only supported datasource kind.
	DatasourceKindSigNoz DatasourcePluginKind = "SigNozDatasource"
)
|
||||
|
||||
// TimeSeriesPanelSpec is the plugin spec for SigNozTimeSeriesPanel.
type TimeSeriesPanelSpec struct {
	Visualization   TimeSeriesVisualization   `json:"visualization"`
	Formatting      PanelFormatting           `json:"formatting"`
	ChartAppearance TimeSeriesChartAppearance `json:"chartAppearance"`
	Axes            Axes                      `json:"axes"`
	Legend          Legend                    `json:"legend"`
	// "dive" makes the validator descend into each threshold element.
	Thresholds []ThresholdWithLabel `json:"thresholds" validate:"dive"`
}

// TimeSeriesChartAppearance groups the line-drawing options of a time series
// chart. The constrained types carry their own defaults via Value().
type TimeSeriesChartAppearance struct {
	LineInterpolation LineInterpolation `json:"lineInterpolation"`
	ShowPoints        bool              `json:"showPoints"`
	LineStyle         LineStyle         `json:"lineStyle"`
	FillMode          FillMode          `json:"fillMode"`
	SpanGaps          SpanGaps          `json:"spanGaps"`
}

// BarChartPanelSpec is the plugin spec for SigNozBarChartPanel.
type BarChartPanelSpec struct {
	Visualization BarChartVisualization `json:"visualization"`
	Formatting    PanelFormatting       `json:"formatting"`
	Axes          Axes                  `json:"axes"`
	Legend        Legend                `json:"legend"`
	Thresholds    []ThresholdWithLabel  `json:"thresholds" validate:"dive"`
}
|
||||
|
||||
// NumberPanelSpec is the plugin spec for SigNozNumberPanel.
type NumberPanelSpec struct {
	Visualization BasicVisualization    `json:"visualization"`
	Formatting    PanelFormatting       `json:"formatting"`
	Thresholds    []ComparisonThreshold `json:"thresholds" validate:"dive"`
}

// PieChartPanelSpec is the plugin spec for SigNozPieChartPanel.
type PieChartPanelSpec struct {
	Visualization BasicVisualization `json:"visualization"`
	Formatting    PanelFormatting    `json:"formatting"`
	Legend        Legend             `json:"legend"`
}

// TablePanelSpec is the plugin spec for SigNozTablePanel. Its thresholds are
// scoped to a column via TableThreshold.
type TablePanelSpec struct {
	Visualization BasicVisualization `json:"visualization"`
	Formatting    TableFormatting    `json:"formatting"`
	Thresholds    []TableThreshold   `json:"thresholds" validate:"dive"`
}

// HistogramPanelSpec is the plugin spec for SigNozHistogramPanel.
type HistogramPanelSpec struct {
	HistogramBuckets HistogramBuckets `json:"histogramBuckets"`
	Legend           Legend           `json:"legend"`
}

// HistogramBuckets controls histogram bucketing. BucketCount and BucketWidth
// are pointers so "unset" is distinguishable from zero.
type HistogramBuckets struct {
	BucketCount           *float64 `json:"bucketCount"`
	BucketWidth           *float64 `json:"bucketWidth"`
	MergeAllActiveQueries bool     `json:"mergeAllActiveQueries"`
}

// ListPanelSpec is the plugin spec for SigNozListPanel, listing which log and
// trace fields are shown.
type ListPanelSpec struct {
	SelectedLogFields    []LogField                         `json:"selectedLogFields" validate:"dive"`
	SelectedTracesFields []telemetrytypes.TelemetryFieldKey `json:"selectedTracesFields"`
}

// LogField identifies one selected log field by name, type, and data type.
type LogField struct {
	Name     string `json:"name" validate:"required"`
	Type     string `json:"type"`
	DataType string `json:"dataType"`
}
|
||||
|
||||
// ══════════════════════════════════════════════
// Panel common types
// ══════════════════════════════════════════════

// Axes configures chart axis bounds and scale. SoftMin/SoftMax are pointers
// so "unset" is distinguishable from zero.
type Axes struct {
	SoftMin    *float64 `json:"softMin"`
	SoftMax    *float64 `json:"softMax"`
	IsLogScale bool     `json:"isLogScale"`
}

// BasicVisualization holds options common to every panel visualization.
type BasicVisualization struct {
	TimePreference TimePreference `json:"timePreference"`
}

// TimeSeriesVisualization extends the basic options with fill-spans toggling.
type TimeSeriesVisualization struct {
	BasicVisualization
	FillSpans bool `json:"fillSpans"`
}

// BarChartVisualization extends the basic options with fill-spans and
// stacked-bar toggling.
type BarChartVisualization struct {
	BasicVisualization
	FillSpans       bool `json:"fillSpans"`
	StackedBarChart bool `json:"stackedBarChart"`
}

// PanelFormatting configures value display: a unit label and the decimal
// precision (defaulting to 2 via PrecisionOption).
type PanelFormatting struct {
	Unit             string          `json:"unit"`
	DecimalPrecision PrecisionOption `json:"decimalPrecision"`
}

// TableFormatting is like PanelFormatting but allows a unit per column.
type TableFormatting struct {
	ColumnUnits      map[string]string `json:"columnUnits"`
	DecimalPrecision PrecisionOption   `json:"decimalPrecision"`
}

// Legend configures legend placement and per-series color overrides.
type Legend struct {
	Position     LegendPosition    `json:"position"`
	CustomColors map[string]string `json:"customColors"`
}

// ThresholdWithLabel is a labeled threshold line (time series / bar charts).
type ThresholdWithLabel struct {
	Value float64 `json:"value"`
	Unit  string  `json:"unit"`
	Color string  `json:"color" validate:"required"`
	Label string  `json:"label" validate:"required"`
}

// ComparisonThreshold styles a value when it matches Operator against Value
// (number panels).
type ComparisonThreshold struct {
	Value    float64            `json:"value"`
	Operator ComparisonOperator `json:"operator"`
	Unit     string             `json:"unit"`
	Color    string             `json:"color" validate:"required"`
	Format   ThresholdFormat    `json:"format"`
}

// TableThreshold is a ComparisonThreshold scoped to a single table column.
type TableThreshold struct {
	ComparisonThreshold
	ColumnName string `json:"columnName" validate:"required"`
}
|
||||
|
||||
// ══════════════════════════════════════════════
|
||||
// Constrained scalar types (enum validation via custom UnmarshalJSON)
|
||||
// ══════════════════════════════════════════════
|
||||
|
||||
// LineInterpolation: "linear" | "spline" | "stepAfter" | "stepBefore". Default is "spline".
|
||||
type LineInterpolation struct {
|
||||
value string
|
||||
}
|
||||
|
||||
const (
|
||||
LineInterpolationLinear = "linear"
|
||||
LineInterpolationSpline = "spline"
|
||||
LineInterpolationStepAfter = "stepAfter"
|
||||
LineInterpolationStepBefore = "stepBefore"
|
||||
)
|
||||
|
||||
func (li LineInterpolation) Value() string {
|
||||
if li.value == "" {
|
||||
return LineInterpolationSpline
|
||||
}
|
||||
return li.value
|
||||
}
|
||||
|
||||
func (li *LineInterpolation) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch v {
|
||||
case LineInterpolationLinear, LineInterpolationSpline, LineInterpolationStepAfter, LineInterpolationStepBefore:
|
||||
li.value = v
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid line interpolation %q: must be linear, spline, stepAfter, or stepBefore", v)
|
||||
}
|
||||
}
|
||||
|
||||
func (li LineInterpolation) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(li.Value())
|
||||
}
|
||||
|
||||
// LineStyle: "solid" | "dashed". Default is "solid".
|
||||
type LineStyle struct {
|
||||
value string
|
||||
}
|
||||
|
||||
const (
|
||||
LineStyleSolid = "solid"
|
||||
LineStyleDashed = "dashed"
|
||||
)
|
||||
|
||||
func (ls LineStyle) Value() string {
|
||||
if ls.value == "" {
|
||||
return LineStyleSolid
|
||||
}
|
||||
return ls.value
|
||||
}
|
||||
|
||||
func (ls *LineStyle) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch v {
|
||||
case LineStyleSolid, LineStyleDashed:
|
||||
ls.value = v
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid line style %q: must be solid or dashed", v)
|
||||
}
|
||||
}
|
||||
|
||||
func (ls LineStyle) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(ls.Value())
|
||||
}
|
||||
|
||||
// FillMode: "solid" | "gradient" | "none". Default is "solid".
|
||||
type FillMode struct {
|
||||
value string
|
||||
}
|
||||
|
||||
const (
|
||||
FillModeSolid = "solid"
|
||||
FillModeGradient = "gradient"
|
||||
FillModeNone = "none"
|
||||
)
|
||||
|
||||
func (fm FillMode) Value() string {
|
||||
if fm.value == "" {
|
||||
return FillModeSolid
|
||||
}
|
||||
return fm.value
|
||||
}
|
||||
|
||||
func (fm *FillMode) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch v {
|
||||
case FillModeSolid, FillModeGradient, FillModeNone:
|
||||
fm.value = v
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid fill mode %q: must be solid, gradient, or none", v)
|
||||
}
|
||||
}
|
||||
|
||||
func (fm FillMode) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(fm.Value())
|
||||
}
|
||||
|
||||
// TimePreference: "globalTime" | "last5Min" | "last15Min" | "last30Min" | "last1Hr" | "last6Hr" | "last1Day" | "last3Days" | "last1Week" | "last1Month".
|
||||
type TimePreference string
|
||||
|
||||
const (
|
||||
TimePreferenceGlobalTime TimePreference = "globalTime"
|
||||
TimePreferenceLast5Min TimePreference = "last5Min"
|
||||
TimePreferenceLast15Min TimePreference = "last15Min"
|
||||
TimePreferenceLast30Min TimePreference = "last30Min"
|
||||
TimePreferenceLast1Hr TimePreference = "last1Hr"
|
||||
TimePreferenceLast6Hr TimePreference = "last6Hr"
|
||||
TimePreferenceLast1Day TimePreference = "last1Day"
|
||||
TimePreferenceLast3Days TimePreference = "last3Days"
|
||||
TimePreferenceLast1Week TimePreference = "last1Week"
|
||||
TimePreferenceLast1Month TimePreference = "last1Month"
|
||||
)
|
||||
|
||||
func (t *TimePreference) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch TimePreference(v) {
|
||||
case TimePreferenceGlobalTime, TimePreferenceLast5Min, TimePreferenceLast15Min, TimePreferenceLast30Min, TimePreferenceLast1Hr, TimePreferenceLast6Hr, TimePreferenceLast1Day, TimePreferenceLast3Days, TimePreferenceLast1Week, TimePreferenceLast1Month:
|
||||
*t = TimePreference(v)
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid timePreference %q", v)
|
||||
}
|
||||
}
|
||||
|
||||
// LegendPosition: "bottom" | "right".
|
||||
type LegendPosition string
|
||||
|
||||
const (
|
||||
LegendPositionBottom LegendPosition = "bottom"
|
||||
LegendPositionRight LegendPosition = "right"
|
||||
)
|
||||
|
||||
func (l *LegendPosition) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch LegendPosition(v) {
|
||||
case LegendPositionBottom, LegendPositionRight:
|
||||
*l = LegendPosition(v)
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid legend position %q: must be bottom or right", v)
|
||||
}
|
||||
}
|
||||
|
||||
// ThresholdFormat: "Text" | "Background".
|
||||
type ThresholdFormat string
|
||||
|
||||
const (
|
||||
ThresholdFormatText ThresholdFormat = "Text"
|
||||
ThresholdFormatBackground ThresholdFormat = "Background"
|
||||
)
|
||||
|
||||
func (f *ThresholdFormat) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch ThresholdFormat(v) {
|
||||
case ThresholdFormatText, ThresholdFormatBackground:
|
||||
*f = ThresholdFormat(v)
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid threshold format %q: must be Text or Background", v)
|
||||
}
|
||||
}
|
||||
|
||||
// ComparisonOperator: ">" | "<" | ">=" | "<=" | "=".
|
||||
type ComparisonOperator string
|
||||
|
||||
const (
|
||||
ComparisonOperatorGT ComparisonOperator = ">"
|
||||
ComparisonOperatorLT ComparisonOperator = "<"
|
||||
ComparisonOperatorGTE ComparisonOperator = ">="
|
||||
ComparisonOperatorLTE ComparisonOperator = "<="
|
||||
ComparisonOperatorEQ ComparisonOperator = "="
|
||||
)
|
||||
|
||||
func (o *ComparisonOperator) UnmarshalJSON(data []byte) error {
|
||||
var v string
|
||||
if err := json.Unmarshal(data, &v); err != nil {
|
||||
return err
|
||||
}
|
||||
switch ComparisonOperator(v) {
|
||||
case ComparisonOperatorGT, ComparisonOperatorLT, ComparisonOperatorGTE, ComparisonOperatorLTE, ComparisonOperatorEQ:
|
||||
*o = ComparisonOperator(v)
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid comparison operator %q", v)
|
||||
}
|
||||
}
|
||||
|
||||
// SpanGaps: bool | number. Default is true.
|
||||
// When true, lines connect across null values. When false, lines break at nulls.
|
||||
// When a number, gaps smaller than that threshold (in seconds) are connected.
|
||||
type SpanGaps struct {
|
||||
value any
|
||||
}
|
||||
|
||||
func (sg SpanGaps) Value() any {
|
||||
if sg.value == nil {
|
||||
return true
|
||||
}
|
||||
return sg.value
|
||||
}
|
||||
|
||||
func (sg *SpanGaps) UnmarshalJSON(data []byte) error {
|
||||
var b bool
|
||||
if err := json.Unmarshal(data, &b); err == nil {
|
||||
sg.value = b
|
||||
return nil
|
||||
}
|
||||
var n float64
|
||||
if err := json.Unmarshal(data, &n); err == nil {
|
||||
if n < 0 {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid spanGaps %v: numeric value must be non-negative", n)
|
||||
}
|
||||
sg.value = n
|
||||
return nil
|
||||
}
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid spanGaps: must be a bool or a non-negative number")
|
||||
}
|
||||
|
||||
func (sg SpanGaps) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(sg.Value())
|
||||
}
|
||||
|
||||
// PrecisionOption: 0 | 1 | 2 | 3 | 4 | "full". Default is 2.
|
||||
type PrecisionOption struct {
|
||||
value any
|
||||
}
|
||||
|
||||
const PrecisionOptionFull = "full"
|
||||
|
||||
func (p PrecisionOption) Value() any {
|
||||
if p.value == nil {
|
||||
return 2
|
||||
}
|
||||
return p.value
|
||||
}
|
||||
|
||||
func (p *PrecisionOption) UnmarshalJSON(data []byte) error {
|
||||
var s string
|
||||
if err := json.Unmarshal(data, &s); err == nil {
|
||||
if s != PrecisionOptionFull {
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid precision option %q: string value must be %q", s, PrecisionOptionFull)
|
||||
}
|
||||
p.value = s
|
||||
return nil
|
||||
}
|
||||
var n int
|
||||
if err := json.Unmarshal(data, &n); err == nil {
|
||||
switch n {
|
||||
case 0, 1, 2, 3, 4:
|
||||
p.value = n
|
||||
return nil
|
||||
default:
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid precision option %d: must be 0, 1, 2, 3, or 4", n)
|
||||
}
|
||||
}
|
||||
return errors.NewInvalidInputf(ErrCodeDashboardInvalidInput, "invalid precision option: must be an int (0-4) or \"full\"")
|
||||
}
|
||||
|
||||
func (p PrecisionOption) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(p.Value())
|
||||
}
|
||||
871
pkg/types/dashboardtypes/testdata/perses.json
vendored
Normal file
871
pkg/types/dashboardtypes/testdata/perses.json
vendored
Normal file
@@ -0,0 +1,871 @@
|
||||
{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {
|
||||
"name": "the-everything-dashboard",
|
||||
"project": "signoz"
|
||||
},
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "The everything dashboard",
|
||||
"description": "Trying to cover as many concepts here as possible"
|
||||
},
|
||||
"duration": "1h",
|
||||
"datasources": {
|
||||
"SigNozDatasource": {
|
||||
"default": true,
|
||||
"plugin": {
|
||||
"kind": "SigNozDatasource",
|
||||
"spec": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"variables": [
|
||||
{
|
||||
"kind": "ListVariable",
|
||||
"spec": {
|
||||
"name": "serviceName",
|
||||
"display": {
|
||||
"name": "serviceName"
|
||||
},
|
||||
"allowAllValue": true,
|
||||
"allowMultiple": false,
|
||||
"sort": "none",
|
||||
"plugin": {
|
||||
"kind": "SigNozDynamicVariable",
|
||||
"spec": {
|
||||
"name": "service.name",
|
||||
"source": "Metrics"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "ListVariable",
|
||||
"spec": {
|
||||
"name": "statusCodesFromQuery",
|
||||
"display": {
|
||||
"name": "statusCodesFromQuery"
|
||||
},
|
||||
"allowAllValue": true,
|
||||
"allowMultiple": true,
|
||||
"sort": "alphabetical-asc",
|
||||
"plugin": {
|
||||
"kind": "SigNozQueryVariable",
|
||||
"spec": {
|
||||
"queryValue": "SELECT JSONExtractString(labels, 'http.status_code') AS status_code FROM signoz_metrics.distributed_time_series_v4_1day WHERE status_code != '' GROUP BY status_code"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "ListVariable",
|
||||
"spec": {
|
||||
"name": "limit",
|
||||
"display": {
|
||||
"name": "limit"
|
||||
},
|
||||
"allowAllValue": false,
|
||||
"allowMultiple": false,
|
||||
"sort": "none",
|
||||
"plugin": {
|
||||
"kind": "SigNozCustomVariable",
|
||||
"spec": {
|
||||
"customValue": "1,10,20,40,80,160,200"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "TextVariable",
|
||||
"spec": {
|
||||
"name": "textboxvar",
|
||||
"display": {
|
||||
"name": "textboxvar"
|
||||
},
|
||||
"value": "defaultvaluegoeshere",
|
||||
"plugin": {
|
||||
"kind": "SigNozTextboxVariable",
|
||||
"spec": {}
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"panels": {
|
||||
"24e2697b": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "total resp size",
|
||||
"description": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTimeSeriesPanel",
|
||||
"spec": {
|
||||
"visualization": {
|
||||
"fillSpans": true
|
||||
},
|
||||
"formatting": {
|
||||
"unit": "By",
|
||||
"decimalPrecision": 3
|
||||
},
|
||||
"axes": {
|
||||
"softMax": 800,
|
||||
"isLogScale": true
|
||||
},
|
||||
"legend": {
|
||||
"position": "right",
|
||||
"customColors": {
|
||||
"{service.name=\"sampleapp-gateway\"}": "#9ea5f7"
|
||||
}
|
||||
},
|
||||
"thresholds": [
|
||||
{
|
||||
"value": 1024,
|
||||
"unit": "By",
|
||||
"color": "Red",
|
||||
"label": "upper limit"
|
||||
},
|
||||
{
|
||||
"value": 100,
|
||||
"unit": "By",
|
||||
"color": "Orange",
|
||||
"label": "kinda bad"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"links": [
|
||||
{
|
||||
"name": "View service details",
|
||||
"url": "http://localhost:8080/{{_service.name}}?dfddf=%7B%7Blimit%7D%7D"
|
||||
}
|
||||
],
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "http.server.response.body.size.sum",
|
||||
"reduceTo": "sum",
|
||||
"spaceAggregation": "sum",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "http.response.status_code IN $statusCodesFromQuery"
|
||||
},
|
||||
"groupBy": [
|
||||
{
|
||||
"name": "service.name",
|
||||
"fieldDataType": "string",
|
||||
"fieldContext": "tag"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"ff2f72f1": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "fraction of calls",
|
||||
"description": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTimeSeriesPanel",
|
||||
"spec": {
|
||||
"visualization": {
|
||||
"fillSpans": true
|
||||
},
|
||||
"formatting": {
|
||||
"decimalPrecision": 1
|
||||
},
|
||||
"thresholds": [
|
||||
{
|
||||
"value": 1,
|
||||
"color": "Blue",
|
||||
"label": "max possible"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozCompositeQuery",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"type": "builder_query",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"disabled": true,
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "signoz_calls_total",
|
||||
"reduceTo": "sum",
|
||||
"spaceAggregation": "sum",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name IN $serviceName AND http.status_code IN $statusCodesFromQuery"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "builder_query",
|
||||
"spec": {
|
||||
"name": "B",
|
||||
"signal": "metrics",
|
||||
"disabled": true,
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "signoz_calls_total",
|
||||
"reduceTo": "sum",
|
||||
"spaceAggregation": "sum",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name in $serviceName"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "builder_formula",
|
||||
"spec": {
|
||||
"name": "F1",
|
||||
"expression": "A / B"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"011605e7": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "total resp size"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozBarChartPanel",
|
||||
"spec": {
|
||||
"visualization": {
|
||||
"stackedBarChart": false
|
||||
},
|
||||
"formatting": {
|
||||
"unit": "By"
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "http.server.response.body.size.sum",
|
||||
"reduceTo": "sum",
|
||||
"spaceAggregation": "sum",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "http.response.status_code IN $statusCodesFromQuery"
|
||||
},
|
||||
"groupBy": [
|
||||
{
|
||||
"name": "service.name",
|
||||
"fieldDataType": "string",
|
||||
"fieldContext": "tag"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"e23516fc": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "num traces for service"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozNumberPanel",
|
||||
"spec": {
|
||||
"formatting": {
|
||||
"unit": "none",
|
||||
"decimalPrecision": 1
|
||||
},
|
||||
"thresholds": [
|
||||
{
|
||||
"value": 1200000,
|
||||
"operator": ">",
|
||||
"unit": "none",
|
||||
"color": "Red",
|
||||
"format": "Text"
|
||||
},
|
||||
{
|
||||
"value": 1200000,
|
||||
"operator": "<=",
|
||||
"unit": "none",
|
||||
"color": "Green",
|
||||
"format": "Text"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count() "
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name = $serviceName "
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"130c8d6b": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "num logs for service"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozNumberPanel",
|
||||
"spec": {
|
||||
"formatting": {
|
||||
"unit": "none",
|
||||
"decimalPrecision": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count() "
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name = $serviceName "
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"246f7c6d": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "num traces for service per resp code"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozPieChartPanel",
|
||||
"spec": {
|
||||
"formatting": {
|
||||
"decimalPrecision": 1
|
||||
},
|
||||
"legend": {
|
||||
"customColors": {
|
||||
"\"201\"": "#2bc051",
|
||||
"\"400\"": "#cc462e",
|
||||
"\"500\"": "#ff0000"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count() "
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name = $serviceName isEntryPoint = 'true'"
|
||||
},
|
||||
"groupBy": [
|
||||
{
|
||||
"name": "http.response.status_code",
|
||||
"fieldDataType": "float64",
|
||||
"fieldContext": "tag"
|
||||
}
|
||||
],
|
||||
"legend": "\"{{http.response.status_code}}\""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"21f7d4d0": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "average latency per service"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTablePanel",
|
||||
"spec": {
|
||||
"formatting": {
|
||||
"columnUnits": {
|
||||
"A": "s"
|
||||
}
|
||||
},
|
||||
"thresholds": [
|
||||
{
|
||||
"value": 1,
|
||||
"operator": ">",
|
||||
"unit": "min",
|
||||
"color": "Red",
|
||||
"format": "Text",
|
||||
"columnName": "A"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozClickHouseSQL",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"query": "WITH\n __spatial_aggregation_cte AS\n (\n SELECT\n toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60)) AS ts,\n `service.name`,\n le,\n sum(value) / 60 AS value\n FROM signoz_metrics.distributed_samples_v4 AS points\n INNER JOIN\n (\n SELECT\n fingerprint,\n JSONExtractString(labels, 'service.name') AS `service.name`,\n JSONExtractString(labels, 'le') AS le\n FROM signoz_metrics.time_series_v4\n WHERE (metric_name IN ('signoz_latency.bucket')) AND (LOWER(temporality) LIKE LOWER('delta')) AND (__normalized = 0)\n GROUP BY\n fingerprint,\n `service.name`,\n le\n ) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint\n WHERE metric_name IN ('signoz_latency.bucket')\n GROUP BY\n ts,\n `service.name`,\n le\n ),\n __histogramCTE AS\n (\n SELECT\n ts,\n `service.name`,\n histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.9) AS value\n FROM __spatial_aggregation_cte\n GROUP BY\n `service.name`,\n ts\n ORDER BY\n `service.name` ASC,\n ts ASC\n )\nSELECT\n `service.name` AS service,\n avg(value) AS A\nFROM __histogramCTE\nGROUP BY `service.name`"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"ad5fd556": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "logs from service"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozListPanel",
|
||||
"spec": {
|
||||
"selectedLogFields": [
|
||||
{
|
||||
"name": "timestamp",
|
||||
"type": "log",
|
||||
"dataType": ""
|
||||
},
|
||||
{
|
||||
"name": "body",
|
||||
"type": "log",
|
||||
"dataType": ""
|
||||
},
|
||||
{
|
||||
"name": "error",
|
||||
"type": "",
|
||||
"dataType": "string"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "LogQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "logs",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count() "
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name = $serviceName"
|
||||
},
|
||||
"groupBy": [],
|
||||
"order": [
|
||||
{
|
||||
"key": {
|
||||
"name": "timestamp"
|
||||
},
|
||||
"direction": "desc"
|
||||
},
|
||||
{
|
||||
"key": {
|
||||
"name": "id"
|
||||
},
|
||||
"direction": "desc"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"f07b59ee": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "response size buckets"
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozHistogramPanel",
|
||||
"spec": {
|
||||
"histogramBuckets": {
|
||||
"bucketCount": 60,
|
||||
"mergeAllActiveQueries": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "http.server.response.body.size.bucket",
|
||||
"reduceTo": "avg",
|
||||
"spaceAggregation": "p90",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"e1a41831": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "trace operator",
|
||||
"description": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTimeSeriesPanel",
|
||||
"spec": {
|
||||
"legend": {
|
||||
"position": "right"
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozCompositeQuery",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"type": "builder_query",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "traces",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count() "
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "service.name = 'sampleapp-gateway' "
|
||||
},
|
||||
"legend": "Gateway"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "builder_query",
|
||||
"spec": {
|
||||
"name": "B",
|
||||
"signal": "traces",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count() "
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": "http.response.status_code = 200"
|
||||
},
|
||||
"legend": "$serviceName"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "builder_trace_operator",
|
||||
"spec": {
|
||||
"name": "T1",
|
||||
"aggregations": [
|
||||
{
|
||||
"expression": "count()",
|
||||
"alias": "request_count"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"f0d70491": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "no results in this promql",
|
||||
"description": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTimeSeriesPanel",
|
||||
"spec": {}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozCompositeQuery",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"type": "promql",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"query": "sum(rate(flask_exporter_info[5m]))"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "promql",
|
||||
"spec": {
|
||||
"name": "B",
|
||||
"query": "sum(increase(flask_exporter_info[5m]))"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"0e6eb4ca": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "no results in this promql",
|
||||
"description": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTimeSeriesPanel",
|
||||
"spec": {}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozPromQLQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"query": "sum(rate(flask_exporter_info[5m]))"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"layouts": [
|
||||
{
|
||||
"kind": "Grid",
|
||||
"spec": {
|
||||
"items": [
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/24e2697b"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 6,
|
||||
"y": 0,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/ff2f72f1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 6,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/011605e7"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 6,
|
||||
"y": 6,
|
||||
"width": 6,
|
||||
"height": 3,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/e23516fc"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 6,
|
||||
"y": 9,
|
||||
"width": 6,
|
||||
"height": 3,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/130c8d6b"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 12,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/246f7c6d"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 6,
|
||||
"y": 12,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/21f7d4d0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 18,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/ad5fd556"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 6,
|
||||
"y": 18,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/f07b59ee"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 24,
|
||||
"width": 12,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/e1a41831"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 0,
|
||||
"y": 30,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/f0d70491"
|
||||
}
|
||||
},
|
||||
{
|
||||
"x": 6,
|
||||
"y": 30,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/0e6eb4ca"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
167
pkg/types/dashboardtypes/testdata/perses_with_sections.json
vendored
Normal file
167
pkg/types/dashboardtypes/testdata/perses_with_sections.json
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
{
|
||||
"kind": "Dashboard",
|
||||
"metadata": {
|
||||
"name": "nv-dashboard-with-sections",
|
||||
"project": "signoz"
|
||||
},
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": "NV dashboard with sections",
|
||||
"description": ""
|
||||
},
|
||||
"datasources": {
|
||||
"SigNozDatasource": {
|
||||
"default": true,
|
||||
"plugin": {
|
||||
"kind": "SigNozDatasource",
|
||||
"spec": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"panels": {
|
||||
"b424e23b": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozNumberPanel",
|
||||
"spec": {
|
||||
"formatting": {
|
||||
"unit": "s",
|
||||
"decimalPrecision": 2
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "container.cpu.time",
|
||||
"reduceTo": "sum",
|
||||
"spaceAggregation": "sum",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"251df4d5": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"display": {
|
||||
"name": ""
|
||||
},
|
||||
"plugin": {
|
||||
"kind": "SigNozTimeSeriesPanel",
|
||||
"spec": {
|
||||
"visualization": {
|
||||
"fillSpans": false
|
||||
},
|
||||
"formatting": {
|
||||
"unit": "recommendations",
|
||||
"decimalPrecision": 2
|
||||
},
|
||||
"chartAppearance": {
|
||||
"lineInterpolation": "spline",
|
||||
"showPoints": false,
|
||||
"lineStyle": "solid",
|
||||
"fillMode": "none",
|
||||
"spanGaps": true
|
||||
},
|
||||
"legend": {
|
||||
"position": "bottom"
|
||||
}
|
||||
}
|
||||
},
|
||||
"queries": [
|
||||
{
|
||||
"kind": "TimeSeriesQuery",
|
||||
"spec": {
|
||||
"plugin": {
|
||||
"kind": "SigNozBuilderQuery",
|
||||
"spec": {
|
||||
"name": "A",
|
||||
"signal": "metrics",
|
||||
"aggregations": [
|
||||
{
|
||||
"metricName": "app_recommendations_counter",
|
||||
"reduceTo": "sum",
|
||||
"spaceAggregation": "sum",
|
||||
"timeAggregation": "rate"
|
||||
}
|
||||
],
|
||||
"filter": {
|
||||
"expression": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"layouts": [
|
||||
{
|
||||
"kind": "Grid",
|
||||
"spec": {
|
||||
"display": {
|
||||
"title": "Bravo",
|
||||
"collapse": {
|
||||
"open": true
|
||||
}
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/b424e23b"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "Grid",
|
||||
"spec": {
|
||||
"display": {
|
||||
"title": "Alpha",
|
||||
"collapse": {
|
||||
"open": true
|
||||
}
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"width": 6,
|
||||
"height": 6,
|
||||
"content": {
|
||||
"$ref": "#/spec/panels/251df4d5"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -91,6 +91,16 @@ func (f QueryBuilderFormula) Validate() error {
|
||||
)
|
||||
}
|
||||
|
||||
// Validate expression is parseable
|
||||
if _, err := govaluate.NewEvaluableExpressionWithFunctions(f.Expression, EvalFuncs()); err != nil {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"failed to parse expression for formula query %q: %s",
|
||||
f.Name,
|
||||
err.Error(),
|
||||
)
|
||||
}
|
||||
|
||||
// Validate functions if present
|
||||
for i, fn := range f.Functions {
|
||||
if err := fn.Validate(); err != nil {
|
||||
|
||||
@@ -305,7 +305,7 @@ func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
|
||||
func (r *QueryRangeRequest) StepIntervalForQuery(name string) (int64, error) {
|
||||
stepsMap := make(map[string]int64)
|
||||
for _, query := range r.CompositeQuery.Queries {
|
||||
switch spec := query.Spec.(type) {
|
||||
@@ -317,11 +317,13 @@ func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
|
||||
stepsMap[spec.Name] = spec.StepInterval.Milliseconds()
|
||||
case PromQuery:
|
||||
stepsMap[spec.Name] = spec.Step.Milliseconds()
|
||||
case QueryBuilderTraceOperator:
|
||||
stepsMap[spec.Name] = spec.StepInterval.Milliseconds()
|
||||
}
|
||||
}
|
||||
|
||||
if step, ok := stepsMap[name]; ok {
|
||||
return step
|
||||
return step, nil
|
||||
}
|
||||
|
||||
exprStr := ""
|
||||
@@ -335,12 +337,15 @@ func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
|
||||
}
|
||||
}
|
||||
|
||||
expression, _ := govaluate.NewEvaluableExpressionWithFunctions(exprStr, EvalFuncs())
|
||||
expression, err := govaluate.NewEvaluableExpressionWithFunctions(exprStr, EvalFuncs())
|
||||
if err != nil {
|
||||
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse expression for formula query %q: %s", name, err.Error())
|
||||
}
|
||||
steps := []int64{}
|
||||
for _, v := range expression.Vars() {
|
||||
steps = append(steps, stepsMap[v])
|
||||
}
|
||||
return LCMList(steps)
|
||||
return LCMList(steps), nil
|
||||
}
|
||||
|
||||
func (r *QueryRangeRequest) NumAggregationForQuery(name string) int64 {
|
||||
|
||||
@@ -1798,3 +1798,108 @@ func TestQueryRangeRequest_GetQueriesSupportingZeroDefault(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestQueryRangeRequest_StepIntervalForQuery(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
request QueryRangeRequest
|
||||
queryName string
|
||||
wantStep int64
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "trace operator returns its step interval",
|
||||
request: QueryRangeRequest{
|
||||
CompositeQuery: CompositeQuery{
|
||||
Queries: []QueryEnvelope{
|
||||
{
|
||||
Type: QueryTypeBuilder,
|
||||
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: Step{Duration: 60 * time.Second},
|
||||
Aggregations: []TraceAggregation{{Expression: "count()"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: QueryTypeTraceOperator,
|
||||
Spec: QueryBuilderTraceOperator{
|
||||
Name: "Trace Operator",
|
||||
StepInterval: Step{Duration: 120 * time.Second},
|
||||
Expression: "A",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
queryName: "Trace Operator",
|
||||
wantStep: 120000,
|
||||
},
|
||||
{
|
||||
name: "formula computes LCM of referenced query steps",
|
||||
request: QueryRangeRequest{
|
||||
CompositeQuery: CompositeQuery{
|
||||
Queries: []QueryEnvelope{
|
||||
{
|
||||
Type: QueryTypeBuilder,
|
||||
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: Step{Duration: 60 * time.Second},
|
||||
Aggregations: []TraceAggregation{{Expression: "count()"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: QueryTypeBuilder,
|
||||
Spec: QueryBuilderQuery[TraceAggregation]{
|
||||
Name: "B",
|
||||
Signal: telemetrytypes.SignalTraces,
|
||||
StepInterval: Step{Duration: 90 * time.Second},
|
||||
Aggregations: []TraceAggregation{{Expression: "count()"}},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: QueryTypeFormula,
|
||||
Spec: QueryBuilderFormula{
|
||||
Name: "F1",
|
||||
Expression: "A + B",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
queryName: "F1",
|
||||
wantStep: 180000, // LCM of 60s and 90s = 180s
|
||||
},
|
||||
{
|
||||
name: "invalid formula expression returns error",
|
||||
request: QueryRangeRequest{
|
||||
CompositeQuery: CompositeQuery{
|
||||
Queries: []QueryEnvelope{
|
||||
{
|
||||
Type: QueryTypeFormula,
|
||||
Spec: QueryBuilderFormula{
|
||||
Name: "F1",
|
||||
Expression: "A +",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
queryName: "F1",
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got, err := tt.request.StepIntervalForQuery(tt.queryName)
|
||||
if tt.wantErr {
|
||||
require.Error(t, err)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tt.wantStep, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -496,6 +496,19 @@ func (r *QueryRangeRequest) Validate(opts ...ValidationOption) error {
|
||||
)
|
||||
}
|
||||
|
||||
// raw/trace request types don't support metric queries;
|
||||
// metrics are always aggregated and there is no raw form.
|
||||
if r.RequestType == RequestTypeRaw || r.RequestType == RequestTypeRawStream || r.RequestType == RequestTypeTrace {
|
||||
for _, envelope := range r.CompositeQuery.Queries {
|
||||
if envelope.GetSignal() == telemetrytypes.SignalMetrics {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"raw request type is not supported for metric queries",
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Validate composite query
|
||||
if err := r.CompositeQuery.Validate(opts...); err != nil {
|
||||
return err
|
||||
@@ -584,13 +597,7 @@ func validateQueryEnvelope(envelope QueryEnvelope, opts ...ValidationOption) err
|
||||
"invalid formula spec",
|
||||
)
|
||||
}
|
||||
if spec.Expression == "" {
|
||||
return errors.NewInvalidInputf(
|
||||
errors.CodeInvalidInput,
|
||||
"formula expression is required",
|
||||
)
|
||||
}
|
||||
return nil
|
||||
return spec.Validate()
|
||||
case QueryTypeJoin:
|
||||
_, ok := envelope.Spec.(QueryBuilderJoin)
|
||||
if !ok {
|
||||
|
||||
@@ -518,7 +518,7 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errMsg: "expression is required",
|
||||
errMsg: "expression cannot be blank",
|
||||
},
|
||||
{
|
||||
name: "promql with empty query should return error",
|
||||
@@ -665,6 +665,57 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
{
|
||||
name: "raw request with metric query should return error",
|
||||
request: QueryRangeRequest{
|
||||
Start: 1640995200000,
|
||||
End: 1640998800000,
|
||||
RequestType: RequestTypeRaw,
|
||||
CompositeQuery: CompositeQuery{
|
||||
Queries: []QueryEnvelope{
|
||||
{
|
||||
Type: QueryTypeBuilder,
|
||||
Spec: QueryBuilderQuery[MetricAggregation]{
|
||||
Name: "A",
|
||||
Disabled: true,
|
||||
Signal: telemetrytypes.SignalMetrics,
|
||||
Aggregations: []MetricAggregation{},
|
||||
},
|
||||
},
|
||||
{
|
||||
Type: QueryTypeFormula,
|
||||
Spec: QueryBuilderFormula{
|
||||
Name: "F1",
|
||||
Expression: "A",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
errMsg: "raw request type is not supported for metric queries",
|
||||
},
|
||||
{
|
||||
name: "raw request with log query without aggregations should pass",
|
||||
request: QueryRangeRequest{
|
||||
Start: 1640995200000,
|
||||
End: 1640998800000,
|
||||
RequestType: RequestTypeRaw,
|
||||
CompositeQuery: CompositeQuery{
|
||||
Queries: []QueryEnvelope{
|
||||
{
|
||||
Type: QueryTypeBuilder,
|
||||
Spec: QueryBuilderQuery[LogAggregation]{
|
||||
Name: "A",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Aggregations: []LogAggregation{},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantErr: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
@@ -733,7 +784,7 @@ func TestValidateQueryEnvelope(t *testing.T) {
|
||||
},
|
||||
requestType: RequestTypeTimeSeries,
|
||||
wantErr: true,
|
||||
errMsg: "expression is required",
|
||||
errMsg: "expression cannot be blank",
|
||||
},
|
||||
{
|
||||
name: "valid join spec",
|
||||
|
||||
@@ -42,7 +42,6 @@ type User struct {
|
||||
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
|
||||
IsRoot bool `bun:"is_root" json:"isRoot"`
|
||||
Status valuer.String `bun:"status" json:"status"`
|
||||
DeletedAt time.Time `bun:"deleted_at" json:"-"`
|
||||
TimeAuditable
|
||||
}
|
||||
|
||||
@@ -136,7 +135,6 @@ func NewUserFromDeprecatedUser(deprecatedUser *DeprecatedUser) *User {
|
||||
OrgID: deprecatedUser.OrgID,
|
||||
IsRoot: deprecatedUser.IsRoot,
|
||||
Status: deprecatedUser.Status,
|
||||
DeletedAt: deprecatedUser.DeletedAt,
|
||||
TimeAuditable: deprecatedUser.TimeAuditable,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,10 @@ USER_EDITOR_NAME = "editor"
|
||||
USER_EDITOR_EMAIL = "editor@integration.test"
|
||||
USER_EDITOR_PASSWORD = "password123Z$"
|
||||
|
||||
USER_VIEWER_NAME = "viewer"
|
||||
USER_VIEWER_EMAIL = "viewer@integration.test"
|
||||
USER_VIEWER_PASSWORD = "password123Z$"
|
||||
|
||||
|
||||
@pytest.fixture(name="create_user_admin", scope="package")
|
||||
def create_user_admin(
|
||||
|
||||
115
tests/integration/fixtures/authutils.py
Normal file
115
tests/integration/fixtures/authutils.py
Normal file
@@ -0,0 +1,115 @@
|
||||
"""Reusable helpers for user API tests."""
|
||||
|
||||
from http import HTTPStatus
|
||||
from typing import Dict
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
|
||||
USERS_BASE = "/api/v2/users"
|
||||
|
||||
|
||||
def create_active_user(
|
||||
signoz: types.SigNoz,
|
||||
admin_token: str,
|
||||
email: str,
|
||||
role: str,
|
||||
password: str,
|
||||
name: str = "",
|
||||
) -> str:
|
||||
"""Invite a user and activate via resetPassword. Returns user ID."""
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={"email": email, "role": role, "name": name},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
invited_user = response.json()["data"]
|
||||
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
|
||||
json={"password": password, "token": invited_user["token"]},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT, response.text
|
||||
|
||||
return invited_user["id"]
|
||||
|
||||
|
||||
def find_user_by_email(signoz: types.SigNoz, token: str, email: str) -> Dict:
|
||||
"""Find a user by email from the user list. Raises AssertionError if not found."""
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(USERS_BASE),
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK, response.text
|
||||
user = next((u for u in response.json()["data"] if u["email"] == email), None)
|
||||
assert user is not None, f"User with email '{email}' not found"
|
||||
return user
|
||||
|
||||
|
||||
def find_user_with_roles_by_email(signoz: types.SigNoz, token: str, email: str) -> Dict:
|
||||
"""Find a user by email and return UserWithRoles (user fields + userRoles).
|
||||
|
||||
Raises AssertionError if the user is not found.
|
||||
"""
|
||||
user = find_user_by_email(signoz, token, email)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"{USERS_BASE}/{user['id']}"),
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK, response.text
|
||||
return response.json()["data"]
|
||||
|
||||
|
||||
def assert_user_has_role(data: Dict, role_name: str) -> None:
|
||||
"""Assert that a UserWithRoles response contains the expected managed role."""
|
||||
role_names = {ur["role"]["name"] for ur in data.get("userRoles", [])}
|
||||
assert role_name in role_names, f"Expected role '{role_name}' in {role_names}"
|
||||
|
||||
|
||||
def change_user_role(
|
||||
signoz: types.SigNoz,
|
||||
admin_token: str,
|
||||
user_id: str,
|
||||
old_role: str,
|
||||
new_role: str,
|
||||
) -> None:
|
||||
"""Change a user's role (remove old, assign new).
|
||||
|
||||
Role names should be managed role names (e.g. signoz-editor).
|
||||
"""
|
||||
# Get current roles to find the old role's ID
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"{USERS_BASE}/{user_id}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK, response.text
|
||||
roles = response.json()["data"]
|
||||
|
||||
old_role_entry = next((r for r in roles if r["name"] == old_role), None)
|
||||
assert old_role_entry is not None, f"User does not have role '{old_role}'"
|
||||
|
||||
# Remove old role
|
||||
response = requests.delete(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"{USERS_BASE}/{user_id}/roles/{old_role_entry['id']}"
|
||||
),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT, response.text
|
||||
|
||||
# Assign new role
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get(f"{USERS_BASE}/{user_id}/roles"),
|
||||
json={"name": new_role},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK, response.text
|
||||
@@ -12,9 +12,12 @@ from fixtures.auth import (
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.authutils import (
|
||||
assert_user_has_role,
|
||||
find_user_with_roles_by_email,
|
||||
)
|
||||
from fixtures.idputils import (
|
||||
get_saml_domain,
|
||||
get_user_by_email,
|
||||
perform_saml_login,
|
||||
)
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
@@ -131,26 +134,10 @@ def test_saml_authn(
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Assert that the user was created in signoz.
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
found_user = find_user_with_roles_by_email(
|
||||
signoz, admin_token, "viewer@saml.integration.test"
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in user_response
|
||||
if user["email"] == "viewer@saml.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_idp_initiated_saml_authn(
|
||||
@@ -182,26 +169,10 @@ def test_idp_initiated_saml_authn(
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Assert that the user was created in signoz.
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
found_user = find_user_with_roles_by_email(
|
||||
signoz, admin_token, "viewer.idp.initiated@saml.integration.test"
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in user_response
|
||||
if user["email"] == "viewer.idp.initiated@saml.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_saml_update_domain_with_group_mappings(
|
||||
@@ -268,10 +239,9 @@ def test_saml_role_mapping_single_group_admin(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_saml_role_mapping_single_group_editor(
|
||||
@@ -294,10 +264,9 @@ def test_saml_role_mapping_single_group_editor(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
|
||||
def test_saml_role_mapping_multiple_groups_highest_wins(
|
||||
@@ -324,10 +293,9 @@ def test_saml_role_mapping_multiple_groups_highest_wins(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
|
||||
def test_saml_role_mapping_explicit_viewer_group(
|
||||
@@ -351,10 +319,9 @@ def test_saml_role_mapping_explicit_viewer_group(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_saml_role_mapping_unmapped_group_uses_default(
|
||||
@@ -377,10 +344,9 @@ def test_saml_role_mapping_unmapped_group_uses_default(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_saml_update_domain_with_use_role_claim(
|
||||
@@ -454,10 +420,9 @@ def test_saml_role_mapping_role_claim_takes_precedence(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_saml_role_mapping_invalid_role_claim_fallback(
|
||||
@@ -484,10 +449,9 @@ def test_saml_role_mapping_invalid_role_claim_fallback(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
|
||||
def test_saml_role_mapping_case_insensitive(
|
||||
@@ -514,10 +478,9 @@ def test_saml_role_mapping_case_insensitive(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_saml_name_mapping(
|
||||
@@ -539,13 +502,12 @@ def test_saml_name_mapping(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert (
|
||||
found_user["displayName"] == "Jane"
|
||||
) # We are only mapping the first name here
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_saml_empty_name_fallback(
|
||||
@@ -567,10 +529,9 @@ def test_saml_empty_name_fallback(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_saml_sso_login_activates_pending_invite_user(
|
||||
@@ -610,10 +571,9 @@ def test_saml_sso_login_activates_pending_invite_user(
|
||||
)
|
||||
|
||||
# User should be active with VIEWER role from SSO
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
assert found_user is not None
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
assert found_user["status"] == "active"
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_saml_sso_deleted_user_gets_new_user_on_login(
|
||||
@@ -680,18 +640,26 @@ def test_saml_sso_deleted_user_gets_new_user_on_login(
|
||||
|
||||
# Verify a NEW active user was auto-provisioned via SSO
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in response.json()["data"]
|
||||
if user["email"] == email and user["id"] != user_id
|
||||
),
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
users = response.json()["data"]
|
||||
new_user = next(
|
||||
(user for user in users if user["email"] == email and user["id"] != user_id),
|
||||
None,
|
||||
)
|
||||
assert found_user is not None
|
||||
assert found_user["status"] == "active"
|
||||
assert found_user["role"] == "VIEWER" # default role from SSO domain config
|
||||
assert new_user is not None
|
||||
assert new_user["status"] == "active"
|
||||
# Fetch full user with roles to check the assigned role
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{new_user['id']}"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
found_user = response.json()["data"]
|
||||
assert_user_has_role(
|
||||
found_user, "signoz-viewer"
|
||||
) # default role from SSO domain config
|
||||
|
||||
@@ -11,9 +11,12 @@ from fixtures.auth import (
|
||||
USER_ADMIN_PASSWORD,
|
||||
add_license,
|
||||
)
|
||||
from fixtures.authutils import (
|
||||
assert_user_has_role,
|
||||
find_user_with_roles_by_email,
|
||||
)
|
||||
from fixtures.idputils import (
|
||||
get_oidc_domain,
|
||||
get_user_by_email,
|
||||
perform_oidc_login,
|
||||
)
|
||||
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
|
||||
@@ -112,26 +115,10 @@ def test_oidc_authn(
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Assert that the user was created in signoz.
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
found_user = find_user_with_roles_by_email(
|
||||
signoz, admin_token, "viewer@oidc.integration.test"
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in user_response
|
||||
if user["email"] == "viewer@oidc.integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_oidc_update_domain_with_group_mappings(
|
||||
@@ -205,10 +192,9 @@ def test_oidc_role_mapping_single_group_admin(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_oidc_role_mapping_single_group_editor(
|
||||
@@ -231,10 +217,9 @@ def test_oidc_role_mapping_single_group_editor(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
|
||||
def test_oidc_role_mapping_multiple_groups_highest_wins(
|
||||
@@ -261,10 +246,9 @@ def test_oidc_role_mapping_multiple_groups_highest_wins(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_oidc_role_mapping_explicit_viewer_group(
|
||||
@@ -288,10 +272,9 @@ def test_oidc_role_mapping_explicit_viewer_group(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_oidc_role_mapping_unmapped_group_uses_default(
|
||||
@@ -314,10 +297,9 @@ def test_oidc_role_mapping_unmapped_group_uses_default(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
|
||||
def test_oidc_update_domain_with_use_role_claim(
|
||||
@@ -394,10 +376,9 @@ def test_oidc_role_mapping_role_claim_takes_precedence(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_oidc_role_mapping_invalid_role_claim_fallback(
|
||||
@@ -426,10 +407,9 @@ def test_oidc_role_mapping_invalid_role_claim_fallback(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
|
||||
def test_oidc_role_mapping_case_insensitive(
|
||||
@@ -456,10 +436,9 @@ def test_oidc_role_mapping_case_insensitive(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = get_user_by_email(signoz, admin_token, email)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
|
||||
def test_oidc_name_mapping(
|
||||
@@ -482,20 +461,11 @@ def test_oidc_name_mapping(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
users = response.json()["data"]
|
||||
found_user = next((u for u in users if u["email"] == email), None)
|
||||
|
||||
assert found_user is not None
|
||||
# Keycloak concatenates firstName + lastName into "name" claim
|
||||
assert found_user["displayName"] == "John Doe"
|
||||
assert found_user["role"] == "VIEWER" # Default role
|
||||
assert_user_has_role(found_user, "signoz-viewer") # Default role
|
||||
|
||||
|
||||
def test_oidc_empty_name_uses_fallback(
|
||||
@@ -518,19 +488,10 @@ def test_oidc_empty_name_uses_fallback(
|
||||
)
|
||||
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
users = response.json()["data"]
|
||||
found_user = next((u for u in users if u["email"] == email), None)
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
|
||||
# User should still be created even with empty name
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
# Note: displayName may be empty - this is a known limitation
|
||||
|
||||
|
||||
@@ -570,16 +531,7 @@ def test_oidc_sso_login_activates_pending_invite_user(
|
||||
signoz, idp, driver, get_session_context, idp_login, email, "password123"
|
||||
)
|
||||
|
||||
# User should be active with ADMIN role from invite, not VIEWER from SSO
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
found_user = next(
|
||||
(user for user in response.json()["data"] if user["email"] == email),
|
||||
None,
|
||||
)
|
||||
assert found_user is not None
|
||||
# User should be active with VIEWER role from SSO, not ADMIN from invite
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
|
||||
assert found_user["status"] == "active"
|
||||
assert found_user["role"] == "VIEWER"
|
||||
assert_user_has_role(found_user, "signoz-viewer")
|
||||
|
||||
@@ -4,6 +4,18 @@ from typing import Callable
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import (
|
||||
USER_ADMIN_EMAIL,
|
||||
USER_ADMIN_PASSWORD,
|
||||
USER_EDITOR_EMAIL,
|
||||
USER_EDITOR_NAME,
|
||||
USER_EDITOR_PASSWORD,
|
||||
USER_VIEWER_EMAIL,
|
||||
)
|
||||
from fixtures.authutils import (
|
||||
assert_user_has_role,
|
||||
find_user_with_roles_by_email,
|
||||
)
|
||||
from fixtures.logger import setup_logger
|
||||
|
||||
logger = setup_logger(__name__)
|
||||
@@ -58,8 +70,8 @@ def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
|
||||
"name": "admin",
|
||||
"orgId": "",
|
||||
"orgName": "integration.test",
|
||||
"email": "admin@integration.test",
|
||||
"password": "password123Z$",
|
||||
"email": USER_ADMIN_EMAIL,
|
||||
"password": USER_ADMIN_PASSWORD,
|
||||
},
|
||||
timeout=2,
|
||||
)
|
||||
@@ -72,130 +84,73 @@ def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.json()["setupCompleted"] is True
|
||||
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(user for user in user_response if user["email"] == "admin@integration.test"),
|
||||
None,
|
||||
)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "ADMIN"
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user["id"]}"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.json()["data"]["role"] == "ADMIN"
|
||||
# Verify admin user exists via v2
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, USER_ADMIN_EMAIL)
|
||||
assert_user_has_role(found_user, "signoz-admin")
|
||||
|
||||
|
||||
def test_invite_and_register(
|
||||
signoz: types.SigNoz, get_token: Callable[[str, str], str]
|
||||
) -> None:
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
def test_invite(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
# Generate an invite token for the editor user
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={"email": "editor@integration.test", "role": "EDITOR", "name": "editor"},
|
||||
json={"email": USER_EDITOR_EMAIL, "role": "EDITOR", "name": USER_EDITOR_NAME},
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
|
||||
invited_user = response.json()["data"]
|
||||
assert invited_user["email"] == "editor@integration.test"
|
||||
assert invited_user["email"] == USER_EDITOR_EMAIL
|
||||
assert invited_user["role"] == "EDITOR"
|
||||
|
||||
# Verify the user user appears in the users list but as pending_invite status
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(user for user in user_response if user["email"] == "editor@integration.test"),
|
||||
None,
|
||||
)
|
||||
assert found_user is not None
|
||||
# Verify the user appears in the users list but as pending_invite status
|
||||
found_user = find_user_with_roles_by_email(signoz, admin_token, USER_EDITOR_EMAIL)
|
||||
assert found_user["status"] == "pending_invite"
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
|
||||
reset_token = invited_user["token"]
|
||||
|
||||
# Reset the password to complete the invite flow (activates the user and also grants authz)
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
|
||||
json={"password": "password123Z$", "token": reset_token},
|
||||
json={"password": USER_EDITOR_PASSWORD, "token": reset_token},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Verify the user can now log in
|
||||
editor_token = get_token("editor@integration.test", "password123Z$")
|
||||
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
|
||||
assert editor_token is not None
|
||||
|
||||
# Verify that an admin endpoint cannot be called by the editor user
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
# Verify that the editor user status has been updated to ACTIVE
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={
|
||||
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
|
||||
},
|
||||
admin_token_fresh = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
found_user = find_user_with_roles_by_email(
|
||||
signoz, admin_token_fresh, USER_EDITOR_EMAIL
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(user for user in user_response if user["email"] == "editor@integration.test"),
|
||||
None,
|
||||
)
|
||||
|
||||
assert found_user is not None
|
||||
assert found_user["role"] == "EDITOR"
|
||||
assert found_user["displayName"] == "editor"
|
||||
assert found_user["email"] == "editor@integration.test"
|
||||
assert_user_has_role(found_user, "signoz-editor")
|
||||
assert found_user["displayName"] == USER_EDITOR_NAME
|
||||
assert found_user["email"] == USER_EDITOR_EMAIL
|
||||
assert found_user["status"] == "active"
|
||||
|
||||
|
||||
def test_revoke_invite_and_register(
|
||||
def test_revoke_invite(
|
||||
signoz: types.SigNoz, get_token: Callable[[str, str], str]
|
||||
) -> None:
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Invite the viewer user
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={"email": "viewer@integration.test", "role": "VIEWER"},
|
||||
json={"email": USER_VIEWER_EMAIL, "role": "VIEWER"},
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
invited_user = response.json()["data"]
|
||||
reset_token = invited_user["token"]
|
||||
|
||||
@@ -216,30 +171,76 @@ def test_revoke_invite_and_register(
|
||||
assert response.status_code in (HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)
|
||||
|
||||
|
||||
def test_self_access(
|
||||
def test_provision_user(
|
||||
signoz: types.SigNoz, get_token: Callable[[str, str], str]
|
||||
) -> None:
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
"""
|
||||
Simulates the upstream zeus provisioning flow:
|
||||
1. Invite a user as ADMIN (register already happened via test_register)
|
||||
2. List users to find the invited user's ID
|
||||
3. Get reset password token for that user
|
||||
4. Use the token to set the password and activate the user
|
||||
5. Verify the user can log in
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
provisioned_email = "zeus-provisioned@integration.test"
|
||||
provisioned_name = "zeus provisioned user"
|
||||
provisioned_password = "password123Z$"
|
||||
|
||||
# Step 1: Invite user as ADMIN (mirrors zeus inviteUserOnSigNoz)
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={
|
||||
"email": provisioned_email,
|
||||
"name": provisioned_name,
|
||||
"role": "ADMIN",
|
||||
},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
|
||||
# Step 2: List users to find the invited user's ID (mirrors zeus GET /api/v1/user)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
users = response.json()["data"]
|
||||
found_user = next((u for u in users if u["email"] == provisioned_email), None)
|
||||
assert found_user is not None
|
||||
user_id = found_user["id"]
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(user for user in user_response if user["email"] == "editor@integration.test"),
|
||||
None,
|
||||
)
|
||||
|
||||
# Step 3: Get reset password token (mirrors zeus GET /api/v1/getResetPasswordToken/{id})
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user['id']}"),
|
||||
timeout=2,
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v1/getResetPasswordToken/{user_id}"
|
||||
),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.json()["data"]["role"] == "EDITOR"
|
||||
reset_token = response.json()["data"]["token"]
|
||||
assert reset_token is not None
|
||||
assert reset_token != ""
|
||||
|
||||
# Step 4: Use the token to set password and activate user
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
|
||||
json={"password": provisioned_password, "token": reset_token},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Step 5: Verify the provisioned user can log in and is active with admin role
|
||||
user_token = get_token(provisioned_email, provisioned_password)
|
||||
assert user_token is not None
|
||||
|
||||
provisioned_user = find_user_with_roles_by_email(
|
||||
signoz, admin_token, provisioned_email
|
||||
)
|
||||
assert provisioned_user["status"] == "active"
|
||||
assert provisioned_user["displayName"] == provisioned_name
|
||||
assert_user_has_role(provisioned_user, "signoz-admin")
|
||||
|
||||
@@ -5,56 +5,45 @@ import requests
|
||||
from sqlalchemy import sql
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.authutils import find_user_by_email
|
||||
from fixtures.logger import setup_logger
|
||||
|
||||
logger = setup_logger(__name__)
|
||||
|
||||
PASSWORD_USER_EMAIL = "admin+password@integration.test"
|
||||
PASSWORD_USER_PASSWORD = "password123Z$"
|
||||
|
||||
|
||||
def test_change_password(
|
||||
signoz: types.SigNoz, get_token: Callable[[str, str], str]
|
||||
) -> None:
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Create another admin user
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={"email": "admin+password@integration.test", "role": "ADMIN"},
|
||||
json={"email": PASSWORD_USER_EMAIL, "role": "ADMIN"},
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
invited_user = response.json()["data"]
|
||||
reset_token = invited_user["token"]
|
||||
|
||||
# Reset password to activate user
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
|
||||
json={"password": "password123Z$", "token": reset_token},
|
||||
json={"password": PASSWORD_USER_PASSWORD, "token": reset_token},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Get the user id
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in user_response
|
||||
if user["email"] == "admin+password@integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
# Get the user id via v2
|
||||
found_user = find_user_by_email(signoz, admin_token, PASSWORD_USER_EMAIL)
|
||||
|
||||
# Try logging in with the password
|
||||
token = get_token("admin+password@integration.test", "password123Z$")
|
||||
token = get_token(PASSWORD_USER_EMAIL, PASSWORD_USER_PASSWORD)
|
||||
assert token is not None
|
||||
|
||||
# Try changing the password with a bad old password which should fail
|
||||
@@ -65,7 +54,7 @@ def test_change_password(
|
||||
json={
|
||||
"userId": f"{found_user['id']}",
|
||||
"oldPassword": "password",
|
||||
"newPassword": "password123Z$",
|
||||
"newPassword": PASSWORD_USER_PASSWORD,
|
||||
},
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
@@ -80,7 +69,7 @@ def test_change_password(
|
||||
),
|
||||
json={
|
||||
"userId": f"{found_user['id']}",
|
||||
"oldPassword": "password123Z$",
|
||||
"oldPassword": PASSWORD_USER_PASSWORD,
|
||||
"newPassword": "password123Znew$",
|
||||
},
|
||||
timeout=2,
|
||||
@@ -90,33 +79,17 @@ def test_change_password(
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Try logging in with the new password
|
||||
token = get_token("admin+password@integration.test", "password123Znew$")
|
||||
token = get_token(PASSWORD_USER_EMAIL, "password123Znew$")
|
||||
assert token is not None
|
||||
|
||||
|
||||
def test_reset_password(
|
||||
signoz: types.SigNoz, get_token: Callable[[str, str], str]
|
||||
) -> None:
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Get the user id for admin+password@integration.test
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in user_response
|
||||
if user["email"] == "admin+password@integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
# Get the user id via v2
|
||||
found_user = find_user_by_email(signoz, admin_token, PASSWORD_USER_EMAIL)
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
@@ -148,33 +121,17 @@ def test_reset_password(
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
token = get_token("admin+password@integration.test", "password123Z$NEWNEW#!")
|
||||
token = get_token(PASSWORD_USER_EMAIL, "password123Z$NEWNEW#!")
|
||||
assert token is not None
|
||||
|
||||
|
||||
def test_reset_password_with_no_password(
|
||||
signoz: types.SigNoz, get_token: Callable[[str, str], str]
|
||||
) -> None:
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Get the user id for admin+password@integration.test
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(
|
||||
user
|
||||
for user in user_response
|
||||
if user["email"] == "admin+password@integration.test"
|
||||
),
|
||||
None,
|
||||
)
|
||||
# Get the user id via v2
|
||||
found_user = find_user_by_email(signoz, admin_token, PASSWORD_USER_EMAIL)
|
||||
|
||||
with signoz.sqlstore.conn.connect() as conn:
|
||||
result = conn.execute(
|
||||
@@ -205,7 +162,7 @@ def test_reset_password_with_no_password(
|
||||
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
token = get_token("admin+password@integration.test", "FINALPASSword123!#[")
|
||||
token = get_token(PASSWORD_USER_EMAIL, "FINALPASSword123!#[")
|
||||
assert token is not None
|
||||
|
||||
|
||||
@@ -220,7 +177,7 @@ def test_forgot_password_returns_204_for_nonexistent_email(
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
|
||||
params={
|
||||
"email": "admin@integration.test",
|
||||
"email": USER_ADMIN_EMAIL,
|
||||
"ref": f"{signoz.self.host_configs['8080'].base()}",
|
||||
},
|
||||
timeout=5,
|
||||
@@ -253,20 +210,22 @@ def test_forgot_password_creates_reset_token(
|
||||
3. Use the token to reset password
|
||||
4. Verify user can login with new password
|
||||
"""
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
forgot_email = "forgot@integration.test"
|
||||
|
||||
# Create a user specifically for testing forgot password
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={
|
||||
"email": "forgot@integration.test",
|
||||
"email": forgot_email,
|
||||
"role": "EDITOR",
|
||||
"name": "forgotpassword user",
|
||||
},
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
|
||||
invited_user = response.json()["data"]
|
||||
reset_token = invited_user["token"]
|
||||
@@ -283,7 +242,7 @@ def test_forgot_password_creates_reset_token(
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
|
||||
params={
|
||||
"email": "forgot@integration.test",
|
||||
"email": forgot_email,
|
||||
"ref": f"{signoz.self.host_configs['8080'].base()}",
|
||||
},
|
||||
timeout=5,
|
||||
@@ -295,7 +254,7 @@ def test_forgot_password_creates_reset_token(
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/factor_password/forgot"),
|
||||
json={
|
||||
"email": "forgot@integration.test",
|
||||
"email": forgot_email,
|
||||
"orgId": org_id,
|
||||
"frontendBaseURL": signoz.self.host_configs["8080"].base(),
|
||||
},
|
||||
@@ -304,19 +263,7 @@ def test_forgot_password_creates_reset_token(
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Verify reset password token was created by querying the database
|
||||
# First, get the user ID
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(user for user in user_response if user["email"] == "forgot@integration.test"),
|
||||
None,
|
||||
)
|
||||
assert found_user is not None
|
||||
found_user = find_user_by_email(signoz, admin_token, forgot_email)
|
||||
|
||||
reset_token = None
|
||||
# Query the database directly to get the reset password token
|
||||
@@ -325,7 +272,7 @@ def test_forgot_password_creates_reset_token(
|
||||
result = conn.execute(
|
||||
sql.text(
|
||||
"""
|
||||
SELECT rpt.token
|
||||
SELECT rpt.token
|
||||
FROM reset_password_token rpt
|
||||
JOIN factor_password fp ON rpt.password_id = fp.id
|
||||
WHERE fp.user_id = :user_id
|
||||
@@ -351,12 +298,12 @@ def test_forgot_password_creates_reset_token(
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Verify user can login with the new password
|
||||
user_token = get_token("forgot@integration.test", "newSecurePassword123Z$!")
|
||||
user_token = get_token(forgot_email, "newSecurePassword123Z$!")
|
||||
assert user_token is not None
|
||||
|
||||
# Verify old password no longer works
|
||||
try:
|
||||
get_token("forgot@integration.test", "originalPassword123Z$")
|
||||
get_token(forgot_email, "originalPassword123Z$")
|
||||
assert False, "Old password should not work after reset"
|
||||
except AssertionError:
|
||||
pass # Expected - old password should fail
|
||||
@@ -368,27 +315,18 @@ def test_reset_password_with_expired_token(
|
||||
"""
|
||||
Test that resetting password with an expired token fails.
|
||||
"""
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Get user ID for the forgot@integration.test user
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
user_response = response.json()["data"]
|
||||
found_user = next(
|
||||
(user for user in user_response if user["email"] == "forgot@integration.test"),
|
||||
None,
|
||||
)
|
||||
assert found_user is not None
|
||||
forgot_email = "forgot@integration.test"
|
||||
|
||||
# Get user ID via v2
|
||||
found_user = find_user_by_email(signoz, admin_token, forgot_email)
|
||||
|
||||
# Get org ID
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
|
||||
params={
|
||||
"email": "forgot@integration.test",
|
||||
"email": forgot_email,
|
||||
"ref": f"{signoz.self.host_configs['8080'].base()}",
|
||||
},
|
||||
timeout=5,
|
||||
@@ -400,7 +338,7 @@ def test_reset_password_with_expired_token(
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/factor_password/forgot"),
|
||||
json={
|
||||
"email": "forgot@integration.test",
|
||||
"email": forgot_email,
|
||||
"orgId": org_id,
|
||||
"frontendBaseURL": signoz.self.host_configs["8080"].base(),
|
||||
},
|
||||
@@ -432,8 +370,8 @@ def test_reset_password_with_expired_token(
|
||||
conn.execute(
|
||||
sql.text(
|
||||
"""
|
||||
UPDATE reset_password_token
|
||||
SET expires_at = :expired_time
|
||||
UPDATE reset_password_token
|
||||
SET expires_at = :expired_time
|
||||
WHERE id = :token_id
|
||||
"""
|
||||
),
|
||||
|
||||
@@ -4,23 +4,36 @@ from typing import Callable
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import (
|
||||
USER_ADMIN_EMAIL,
|
||||
USER_ADMIN_PASSWORD,
|
||||
USER_EDITOR_EMAIL,
|
||||
USER_EDITOR_PASSWORD,
|
||||
)
|
||||
from fixtures.authutils import (
|
||||
change_user_role,
|
||||
create_active_user,
|
||||
)
|
||||
|
||||
ROLECHANGE_USER_EMAIL = "admin+rolechange@integration.test"
|
||||
ROLECHANGE_USER_PASSWORD = "password123Z$"
|
||||
|
||||
|
||||
def test_change_role(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
admin_token = get_token("admin@integration.test", "password123Z$")
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Create a new user as VIEWER
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/invite"),
|
||||
json={"email": "admin+rolechange@integration.test", "role": "VIEWER"},
|
||||
json={"email": ROLECHANGE_USER_EMAIL, "role": "VIEWER"},
|
||||
timeout=2,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
|
||||
invited_user = response.json()["data"]
|
||||
reset_token = invited_user["token"]
|
||||
@@ -28,23 +41,22 @@ def test_change_role(
|
||||
# Activate user via reset password
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
|
||||
json={"password": "password123Z$", "token": reset_token},
|
||||
json={"password": ROLECHANGE_USER_PASSWORD, "token": reset_token},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
# Make some API calls as new user
|
||||
new_user_token = get_token("admin+rolechange@integration.test", "password123Z$")
|
||||
new_user_token = get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
timeout=2,
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {new_user_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
new_user_id = response.json()["data"]["id"]
|
||||
new_user_data = response.json()["data"]
|
||||
new_user_id = new_user_data["id"]
|
||||
|
||||
# Make some API call which is protected
|
||||
response = requests.get(
|
||||
@@ -55,27 +67,27 @@ def test_change_role(
|
||||
|
||||
assert response.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
# Change the new user's role - move to ADMIN
|
||||
# Change the new user's role via v2 - move VIEWER to ADMIN
|
||||
change_user_role(signoz, admin_token, new_user_id, "signoz-viewer", "signoz-admin")
|
||||
|
||||
# Update display name via v2
|
||||
response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/user/{new_user_id}"),
|
||||
json={
|
||||
"displayName": "role change user",
|
||||
"role": "ADMIN",
|
||||
},
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{new_user_id}"),
|
||||
json={"displayName": "role change user"},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
# Make some API calls again
|
||||
# Verify user can now access admin endpoints
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
timeout=2,
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {new_user_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
me_data = response.json()["data"]
|
||||
assert me_data is not None
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/org/preferences"),
|
||||
@@ -84,3 +96,306 @@ def test_change_role(
|
||||
)
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
|
||||
def test_get_user_roles(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify GET /api/v2/users/{id}/roles returns correct roles."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# admin+rolechange user was promoted to ADMIN in test_change_role
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={
|
||||
"Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
me = response.json()["data"]
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{me['id']}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
roles = response.json()["data"]
|
||||
|
||||
assert len(roles) >= 1
|
||||
assert "signoz-admin" in {r["name"] for r in roles}
|
||||
# verify role object shape
|
||||
for role in roles:
|
||||
assert "id" in role
|
||||
assert "name" in role
|
||||
assert "type" in role
|
||||
|
||||
|
||||
def test_assign_additional_role(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify POST /api/v2/users/{id}/roles assigns an additional role."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={
|
||||
"Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
me = response.json()["data"]
|
||||
user_id = me["id"]
|
||||
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
|
||||
json={"name": "signoz-editor"},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
roles = response.json()["data"]
|
||||
names = {r["name"] for r in roles}
|
||||
assert "signoz-admin" in names
|
||||
assert "signoz-editor" in names
|
||||
|
||||
|
||||
def test_get_users_by_role(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify GET /api/v2/roles/{role_id}/users returns users with that role."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={
|
||||
"Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
me = response.json()["data"]
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{me['id']}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
roles = response.json()["data"]
|
||||
editor_role_id = next((r for r in roles if r["name"] == "signoz-editor"), None)[
|
||||
"id"
|
||||
]
|
||||
assert editor_role_id is not None
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/roles/{editor_role_id}/users"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
user_emails = {u["email"] for u in response.json()["data"]}
|
||||
assert ROLECHANGE_USER_EMAIL in user_emails
|
||||
|
||||
|
||||
def test_remove_role(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify DELETE /api/v2/users/{id}/roles/{roleId} removes the role."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={
|
||||
"Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
me = response.json()["data"]
|
||||
user_id = me["id"]
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
roles = response.json()["data"]
|
||||
editor_role_id = next((r for r in roles if r["name"] == "signoz-editor"), None)[
|
||||
"id"
|
||||
]
|
||||
assert editor_role_id is not None
|
||||
|
||||
response = requests.delete(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v2/users/{user_id}/roles/{editor_role_id}"
|
||||
),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.NO_CONTENT
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
roles_after = response.json()["data"]
|
||||
names = {r["name"] for r in roles_after}
|
||||
assert "signoz-editor" not in names
|
||||
assert "signoz-admin" in names
|
||||
|
||||
|
||||
def test_user_with_roles_reflects_change(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify GET /api/v2/users/{id} userRoles reflects role removal."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={
|
||||
"Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
|
||||
},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
me = response.json()["data"]
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{me['id']}"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
data = response.json()["data"]
|
||||
role_names = {ur["role"]["name"] for ur in data["userRoles"]}
|
||||
assert "signoz-admin" in role_names
|
||||
assert "signoz-editor" not in role_names
|
||||
|
||||
|
||||
def test_admin_cannot_assign_role_to_self(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify POST /api/v2/users/{own_id}/roles is rejected (self-mutation guard)."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
admin_data = response.json()["data"]
|
||||
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_data['id']}/roles"),
|
||||
json={"name": "signoz-editor"},
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
|
||||
def test_admin_cannot_remove_own_role(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify DELETE /api/v2/users/{own_id}/roles/{roleId} is rejected (self-mutation guard)."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
admin_data = response.json()["data"]
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_data['id']}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
roles = response.json()["data"]
|
||||
admin_role_id = next((r for r in roles if r["name"] == "signoz-admin"), None)["id"]
|
||||
assert admin_role_id is not None
|
||||
|
||||
response = requests.delete(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v2/users/{admin_data['id']}/roles/{admin_role_id}"
|
||||
),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
|
||||
def test_editor_cannot_manage_roles(
|
||||
signoz: types.SigNoz,
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Verify non-admin cannot call role management endpoints."""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# create a viewer user to be the target
|
||||
viewer_id = create_active_user(
|
||||
signoz,
|
||||
admin_token,
|
||||
email="viewer+roleauth@integration.test",
|
||||
role="VIEWER",
|
||||
password=ROLECHANGE_USER_PASSWORD,
|
||||
name="viewer roleauth",
|
||||
)
|
||||
|
||||
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
|
||||
|
||||
# GET roles — forbidden
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{viewer_id}/roles"),
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
# POST assign role — forbidden
|
||||
response = requests.post(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{viewer_id}/roles"),
|
||||
json={"name": "signoz-editor"},
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
# DELETE remove role — forbidden
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{viewer_id}/roles"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
viewer_roles = response.json()["data"]
|
||||
viewer_role_id = next(
|
||||
(r for r in viewer_roles if r["name"] == "signoz-viewer"), None
|
||||
)["id"]
|
||||
|
||||
response = requests.delete(
|
||||
signoz.self.host_configs["8080"].get(
|
||||
f"/api/v2/users/{viewer_id}/roles/{viewer_role_id}"
|
||||
),
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
@@ -27,7 +27,7 @@ def test_duplicate_user_invite_rejected(
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
invited_user = response.json()["data"]
|
||||
reset_token = invited_user["token"]
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ from typing import Callable
|
||||
import requests
|
||||
|
||||
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||
from fixtures.authutils import create_active_user
|
||||
from fixtures.types import SigNoz
|
||||
|
||||
|
||||
@@ -37,7 +38,7 @@ def test_reinvite_deleted_user(
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
invited_user = response.json()["data"]
|
||||
reset_token = invited_user["token"]
|
||||
|
||||
@@ -68,7 +69,7 @@ def test_reinvite_deleted_user(
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
reinvited_user = response.json()["data"]
|
||||
assert reinvited_user["role"] == "VIEWER"
|
||||
assert reinvited_user["id"] != invited_user["id"] # confirms a new user was created
|
||||
@@ -118,4 +119,67 @@ def test_bulk_invite(
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=5,
|
||||
)
|
||||
assert response.status_code == HTTPStatus.CREATED
|
||||
assert response.status_code == HTTPStatus.CREATED, response.text
|
||||
|
||||
|
||||
def test_delete_user(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
):
    """
    Verify that after soft-deleting a user:
    1. GET /api/v2/users shows the user with status == "deleted"
    2. GET /api/v2/users/{id} returns the user with empty userRoles (roles revoked)
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    user_id = create_active_user(
        signoz,
        admin_token,
        email="delete-verify-v2@integration.test",
        role="EDITOR",
        password="password123Z$",
        name="delete verify v2",
    )

    # verify user is active via v2 and carries exactly its one assigned role
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    data = response.json()["data"]
    assert data["status"] == "active"
    assert len(data["userRoles"]) == 1

    # delete the user (v1 endpoint performs the soft delete)
    response = requests.delete(
        signoz.self.host_configs["8080"].get(f"/api/v1/user/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT

    # verify status is deleted in the users list
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    users = response.json()["data"]
    deleted_user = next((u for u in users if u["id"] == user_id), None)
    assert deleted_user is not None
    assert deleted_user["status"] == "deleted"

    # verify roles are revoked
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    data = response.json()["data"]
    assert data["status"] == "deleted"
    # Fix: the docstring and the comment above both state that roles are
    # revoked on delete, so the deleted user must report an EMPTY userRoles
    # list — the previous assertion (== 1) contradicted the stated contract.
    assert len(data["userRoles"]) == 0
|
||||
|
||||
@@ -18,13 +18,13 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""
|
||||
Verify that the composite unique index on (org_id, email, deleted_at) allows multiple
|
||||
deleted rows for the same (org_id, email) while still enforcing uniqueness among
|
||||
non-deleted rows.
|
||||
Verify that the partial unique index on (email, org_id) WHERE status != 'deleted'
|
||||
allows multiple deleted rows for the same (org_id, email) while still enforcing
|
||||
uniqueness among non-deleted rows.
|
||||
|
||||
Non-deleted users share deleted_at=zero-time, so the unique index prevents duplicates.
|
||||
Soft-deleted users each have a distinct deleted_at timestamp, so the index allows
|
||||
multiple deleted rows for the same (org_id, email).
|
||||
The partial unique index only covers rows where status != 'deleted', so active
|
||||
users cannot share the same (org_id, email). Deleted users are excluded from
|
||||
the index, allowing multiple deleted rows for the same (org_id, email).
|
||||
|
||||
Steps:
|
||||
1. Invite and soft-delete a user via the API (first deleted row).
|
||||
@@ -32,9 +32,9 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
3. Assert via SQL that exactly two deleted rows exist for the email.
|
||||
4. Assert via SQL that inserting one active row succeeds (no conflict — only
|
||||
deleted rows exist), then inserting a second active row for the same
|
||||
(org_id, email) fails with a unique constraint error (both have deleted_at=zero-time).
|
||||
(org_id, email) fails with a unique constraint error.
|
||||
5. Assert via SQL that inserting a third deleted row for the same (org_id, email)
|
||||
with a unique deleted_at succeeds — confirming the index does not cover deleted rows.
|
||||
succeeds — confirming the index does not cover deleted rows.
|
||||
6. Assert via SQL that the final count of deleted rows is 3.
|
||||
"""
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
@@ -50,7 +50,7 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert resp.status_code == HTTPStatus.CREATED
|
||||
assert resp.status_code == HTTPStatus.CREATED, resp.text
|
||||
first_user_id = resp.json()["data"]["id"]
|
||||
|
||||
resp = requests.delete(
|
||||
@@ -71,7 +71,7 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert resp.status_code == HTTPStatus.CREATED
|
||||
assert resp.status_code == HTTPStatus.CREATED, resp.text
|
||||
second_user_id = resp.json()["data"]["id"]
|
||||
assert second_user_id != first_user_id
|
||||
|
||||
@@ -86,10 +86,9 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
with signoz.sqlstore.conn.connect() as conn:
|
||||
result = conn.execute(
|
||||
sql.text(
|
||||
"SELECT id, deleted_at FROM users"
|
||||
" WHERE email = :email AND deleted_at != :zero_time"
|
||||
"SELECT id FROM users WHERE email = :email AND status = 'deleted'"
|
||||
),
|
||||
{"email": UNIQUE_INDEX_USER_EMAIL, "zero_time": "0001-01-01 00:00:00"},
|
||||
{"email": UNIQUE_INDEX_USER_EMAIL},
|
||||
)
|
||||
deleted_rows = result.fetchall()
|
||||
|
||||
@@ -109,24 +108,24 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
org_id = result.fetchone()[0]
|
||||
|
||||
# Step 4: the unique index must still block a duplicate non-deleted row.
|
||||
# Both active rows have deleted_at=zero-time, so they share the same (org_id, email, zero-time)
|
||||
# tuple. First insert must succeed (only deleted rows exist so far).
|
||||
# Second insert for the same (org_id, email) with deleted_at=zero-time must fail.
|
||||
# The partial unique index covers rows WHERE status != 'deleted', so two active
|
||||
# rows with the same (org_id, email) must conflict.
|
||||
# First insert must succeed (only deleted rows exist so far).
|
||||
# Second insert for the same (org_id, email) with status='active' must fail.
|
||||
active_id = str(uuid.uuid4())
|
||||
with signoz.sqlstore.conn.connect() as conn:
|
||||
conn.execute(
|
||||
sql.text(
|
||||
"INSERT INTO users"
|
||||
" (id, display_name, email, org_id, is_root, status, created_at, updated_at, deleted_at)"
|
||||
" (id, display_name, email, org_id, is_root, status, created_at, updated_at)"
|
||||
" VALUES (:id, :display_name, :email, :org_id,"
|
||||
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, :zero_time)"
|
||||
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
|
||||
),
|
||||
{
|
||||
"id": active_id,
|
||||
"display_name": "first active row",
|
||||
"email": UNIQUE_INDEX_USER_EMAIL,
|
||||
"org_id": org_id,
|
||||
"zero_time": "0001-01-01 00:00:00",
|
||||
},
|
||||
)
|
||||
conn.commit()
|
||||
@@ -136,27 +135,26 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
conn.execute(
|
||||
sql.text(
|
||||
"INSERT INTO users"
|
||||
" (id, display_name, email, org_id, is_root, status, created_at, updated_at, deleted_at)"
|
||||
" (id, display_name, email, org_id, is_root, status, created_at, updated_at)"
|
||||
" VALUES (:id, :display_name, :email, :org_id,"
|
||||
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, :zero_time)"
|
||||
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
|
||||
),
|
||||
{
|
||||
"id": str(uuid.uuid4()),
|
||||
"display_name": "should violate index",
|
||||
"email": UNIQUE_INDEX_USER_EMAIL,
|
||||
"org_id": org_id,
|
||||
"zero_time": "0001-01-01 00:00:00",
|
||||
},
|
||||
)
|
||||
|
||||
# Step 5: a third deleted row with a unique deleted_at must be accepted
|
||||
# Step 5: a third deleted row must be accepted (excluded from partial index)
|
||||
with signoz.sqlstore.conn.connect() as conn:
|
||||
conn.execute(
|
||||
sql.text(
|
||||
"INSERT INTO users"
|
||||
" (id, display_name, email, org_id, is_root, status, created_at, updated_at, deleted_at)"
|
||||
" (id, display_name, email, org_id, is_root, status, created_at, updated_at)"
|
||||
" VALUES (:id, :display_name, :email, :org_id,"
|
||||
" false, 'deleted', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
|
||||
" false, 'deleted', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
|
||||
),
|
||||
{
|
||||
"id": str(uuid.uuid4()),
|
||||
@@ -172,9 +170,9 @@ def test_unique_index_allows_multiple_deleted_rows(
|
||||
result = conn.execute(
|
||||
sql.text(
|
||||
"SELECT COUNT(*) FROM users"
|
||||
" WHERE email = :email AND deleted_at != :zero_time"
|
||||
" WHERE email = :email AND status = 'deleted'"
|
||||
),
|
||||
{"email": UNIQUE_INDEX_USER_EMAIL, "zero_time": "0001-01-01 00:00:00"},
|
||||
{"email": UNIQUE_INDEX_USER_EMAIL},
|
||||
)
|
||||
count = result.fetchone()[0]
|
||||
|
||||
|
||||
203
tests/integration/src/passwordauthn/08_user.py
Normal file
203
tests/integration/src/passwordauthn/08_user.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from http import HTTPStatus
|
||||
from typing import Callable
|
||||
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.auth import (
|
||||
USER_ADMIN_EMAIL,
|
||||
USER_ADMIN_PASSWORD,
|
||||
USER_EDITOR_EMAIL,
|
||||
USER_EDITOR_PASSWORD,
|
||||
)
|
||||
from fixtures.authutils import (
|
||||
assert_user_has_role,
|
||||
find_user_by_email,
|
||||
find_user_with_roles_by_email,
|
||||
)
|
||||
|
||||
|
||||
def test_list_users(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
    """Verify GET /api/v2/users returns all users with correct fields."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    users = resp.json()["data"]

    def _first_with_email(email):
        # first listed user matching the email, or None if absent
        return next((u for u in users if u["email"] == email), None)

    # the seeded admin must be present, flagged as root, and active
    admin_user = _first_with_email(USER_ADMIN_EMAIL)
    assert admin_user is not None
    assert admin_user["isRoot"] is True
    assert admin_user["status"] == "active"

    # the seeded editor must be present and active
    editor_user = _first_with_email(USER_EDITOR_EMAIL)
    assert editor_user is not None
    assert editor_user["status"] == "active"
|
||||
|
||||
|
||||
def test_get_user(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
    """Verify GET /api/v2/users/{id} returns user with roles."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # look up the editor's id first, then fetch the detail record
    editor = find_user_by_email(signoz, token, USER_EDITOR_EMAIL)

    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{editor['id']}"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK

    payload = resp.json()["data"]
    assert payload["email"] == USER_EDITOR_EMAIL
    assert payload["status"] == "active"
    # the detail endpoint must include at least one role, and it must be editor
    assert len(payload["userRoles"]) >= 1
    assert_user_has_role(payload, "signoz-editor")
|
||||
|
||||
|
||||
def test_get_my_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify GET /api/v2/users/me returns authenticated user with roles."""
    token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    me_resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert me_resp.status_code == HTTPStatus.OK

    payload = me_resp.json()["data"]
    # /me must reflect the caller: editor identity, active, not the root user
    assert payload["email"] == USER_EDITOR_EMAIL
    assert payload["status"] == "active"
    assert payload["isRoot"] is False
    assert_user_has_role(payload, "signoz-editor")
|
||||
|
||||
|
||||
def test_update_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify PUT /api/v2/users/{id} updates displayName."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor = find_user_by_email(signoz, token, USER_EDITOR_EMAIL)

    resp = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{editor['id']}"),
        json={"displayName": "updated editor"},
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    # a successful update returns no body
    assert resp.status_code == HTTPStatus.NO_CONTENT

    # re-fetch the user and confirm the rename was persisted
    refreshed = find_user_with_roles_by_email(signoz, token, USER_EDITOR_EMAIL)
    assert refreshed["displayName"] == "updated editor"
|
||||
|
||||
|
||||
def test_update_my_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify PUT /api/v2/users/me updates own displayName."""
    token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
    auth = {"Authorization": f"Bearer {token}"}
    me_url = signoz.self.host_configs["8080"].get("/api/v2/users/me")

    put_resp = requests.put(
        me_url,
        json={"displayName": "self updated editor"},
        headers=auth,
        timeout=5,
    )
    assert put_resp.status_code == HTTPStatus.NO_CONTENT

    # read back through the same endpoint to confirm the change took effect
    get_resp = requests.get(me_url, headers=auth, timeout=5)
    assert get_resp.status_code == HTTPStatus.OK
    assert get_resp.json()["data"]["displayName"] == "self updated editor"
|
||||
|
||||
|
||||
def test_admin_cannot_update_self_via_id(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify PUT /api/v2/users/{own_id} is rejected (self-mutation guard)."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    auth = {"Authorization": f"Bearer {token}"}

    # resolve the admin's own id via the /me endpoint
    me_resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers=auth,
        timeout=5,
    )
    assert me_resp.status_code == HTTPStatus.OK
    own_id = me_resp.json()["data"]["id"]

    # self-update through the id-addressed endpoint must be rejected
    put_resp = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{own_id}"),
        json={"displayName": "should fail"},
        headers=auth,
        timeout=5,
    )
    assert put_resp.status_code == HTTPStatus.BAD_REQUEST
|
||||
|
||||
|
||||
def test_editor_cannot_list_users(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify non-admin cannot call GET /api/v2/users."""
    token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    # listing users is an admin-only endpoint
    assert resp.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
|
||||
def test_editor_cannot_get_other_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify non-admin cannot call GET /api/v2/users/{other_id}."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    # resolve the admin's id (as admin) to use as a foreign target
    me_resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert me_resp.status_code == HTTPStatus.OK
    admin_id = me_resp.json()["data"]["id"]

    # the editor must not be able to read another user's record
    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_id}"),
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.FORBIDDEN
|
||||
|
||||
|
||||
def test_editor_cannot_update_other_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify non-admin cannot call PUT /api/v2/users/{other_id}."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    # resolve the admin's id (as admin) to use as a foreign target
    me_resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert me_resp.status_code == HTTPStatus.OK
    admin_id = me_resp.json()["data"]["id"]

    # the editor must not be able to mutate another user's record
    put_resp = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_id}"),
        json={"displayName": "hacked"},
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert put_resp.status_code == HTTPStatus.FORBIDDEN
|
||||
@@ -50,14 +50,15 @@ def test_root_user_signoz_admin_assignment(
|
||||
):
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
# Get the user from the /user/me endpoint and extract the id
|
||||
user_response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
# Get the user from the v2 /users/me endpoint and extract the id
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
timeout=5,
|
||||
)
|
||||
assert user_response.status_code == HTTPStatus.OK
|
||||
user_id = user_response.json()["data"]["id"]
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
user_data = response.json()["data"]
|
||||
user_id = user_data["id"]
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/roles"),
|
||||
|
||||
@@ -11,6 +11,7 @@ from fixtures.auth import (
|
||||
USER_EDITOR_EMAIL,
|
||||
USER_EDITOR_PASSWORD,
|
||||
)
|
||||
from fixtures.authutils import change_user_role
|
||||
from fixtures.types import Operation, SigNoz
|
||||
|
||||
|
||||
@@ -46,13 +47,14 @@ def test_user_invite_accept_role_grant(
|
||||
|
||||
# Login with editor email and password
|
||||
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
|
||||
user_me_response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
timeout=2,
|
||||
timeout=5,
|
||||
)
|
||||
assert user_me_response.status_code == HTTPStatus.OK
|
||||
editor_id = user_me_response.json()["data"]["id"]
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
editor_data = response.json()["data"]
|
||||
editor_id = editor_data["id"]
|
||||
|
||||
# check the forbidden response for admin api for editor user
|
||||
admin_roles_response = requests.get(
|
||||
@@ -101,13 +103,14 @@ def test_user_update_role_grant(
|
||||
):
|
||||
# Get the editor user's id
|
||||
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
|
||||
user_me_response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
timeout=2,
|
||||
timeout=5,
|
||||
)
|
||||
assert user_me_response.status_code == HTTPStatus.OK
|
||||
editor_id = user_me_response.json()["data"]["id"]
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
editor_data = response.json()["data"]
|
||||
editor_id = editor_data["id"]
|
||||
|
||||
# Get the role id for viewer
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
@@ -120,15 +123,8 @@ def test_user_update_role_grant(
|
||||
roles_data = roles_response.json()["data"]
|
||||
org_id = roles_data[0]["orgId"]
|
||||
|
||||
# Update the user's role to viewer
|
||||
update_payload = {"role": "VIEWER"}
|
||||
update_response = requests.put(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v1/user/{editor_id}"),
|
||||
json=update_payload,
|
||||
headers={"Authorization": f"Bearer {admin_token}"},
|
||||
timeout=2,
|
||||
)
|
||||
assert update_response.status_code == HTTPStatus.OK
|
||||
# Update the user's role to viewer via v2 role endpoints
|
||||
change_user_role(signoz, admin_token, editor_id, "signoz-editor", "signoz-viewer")
|
||||
|
||||
# Check that user no longer has the editor role in the db
|
||||
with signoz.sqlstore.conn.connect() as conn:
|
||||
@@ -178,13 +174,14 @@ def test_user_delete_role_revoke(
|
||||
):
|
||||
# login with editor to get the user_id and check if user exists
|
||||
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
|
||||
user_me_response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {editor_token}"},
|
||||
timeout=2,
|
||||
timeout=5,
|
||||
)
|
||||
assert user_me_response.status_code == HTTPStatus.OK
|
||||
editor_id = user_me_response.json()["data"]["id"]
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
editor_data = response.json()["data"]
|
||||
editor_id = editor_data["id"]
|
||||
|
||||
# delete the editor user
|
||||
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
@@ -14,7 +14,7 @@ def test_root_user_created(signoz: types.SigNoz) -> None:
|
||||
The root user service reconciles asynchronously after startup.
|
||||
|
||||
Phase 1: Poll /api/v1/version until setupCompleted=true.
|
||||
Phase 2: Poll /api/v1/user until it returns 200, confirming the root
|
||||
Phase 2: Poll /api/v2/users until it returns 200, confirming the root
|
||||
user actually exists and the impersonation provider works.
|
||||
"""
|
||||
# Phase 1: wait for setupCompleted
|
||||
@@ -39,13 +39,13 @@ def test_root_user_created(signoz: types.SigNoz) -> None:
|
||||
# Phase 2: wait for root user to be fully resolved
|
||||
for attempt in range(15):
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users"),
|
||||
timeout=2,
|
||||
)
|
||||
if response.status_code == HTTPStatus.OK:
|
||||
return
|
||||
logger.info(
|
||||
"Attempt %s: /api/v1/user returned %s, retrying ...",
|
||||
"Attempt %s: /api/v2/users returned %s, retrying ...",
|
||||
attempt + 1,
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
@@ -3,6 +3,7 @@ from http import HTTPStatus
|
||||
import requests
|
||||
|
||||
from fixtures import types
|
||||
from fixtures.authutils import assert_user_has_role
|
||||
from fixtures.logger import setup_logger
|
||||
|
||||
logger = setup_logger(__name__)
|
||||
@@ -32,7 +33,7 @@ def test_impersonated_user_is_admin(signoz: types.SigNoz) -> None:
|
||||
Listing users is an admin-only endpoint.
|
||||
"""
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user"),
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users"),
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
@@ -46,4 +47,11 @@ def test_impersonated_user_is_admin(signoz: types.SigNoz) -> None:
|
||||
None,
|
||||
)
|
||||
assert root_user is not None
|
||||
assert root_user["role"] == "ADMIN"
|
||||
|
||||
# Verify root user has admin role via v2 detail endpoint
|
||||
root_detail = requests.get(
|
||||
signoz.self.host_configs["8080"].get(f"/api/v2/users/{root_user['id']}"),
|
||||
timeout=2,
|
||||
)
|
||||
assert root_detail.status_code == HTTPStatus.OK
|
||||
assert_user_has_role(root_detail.json()["data"], "signoz-admin")
|
||||
|
||||
@@ -38,12 +38,12 @@ def test_service_account_key_forbidden_on_user_me(
|
||||
create_user_admin: types.Operation, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
):
|
||||
"""Service account key must not access /api/v1/user/me — it's user-only."""
|
||||
"""Service account key must not access /api/v2/users/me — it's user-only."""
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
_, api_key = create_service_account_with_key(signoz, token, "sa-user-me-test")
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"SIGNOZ-API-KEY": api_key},
|
||||
timeout=5,
|
||||
)
|
||||
@@ -51,7 +51,7 @@ def test_service_account_key_forbidden_on_user_me(
|
||||
## This shouldn't be allowed on api key identn, will be updated once we fix that.
|
||||
assert (
|
||||
response.status_code == HTTPStatus.NOT_FOUND
|
||||
), f"Expected 404 for service account on /user/me, got {response.status_code}: {response.text}"
|
||||
), f"Expected 404 for service account on /users/me, got {response.status_code}: {response.text}"
|
||||
|
||||
|
||||
def test_service_account_key_forbidden_on_user_preferences(
|
||||
@@ -311,7 +311,7 @@ def test_user_token_still_works_on_user_me(
|
||||
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
|
||||
|
||||
response = requests.get(
|
||||
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
|
||||
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
|
||||
headers={"Authorization": f"Bearer {token}"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user