Compare commits

438 Commits
temp/study ... main
@@ -42,7 +42,7 @@ services:
timeout: 5s
retries: 3
schema-migrator-sync:
image: signoz/signoz-schema-migrator:v0.129.5
image: signoz/signoz-schema-migrator:v0.129.13
container_name: schema-migrator-sync
command:
- sync

@@ -55,7 +55,7 @@ services:
condition: service_healthy
restart: on-failure
schema-migrator-async:
image: signoz/signoz-schema-migrator:v0.129.5
image: signoz/signoz-schema-migrator:v0.129.13
container_name: schema-migrator-async
command:
- async

@@ -1,6 +1,6 @@
services:
signoz-otel-collector:
image: signoz/signoz-otel-collector:v0.128.2
image: signoz/signoz-otel-collector:v0.129.6
container_name: signoz-otel-collector-dev
command:
- --config=/etc/otel-collector-config.yaml
146
.github/CODEOWNERS
vendored
@@ -1,87 +1,137 @@
|
||||
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
|
||||
|
||||
# Owners are automatically requested for review for PRs that changes code
|
||||
|
||||
# that they own.
|
||||
|
||||
/frontend/ @SigNoz/frontend @YounixM
|
||||
/frontend/src/container/MetricsApplication @srikanthccv
|
||||
/frontend/src/container/NewWidget/RightContainer/types.ts @srikanthccv
|
||||
/frontend/ @SigNoz/frontend-maintainers
|
||||
|
||||
# Dashboard, Alert, Metrics, Service Map, Services
|
||||
/frontend/src/container/ListOfDashboard/ @srikanthccv
|
||||
/frontend/src/container/NewDashboard/ @srikanthccv
|
||||
/frontend/src/pages/DashboardsListPage/ @srikanthccv
|
||||
/frontend/src/pages/DashboardWidget/ @srikanthccv
|
||||
/frontend/src/pages/NewDashboard/ @srikanthccv
|
||||
/frontend/src/providers/Dashboard/ @srikanthccv
|
||||
# Onboarding
|
||||
|
||||
# Alerts
|
||||
/frontend/src/container/AlertHistory/ @srikanthccv
|
||||
/frontend/src/container/AllAlertChannels/ @srikanthccv
|
||||
/frontend/src/container/AnomalyAlertEvaluationView/ @srikanthccv
|
||||
/frontend/src/container/CreateAlertChannels/ @srikanthccv
|
||||
/frontend/src/container/CreateAlertRule/ @srikanthccv
|
||||
/frontend/src/container/EditAlertChannels/ @srikanthccv
|
||||
/frontend/src/container/FormAlertChannels/ @srikanthccv
|
||||
/frontend/src/container/FormAlertRules/ @srikanthccv
|
||||
/frontend/src/container/ListAlertRules/ @srikanthccv
|
||||
/frontend/src/container/TriggeredAlerts/ @srikanthccv
|
||||
/frontend/src/pages/AlertChannelCreate/ @srikanthccv
|
||||
/frontend/src/pages/AlertDetails/ @srikanthccv
|
||||
/frontend/src/pages/AlertHistory/ @srikanthccv
|
||||
/frontend/src/pages/AlertList/ @srikanthccv
|
||||
/frontend/src/pages/CreateAlert/ @srikanthccv
|
||||
/frontend/src/providers/Alert.tsx @srikanthccv
|
||||
|
||||
# Metrics
|
||||
/frontend/src/container/MetricsExplorer/ @srikanthccv
|
||||
/frontend/src/pages/MetricsApplication/ @srikanthccv
|
||||
/frontend/src/pages/MetricsExplorer/ @srikanthccv
|
||||
|
||||
# Services and Service Map
|
||||
/frontend/src/container/ServiceApplication/ @srikanthccv
|
||||
/frontend/src/container/ServiceTable/ @srikanthccv
|
||||
/frontend/src/pages/Services/ @srikanthccv
|
||||
/frontend/src/pages/ServiceTopLevelOperations/ @srikanthccv
|
||||
/frontend/src/container/Home/Services/ @srikanthccv
|
||||
/frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json @makeavish
|
||||
/frontend/src/container/OnboardingV2Container/AddDataSource/AddDataSource.tsx @makeavish
|
||||
|
||||
/deploy/ @SigNoz/devops
|
||||
.github @SigNoz/devops
|
||||
|
||||
# Scaffold Owners
|
||||
/pkg/config/ @grandwizard28
|
||||
/pkg/errors/ @grandwizard28
|
||||
/pkg/factory/ @grandwizard28
|
||||
/pkg/types/ @grandwizard28
|
||||
/pkg/valuer/ @grandwizard28
|
||||
/cmd/ @grandwizard28
|
||||
.golangci.yml @grandwizard28
|
||||
|
||||
/pkg/config/ @vikrantgupta25
|
||||
/pkg/errors/ @vikrantgupta25
|
||||
/pkg/factory/ @vikrantgupta25
|
||||
/pkg/types/ @vikrantgupta25
|
||||
/pkg/valuer/ @vikrantgupta25
|
||||
/cmd/ @vikrantgupta25
|
||||
.golangci.yml @vikrantgupta25
|
||||
|
||||
# Zeus Owners
|
||||
|
||||
/pkg/zeus/ @vikrantgupta25
|
||||
/ee/zeus/ @vikrantgupta25
|
||||
/pkg/licensing/ @vikrantgupta25
|
||||
/ee/licensing/ @vikrantgupta25
|
||||
|
||||
# SQL Owners
|
||||
|
||||
/pkg/sqlmigration/ @vikrantgupta25
|
||||
/ee/sqlmigration/ @vikrantgupta25
|
||||
/pkg/sqlschema/ @vikrantgupta25
|
||||
/ee/sqlschema/ @vikrantgupta25
|
||||
|
||||
# Analytics Owners
|
||||
|
||||
/pkg/analytics/ @vikrantgupta25
|
||||
/pkg/statsreporter/ @vikrantgupta25
|
||||
|
||||
# Querier Owners
|
||||
|
||||
/pkg/querier/ @srikanthccv
|
||||
/pkg/variables/ @srikanthccv
|
||||
/pkg/types/querybuildertypes/ @srikanthccv
|
||||
/pkg/types/telemetrytypes/ @srikanthccv
|
||||
/pkg/querybuilder/ @srikanthccv
|
||||
/pkg/telemetrylogs/ @srikanthccv
|
||||
/pkg/telemetrymetadata/ @srikanthccv
|
||||
/pkg/telemetrymetrics/ @srikanthccv
|
||||
/pkg/telemetrytraces/ @srikanthccv
|
||||
|
||||
# AuthN / AuthZ Owners
|
||||
# Metrics
|
||||
|
||||
/pkg/authz/ @vikrantgupta25 @grandwizard28
|
||||
/pkg/types/metrictypes/ @srikanthccv
|
||||
/pkg/types/metricsexplorertypes/ @srikanthccv
|
||||
/pkg/modules/metricsexplorer/ @srikanthccv
|
||||
/pkg/prometheus/ @srikanthccv
|
||||
|
||||
# APM
|
||||
|
||||
/pkg/types/servicetypes/ @srikanthccv
|
||||
/pkg/types/apdextypes/ @srikanthccv
|
||||
/pkg/modules/apdex/ @srikanthccv
|
||||
/pkg/modules/services/ @srikanthccv
|
||||
|
||||
# Dashboard
|
||||
|
||||
/pkg/types/dashboardtypes/ @srikanthccv
|
||||
/pkg/modules/dashboard/ @srikanthccv
|
||||
|
||||
# Rule/Alertmanager
|
||||
|
||||
/pkg/types/ruletypes/ @srikanthccv
|
||||
/pkg/types/alertmanagertypes @srikanthccv
|
||||
/pkg/alertmanager/ @srikanthccv
|
||||
/pkg/ruler/ @srikanthccv
|
||||
|
||||
# Correlation-adjacent
|
||||
|
||||
/pkg/contextlinks/ @srikanthccv
|
||||
/pkg/types/parsertypes/ @srikanthccv
|
||||
/pkg/queryparser/ @srikanthccv
|
||||
|
||||
# AuthN / AuthZ Owners
|
||||
|
||||
/pkg/authz/ @vikrantgupta25
|
||||
/ee/authz/ @vikrantgupta25
|
||||
/pkg/authn/ @vikrantgupta25
|
||||
/ee/authn/ @vikrantgupta25
|
||||
/pkg/modules/user/ @vikrantgupta25
|
||||
/pkg/modules/session/ @vikrantgupta25
|
||||
/pkg/modules/organization/ @vikrantgupta25
|
||||
/pkg/modules/authdomain/ @vikrantgupta25
|
||||
/pkg/modules/role/ @vikrantgupta25
|
||||
|
||||
# Integration tests
|
||||
|
||||
/tests/integration/ @vikrantgupta25
|
||||
|
||||
# OpenAPI types generator
|
||||
|
||||
/frontend/src/api @SigNoz/frontend-maintainers
|
||||
|
||||
# Dashboard Owners
|
||||
|
||||
/frontend/src/hooks/dashboard/ @SigNoz/pulse-frontend
|
||||
/frontend/src/providers/Dashboard/ @SigNoz/pulse-frontend
|
||||
|
||||
## Dashboard Types
|
||||
|
||||
/frontend/src/api/types/dashboard/ @SigNoz/pulse-frontend
|
||||
|
||||
## Dashboard List
|
||||
|
||||
/frontend/src/pages/DashboardsListPage/ @SigNoz/pulse-frontend
|
||||
/frontend/src/container/ListOfDashboard/ @SigNoz/pulse-frontend
|
||||
|
||||
## Dashboard Page
|
||||
|
||||
/frontend/src/pages/DashboardPage/ @SigNoz/pulse-frontend
|
||||
/frontend/src/container/DashboardContainer/ @SigNoz/pulse-frontend
|
||||
/frontend/src/container/GridCardLayout/ @SigNoz/pulse-frontend
|
||||
/frontend/src/container/NewWidget/ @SigNoz/pulse-frontend
|
||||
|
||||
## Public Dashboard Page
|
||||
|
||||
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
|
||||
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend
|
||||
|
||||
## UplotV2
|
||||
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
|
||||
89
.github/pull_request_template.md
vendored
@@ -1,72 +1,85 @@
|
||||
## 📄 Summary
|
||||
|
||||
<!-- Describe the purpose of the PR in a few sentences. What does it fix/add/update? -->
|
||||
## Pull Request
|
||||
|
||||
---
|
||||
|
||||
## ✅ Changes
|
||||
### 📄 Summary
|
||||
> Why does this change exist?
|
||||
> What problem does it solve, and why is this the right approach?
|
||||
|
||||
- [ ] Feature: Brief description
|
||||
- [ ] Bug fix: Brief description
|
||||
|
||||
|
||||
#### Screenshots / Screen Recordings (if applicable)
|
||||
> Include screenshots or screen recordings that clearly show the behavior before the change and the result after the change. This helps reviewers quickly understand the impact and verify the update.
|
||||
|
||||
|
||||
#### Issues closed by this PR
|
||||
> Reference issues using `Closes #issue-number` to enable automatic closure on merge.
|
||||
|
||||
---
|
||||
|
||||
## 🏷️ Required: Add Relevant Labels
|
||||
### ✅ Change Type
|
||||
_Select all that apply_
|
||||
|
||||
> ⚠️ **Manually add appropriate labels in the PR sidebar**
|
||||
Please select one or more labels (as applicable):
|
||||
|
||||
ex:
|
||||
|
||||
- `frontend`
|
||||
- `backend`
|
||||
- `devops`
|
||||
- `bug`
|
||||
- `enhancement`
|
||||
- `ui`
|
||||
- `test`
|
||||
- [ ] ✨ Feature
|
||||
- [ ] 🐛 Bug fix
|
||||
- [ ] ♻️ Refactor
|
||||
- [ ] 🛠️ Infra / Tooling
|
||||
- [ ] 🧪 Test-only
|
||||
|
||||
---
|
||||
|
||||
## 👥 Reviewers
|
||||
### 🐛 Bug Context
|
||||
> Required if this PR fixes a bug
|
||||
|
||||
> Tag the relevant teams for review:
|
||||
#### Root Cause
|
||||
> What caused the issue?
|
||||
> Regression, faulty assumption, edge case, refactor, etc.
|
||||
|
||||
- frontend / backend / devops
|
||||
#### Fix Strategy
|
||||
> How does this PR address the root cause?
|
||||
|
||||
---
|
||||
|
||||
## 🧪 How to Test
|
||||
### 🧪 Testing Strategy
|
||||
> How was this change validated?
|
||||
|
||||
<!-- Describe how reviewers can test this PR -->
|
||||
1. ...
|
||||
2. ...
|
||||
3. ...
|
||||
- Tests added/updated:
|
||||
- Manual verification:
|
||||
- Edge cases covered:
|
||||
|
||||
---
|
||||
|
||||
## 🔍 Related Issues
|
||||
### ⚠️ Risk & Impact Assessment
|
||||
> What could break? How do we recover?
|
||||
|
||||
<!-- Reference any related issues (e.g. Fixes #123, Closes #456) -->
|
||||
Closes #
|
||||
- Blast radius:
|
||||
- Potential regressions:
|
||||
- Rollback plan:
|
||||
|
||||
---
|
||||
|
||||
## 📸 Screenshots / Screen Recording (if applicable / mandatory for UI related changes)
|
||||
### 📝 Changelog
|
||||
> Fill only if this affects users, APIs, UI, or documented behavior
|
||||
> Use **N/A** for internal or non-user-facing changes
|
||||
|
||||
<!-- Add screenshots or GIFs to help visualize changes -->
|
||||
| Field | Value |
|
||||
|------|-------|
|
||||
| Deployment Type | Cloud / OSS / Enterprise |
|
||||
| Change Type | Feature / Bug Fix / Maintenance |
|
||||
| Description | User-facing summary |
|
||||
|
||||
---
|
||||
|
||||
## 📋 Checklist
|
||||
|
||||
- [ ] Dev Review
|
||||
- [ ] Test cases added (Unit/ Integration / E2E)
|
||||
- [ ] Manually tested the changes
|
||||
|
||||
### 📋 Checklist
|
||||
- [ ] Tests added or explicitly not required
|
||||
- [ ] Manually tested
|
||||
- [ ] Breaking changes documented
|
||||
- [ ] Backward compatibility considered
|
||||
|
||||
---
|
||||
|
||||
## 👀 Notes for Reviewers
|
||||
|
||||
<!-- Anything reviewers should keep in mind while reviewing -->
|
||||
|
||||
---
|
||||
|
||||
7 .github/workflows/build-community.yaml (vendored)

@@ -3,8 +3,8 @@ name: build-community
on:
push:
tags:
- 'v[0-9]+.[0-9]+.[0-9]+'
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
- "v[0-9]+.[0-9]+.[0-9]+"
- "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"

defaults:
run:

@@ -69,14 +69,13 @@ jobs:
GO_BUILD_CONTEXT: ./cmd/community
GO_BUILD_FLAGS: >-
-tags timetzdata
-ldflags='-linkmode external -extldflags \"-static\" -s -w
-ldflags='-s -w
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
-X github.com/SigNoz/signoz/pkg/version.variant=community
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
-X github.com/SigNoz/signoz/pkg/version.time=${{ needs.prepare.outputs.time }}
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
GO_CGO_ENABLED: 1
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
DOCKER_DOCKERFILE_PATH: ./cmd/community/Dockerfile.multi-arch
DOCKER_MANIFEST: true
8
.github/workflows/build-enterprise.yaml
vendored
@@ -69,6 +69,8 @@ jobs:
|
||||
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> frontend/.env
|
||||
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> frontend/.env
|
||||
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> frontend/.env
|
||||
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> frontend/.env
|
||||
echo 'DOCS_BASE_URL="https://signoz.io"' >> frontend/.env
|
||||
- name: cache-dotenv
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
@@ -84,7 +86,7 @@ jobs:
|
||||
JS_INPUT_ARTIFACT_CACHE_KEY: enterprise-dotenv-${{ github.sha }}
|
||||
JS_INPUT_ARTIFACT_PATH: frontend/.env
|
||||
JS_OUTPUT_ARTIFACT_CACHE_KEY: enterprise-jsbuild-${{ github.sha }}
|
||||
JS_OUTPUT_ARTIFACT_PATH: frontend/build
|
||||
JS_OUTPUT_ARTIFACT_PATH: frontend/build
|
||||
DOCKER_BUILD: false
|
||||
DOCKER_MANIFEST: false
|
||||
go-build:
|
||||
@@ -99,7 +101,7 @@ jobs:
|
||||
GO_BUILD_CONTEXT: ./cmd/enterprise
|
||||
GO_BUILD_FLAGS: >-
|
||||
-tags timetzdata
|
||||
-ldflags='-linkmode external -extldflags \"-static\" -s -w
|
||||
-ldflags='-s -w
|
||||
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
|
||||
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
|
||||
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
|
||||
@@ -107,10 +109,8 @@ jobs:
|
||||
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
|
||||
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
|
||||
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
|
||||
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
|
||||
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
|
||||
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
|
||||
GO_CGO_ENABLED: 1
|
||||
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
|
||||
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
|
||||
DOCKER_MANIFEST: true
|
||||
|
||||
8
.github/workflows/build-staging.yaml
vendored
@@ -68,6 +68,8 @@ jobs:
|
||||
echo 'TUNNEL_DOMAIN="${{ secrets.NP_TUNNEL_DOMAIN }}"' >> frontend/.env
|
||||
echo 'PYLON_APP_ID="${{ secrets.NP_PYLON_APP_ID }}"' >> frontend/.env
|
||||
echo 'APPCUES_APP_ID="${{ secrets.NP_APPCUES_APP_ID }}"' >> frontend/.env
|
||||
echo 'PYLON_IDENTITY_SECRET="${{ secrets.NP_PYLON_IDENTITY_SECRET }}"' >> frontend/.env
|
||||
echo 'DOCS_BASE_URL="https://staging.signoz.io"' >> frontend/.env
|
||||
- name: cache-dotenv
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
@@ -98,7 +100,7 @@ jobs:
|
||||
GO_BUILD_CONTEXT: ./cmd/enterprise
|
||||
GO_BUILD_FLAGS: >-
|
||||
-tags timetzdata
|
||||
-ldflags='-linkmode external -extldflags \"-static\" -s -w
|
||||
-ldflags='-s -w
|
||||
-X github.com/SigNoz/signoz/pkg/version.version=${{ needs.prepare.outputs.version }}
|
||||
-X github.com/SigNoz/signoz/pkg/version.variant=enterprise
|
||||
-X github.com/SigNoz/signoz/pkg/version.hash=${{ needs.prepare.outputs.hash }}
|
||||
@@ -106,10 +108,8 @@ jobs:
|
||||
-X github.com/SigNoz/signoz/pkg/version.branch=${{ needs.prepare.outputs.branch }}
|
||||
-X github.com/SigNoz/signoz/ee/zeus.url=https://api.staging.signoz.cloud
|
||||
-X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.staging.signoz.cloud
|
||||
-X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.staging.signoz.cloud
|
||||
-X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.staging.signoz.cloud/api/v1
|
||||
-X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr'
|
||||
GO_CGO_ENABLED: 1
|
||||
DOCKER_BASE_IMAGES: '{"alpine": "alpine:3.20.3"}'
|
||||
DOCKER_DOCKERFILE_PATH: ./cmd/enterprise/Dockerfile.multi-arch
|
||||
DOCKER_MANIFEST: true
|
||||
@@ -125,4 +125,4 @@ jobs:
|
||||
GITHUB_SILENT: true
|
||||
GITHUB_REPOSITORY_NAME: charts-saas-v3-staging
|
||||
GITHUB_EVENT_NAME: releaser
|
||||
GITHUB_EVENT_PAYLOAD: "{\"deployment\": \"${{ needs.prepare.outputs.deployment }}\", \"signoz_version\": \"${{ needs.prepare.outputs.version }}\"}"
|
||||
GITHUB_EVENT_PAYLOAD: '{"deployment": "${{ needs.prepare.outputs.deployment }}", "signoz_version": "${{ needs.prepare.outputs.version }}"}'
|
||||
|
||||
7 .github/workflows/commitci.yaml (vendored)

@@ -25,3 +25,10 @@ jobs:
else
echo "No references to 'ee' packages found in 'pkg' directory"
fi

if grep -R --include="*.go" '.*/ee/.*' cmd/community/; then
echo "Error: Found references to 'ee' packages in 'cmd/community' directory"
exit 1
else
echo "No references to 'ee' packages found in 'cmd/community' directory"
fi
20 .github/workflows/goci.yaml (vendored)

@@ -65,6 +65,10 @@ jobs:
set -ex
sudo apt-get update
sudo apt-get install -y gcc-aarch64-linux-gnu musl-tools
- name: node-install
uses: actions/setup-node@v5
with:
node-version: "22"
- name: docker-community
shell: bash
run: |

@@ -73,3 +77,19 @@ jobs:
shell: bash
run: |
make docker-build-enterprise
openapi:
if: |
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
runs-on: ubuntu-latest
steps:
- name: self-checkout
uses: actions/checkout@v4
- name: go-install
uses: actions/setup-go@v5
with:
go-version: "1.24"
- name: generate-openapi
run: |
go run cmd/enterprise/*.go generate openapi
git diff --compact-summary --exit-code || (echo; echo "Unexpected difference in openapi spec. Run go run cmd/enterprise/*.go generate openapi locally and commit."; exit 1)
12
.github/workflows/gor-signoz-community.yaml
vendored
@@ -3,8 +3,8 @@ name: gor-signoz-community
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
|
||||
- "v[0-9]+.[0-9]+.[0-9]+"
|
||||
- "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -21,6 +21,10 @@ jobs:
|
||||
shell: bash
|
||||
run: |
|
||||
echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV
|
||||
- name: node-setup
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: "22"
|
||||
- name: build-frontend
|
||||
run: make js-build
|
||||
- name: upload-frontend-artifact
|
||||
@@ -89,7 +93,7 @@ jobs:
|
||||
uses: goreleaser/goreleaser-action@v6
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: '~> v2'
|
||||
version: "~> v2"
|
||||
args: release --config ${{ env.CONFIG_PATH }} --clean --split
|
||||
workdir: .
|
||||
env:
|
||||
@@ -147,7 +151,7 @@ jobs:
|
||||
if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: '~> v2'
|
||||
version: "~> v2"
|
||||
args: continue --merge
|
||||
workdir: .
|
||||
env:
|
||||
|
||||
16
.github/workflows/gor-signoz.yaml
vendored
@@ -3,8 +3,8 @@ name: gor-signoz
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+'
|
||||
- 'v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+'
|
||||
- "v[0-9]+.[0-9]+.[0-9]+"
|
||||
- "v[0-9]+.[0-9]+.[0-9]+-rc.[0-9]+"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -35,8 +35,14 @@ jobs:
|
||||
echo 'POSTHOG_KEY="${{ secrets.POSTHOG_KEY }}"' >> .env
|
||||
echo 'PYLON_APP_ID="${{ secrets.PYLON_APP_ID }}"' >> .env
|
||||
echo 'APPCUES_APP_ID="${{ secrets.APPCUES_APP_ID }}"' >> .env
|
||||
echo 'PYLON_IDENTITY_SECRET="${{ secrets.PYLON_IDENTITY_SECRET }}"' >> .env
|
||||
echo 'DOCS_BASE_URL="https://signoz.io"' >> .env
|
||||
- name: node-setup
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: "22"
|
||||
- name: build-frontend
|
||||
run: make js-build
|
||||
run: make js-build
|
||||
- name: upload-frontend-artifact
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -103,7 +109,7 @@ jobs:
|
||||
uses: goreleaser/goreleaser-action@v6
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: '~> v2'
|
||||
version: "~> v2"
|
||||
args: release --config ${{ env.CONFIG_PATH }} --clean --split
|
||||
workdir: .
|
||||
env:
|
||||
@@ -160,7 +166,7 @@ jobs:
|
||||
if: steps.cache-linux.outputs.cache-hit == 'true' && steps.cache-darwin.outputs.cache-hit == 'true' # only run if caches hit
|
||||
with:
|
||||
distribution: goreleaser-pro
|
||||
version: '~> v2'
|
||||
version: "~> v2"
|
||||
args: continue --merge
|
||||
workdir: .
|
||||
env:
|
||||
|
||||
64
.github/workflows/integrationci.yaml
vendored
@@ -9,22 +9,53 @@ on:
|
||||
- labeled
|
||||
|
||||
jobs:
|
||||
fmtlint:
|
||||
if: |
|
||||
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.13
|
||||
- name: uv
|
||||
uses: astral-sh/setup-uv@v4
|
||||
- name: install
|
||||
run: |
|
||||
cd tests/integration && uv sync
|
||||
- name: fmt
|
||||
run: |
|
||||
make py-fmt
|
||||
- name: lint
|
||||
run: |
|
||||
make py-lint
|
||||
test:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
src:
|
||||
- bootstrap
|
||||
- auth
|
||||
- passwordauthn
|
||||
- callbackauthn
|
||||
- cloudintegrations
|
||||
- dashboard
|
||||
- logspipelines
|
||||
- preference
|
||||
- querier
|
||||
- role
|
||||
- ttl
|
||||
- alerts
|
||||
sqlstore-provider:
|
||||
- postgres
|
||||
- sqlite
|
||||
clickhouse-version:
|
||||
- 24.1.2-alpine
|
||||
- 25.5.6
|
||||
- 25.10.1
|
||||
schema-migrator-version:
|
||||
- v0.128.1
|
||||
- v0.129.7
|
||||
postgres-version:
|
||||
- 15
|
||||
if: |
|
||||
@@ -38,15 +69,32 @@ jobs:
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: 3.13
|
||||
- name: poetry
|
||||
- name: uv
|
||||
uses: astral-sh/setup-uv@v4
|
||||
- name: install
|
||||
run: |
|
||||
python -m pip install poetry==2.1.2
|
||||
python -m poetry config virtualenvs.in-project true
|
||||
cd tests/integration && poetry install --no-root
|
||||
cd tests/integration && uv sync
|
||||
- name: webdriver
|
||||
run: |
|
||||
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
|
||||
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" | sudo tee -a /etc/apt/sources.list.d/google-chrome.list
|
||||
sudo apt-get update -qqy
|
||||
sudo apt-get -qqy install google-chrome-stable
|
||||
CHROME_VERSION=$(google-chrome-stable --version)
|
||||
CHROME_FULL_VERSION=${CHROME_VERSION%%.*}
|
||||
CHROME_MAJOR_VERSION=${CHROME_FULL_VERSION//[!0-9]}
|
||||
sudo rm /etc/apt/sources.list.d/google-chrome.list
|
||||
export CHROMEDRIVER_VERSION=`curl -s https://googlechromelabs.github.io/chrome-for-testing/LATEST_RELEASE_${CHROME_MAJOR_VERSION%%.*}`
|
||||
curl -L -O "https://storage.googleapis.com/chrome-for-testing-public/${CHROMEDRIVER_VERSION}/linux64/chromedriver-linux64.zip"
|
||||
unzip chromedriver-linux64.zip
|
||||
chmod +x chromedriver-linux64/chromedriver
|
||||
sudo mv chromedriver-linux64/chromedriver /usr/local/bin/chromedriver
|
||||
chromedriver -version
|
||||
google-chrome-stable --version
|
||||
- name: run
|
||||
run: |
|
||||
cd tests/integration && \
|
||||
poetry run pytest \
|
||||
uv run pytest \
|
||||
--basetemp=./tmp/ \
|
||||
src/${{matrix.src}} \
|
||||
--sqlstore-provider ${{matrix.sqlstore-provider}} \
|
||||
|
||||
13
.github/workflows/jsci.yaml
vendored
@@ -17,10 +17,23 @@ jobs:
|
||||
steps:
|
||||
- name: checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: setup node
|
||||
uses: actions/setup-node@v5
|
||||
with:
|
||||
node-version: "22"
|
||||
- name: install
|
||||
run: cd frontend && yarn install
|
||||
- name: tsc
|
||||
run: cd frontend && yarn tsc
|
||||
tsc2:
|
||||
if: |
|
||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))
|
||||
uses: signoz/primus.workflows/.github/workflows/js-tsc.yaml@main
|
||||
secrets: inherit
|
||||
with:
|
||||
PRIMUS_REF: main
|
||||
JS_SRC: frontend
|
||||
test:
|
||||
if: |
|
||||
(github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
|
||||
|
||||
14
.gitignore
vendored
@@ -1,6 +1,9 @@
|
||||
|
||||
node_modules
|
||||
|
||||
.vscode
|
||||
!.vscode/settings.json
|
||||
|
||||
deploy/docker/environment_tiny/common_test
|
||||
frontend/node_modules
|
||||
frontend/.pnp
|
||||
@@ -12,7 +15,6 @@ frontend/coverage
|
||||
|
||||
# production
|
||||
frontend/build
|
||||
frontend/.vscode
|
||||
frontend/.yarnclean
|
||||
frontend/.temp_cache
|
||||
frontend/test-results
|
||||
@@ -31,7 +33,6 @@ frontend/src/constants/env.ts
|
||||
|
||||
.idea
|
||||
|
||||
**/.vscode
|
||||
**/build
|
||||
**/storage
|
||||
**/locust-scripts/__pycache__/
|
||||
@@ -49,13 +50,13 @@ ee/query-service/tests/test-deploy/data/
|
||||
# local data
|
||||
*.backup
|
||||
*.db
|
||||
**/db
|
||||
/deploy/docker/clickhouse-setup/data/
|
||||
/deploy/docker-swarm/clickhouse-setup/data/
|
||||
bin/
|
||||
.local/
|
||||
*/query-service/queries.active
|
||||
ee/query-service/db
|
||||
|
||||
# e2e
|
||||
|
||||
e2e/node_modules/
|
||||
@@ -105,7 +106,6 @@ dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
@@ -221,8 +221,6 @@ cython_debug/
|
||||
#.idea/
|
||||
|
||||
### Python Patch ###
|
||||
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
||||
poetry.toml
|
||||
|
||||
# ruff
|
||||
.ruff_cache/
|
||||
@@ -230,4 +228,6 @@ poetry.toml
|
||||
# LSP config files
|
||||
pyrightconfig.json
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/python
|
||||
|
||||
# cursor files
|
||||
frontend/.cursor/
|
||||
|
||||
@@ -1,39 +1,64 @@
|
||||
version: "2"
|
||||
linters:
|
||||
default: standard
|
||||
default: none
|
||||
enable:
|
||||
- bodyclose
|
||||
- depguard
|
||||
- errcheck
|
||||
- forbidigo
|
||||
- govet
|
||||
- iface
|
||||
- ineffassign
|
||||
- misspell
|
||||
- nilnil
|
||||
- sloglint
|
||||
- depguard
|
||||
- iface
|
||||
- wastedassign
|
||||
- unparam
|
||||
- forbidigo
|
||||
|
||||
linters-settings:
|
||||
sloglint:
|
||||
no-mixed-args: true
|
||||
kv-only: true
|
||||
no-global: all
|
||||
context: all
|
||||
static-msg: true
|
||||
msg-style: lowercased
|
||||
key-naming-case: snake
|
||||
depguard:
|
||||
rules:
|
||||
nozap:
|
||||
deny:
|
||||
- pkg: "go.uber.org/zap"
|
||||
desc: "Do not use zap logger. Use slog instead."
|
||||
noerrors:
|
||||
deny:
|
||||
- pkg: "errors"
|
||||
desc: "Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead."
|
||||
iface:
|
||||
enable:
|
||||
- identical
|
||||
issues:
|
||||
exclude-dirs:
|
||||
- "pkg/query-service"
|
||||
- "ee/query-service"
|
||||
- "scripts/"
|
||||
- unused
|
||||
settings:
|
||||
depguard:
|
||||
rules:
|
||||
noerrors:
|
||||
deny:
|
||||
- pkg: errors
|
||||
desc: Do not use errors package. Use github.com/SigNoz/signoz/pkg/errors instead.
|
||||
nozap:
|
||||
deny:
|
||||
- pkg: go.uber.org/zap
|
||||
desc: Do not use zap logger. Use slog instead.
|
||||
forbidigo:
|
||||
forbid:
|
||||
- pattern: fmt.Errorf
|
||||
- pattern: ^(fmt\.Print.*|print|println)$
|
||||
iface:
|
||||
enable:
|
||||
- identical
|
||||
sloglint:
|
||||
no-mixed-args: true
|
||||
kv-only: true
|
||||
no-global: all
|
||||
context: all
|
||||
static-msg: true
|
||||
key-naming-case: snake
|
||||
exclusions:
|
||||
generated: lax
|
||||
presets:
|
||||
- comments
|
||||
- common-false-positives
|
||||
- legacy
|
||||
- std-error-handling
|
||||
paths:
|
||||
- pkg/query-service
|
||||
- ee/query-service
|
||||
- scripts/
|
||||
- tmp/
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
formatters:
|
||||
exclusions:
|
||||
generated: lax
|
||||
paths:
|
||||
- third_party$
|
||||
- builtin$
|
||||
- examples$
|
||||
|
||||
17 .mockery.yml (Normal file)

@@ -0,0 +1,17 @@
# Link to template variables: https://pkg.go.dev/github.com/vektra/mockery/v3/config#TemplateData
template: testify
packages:
  github.com/SigNoz/signoz/pkg/alertmanager:
    config:
      all: true
      dir: '{{.InterfaceDir}}/alertmanagertest'
      filename: "alertmanager.go"
      structname: 'Mock{{.InterfaceName}}'
      pkgname: '{{.SrcPackageName}}test'
  github.com/SigNoz/signoz/pkg/tokenizer:
    config:
      all: true
      dir: '{{.InterfaceDir}}/tokenizertest'
      filename: "tokenizer.go"
      structname: 'Mock{{.InterfaceName}}'
      pkgname: '{{.SrcPackageName}}test'
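For readers unfamiliar with the `testify` template named above: mocks generated this way embed `mock.Mock` from github.com/stretchr/testify and are consumed much like a hand-written testify mock. The sketch below shows that consumption pattern; the `Tokenizer` interface and its `Tokenize` method are hypothetical stand-ins rather than the actual `pkg/tokenizer` API, and the real generated file would live under `tokenizertest/` with a `Mock`-prefixed struct name as configured above.

```go
package example

import (
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

// Tokenizer is a hypothetical interface standing in for whatever the
// real package exposes; mockery would generate an equivalent of
// MockTokenizer from it per the config above.
type Tokenizer interface {
	Tokenize(input string) ([]string, error)
}

// MockTokenizer mirrors the shape of a mockery "testify" template mock:
// it embeds mock.Mock and forwards calls through Called.
type MockTokenizer struct {
	mock.Mock
}

func (m *MockTokenizer) Tokenize(input string) ([]string, error) {
	args := m.Called(input)
	return args.Get(0).([]string), args.Error(1)
}

func TestTokenizeIsCalled(t *testing.T) {
	m := &MockTokenizer{}
	// Set the expectation and the canned return values.
	m.On("Tokenize", "a b").Return([]string{"a", "b"}, nil)

	tokens, err := m.Tokenize("a b")

	require.NoError(t, err)
	require.Equal(t, []string{"a", "b"}, tokens)
	// Fails the test if any expectation set with On was never called.
	m.AssertExpectations(t)
}
```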
15 .vscode/settings.json (vendored, Normal file)

@@ -0,0 +1,15 @@
{
  "eslint.workingDirectories": [
    "./frontend"
  ],
  "editor.formatOnSave": true,
  "editor.defaultFormatter": "esbenp.prettier-vscode",
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": "explicit"
  },
  "prettier.requireConfig": true,
  "[go]": {
    "editor.formatOnSave": true,
    "editor.defaultFormatter": "golang.go"
  }
}
68
Makefile
@@ -72,6 +72,12 @@ devenv-up: devenv-clickhouse devenv-signoz-otel-collector ## Start both clickhou
|
||||
@echo " - ClickHouse: http://localhost:8123"
|
||||
@echo " - Signoz OTel Collector: grpc://localhost:4317, http://localhost:4318"
|
||||
|
||||
.PHONY: devenv-clickhouse-clean
|
||||
devenv-clickhouse-clean: ## Clean all ClickHouse data from filesystem
|
||||
@echo "Removing ClickHouse data..."
|
||||
@rm -rf .devenv/docker/clickhouse/fs/tmp/*
|
||||
@echo "ClickHouse data cleaned!"
|
||||
|
||||
##############################################################
|
||||
# go commands
|
||||
##############################################################
|
||||
@@ -80,14 +86,13 @@ go-run-enterprise: ## Runs the enterprise go backend server
|
||||
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
|
||||
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
|
||||
SIGNOZ_WEB_ENABLED=false \
|
||||
SIGNOZ_JWT_SECRET=secret \
|
||||
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
|
||||
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
|
||||
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
|
||||
go run -race \
|
||||
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go \
|
||||
--config ./conf/prometheus.yml \
|
||||
--cluster cluster
|
||||
$(GO_BUILD_CONTEXT_ENTERPRISE)/*.go server
|
||||
|
||||
.PHONY: go-test
|
||||
go-test: ## Runs go unit tests
|
||||
@@ -98,14 +103,13 @@ go-run-community: ## Runs the community go backend server
|
||||
@SIGNOZ_INSTRUMENTATION_LOGS_LEVEL=debug \
|
||||
SIGNOZ_SQLSTORE_SQLITE_PATH=signoz.db \
|
||||
SIGNOZ_WEB_ENABLED=false \
|
||||
SIGNOZ_JWT_SECRET=secret \
|
||||
SIGNOZ_TOKENIZER_JWT_SECRET=secret \
|
||||
SIGNOZ_ALERTMANAGER_PROVIDER=signoz \
|
||||
SIGNOZ_TELEMETRYSTORE_PROVIDER=clickhouse \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_DSN=tcp://127.0.0.1:9000 \
|
||||
SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER=cluster \
|
||||
go run -race \
|
||||
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server \
|
||||
--config ./conf/prometheus.yml \
|
||||
--cluster cluster
|
||||
$(GO_BUILD_CONTEXT_COMMUNITY)/*.go server
|
||||
|
||||
.PHONY: go-build-community $(GO_BUILD_ARCHS_COMMUNITY)
|
||||
go-build-community: ## Builds the go backend server for community
|
||||
@@ -114,9 +118,9 @@ $(GO_BUILD_ARCHS_COMMUNITY): go-build-community-%: $(TARGET_DIR)
|
||||
@mkdir -p $(TARGET_DIR)/$(OS)-$*
|
||||
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)-community"
|
||||
@if [ $* = "arm64" ]; then \
|
||||
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
|
||||
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
|
||||
else \
|
||||
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
|
||||
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_COMMUNITY) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME)-community -ldflags "-s -w $(GO_BUILD_LDFLAGS_COMMUNITY)"; \
|
||||
fi
|
||||
|
||||
|
||||
@@ -127,9 +131,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE): go-build-enterprise-%: $(TARGET_DIR)
|
||||
@mkdir -p $(TARGET_DIR)/$(OS)-$*
|
||||
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
|
||||
@if [ $* = "arm64" ]; then \
|
||||
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
else \
|
||||
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
fi
|
||||
|
||||
.PHONY: go-build-enterprise-race $(GO_BUILD_ARCHS_ENTERPRISE_RACE)
|
||||
@@ -139,9 +143,9 @@ $(GO_BUILD_ARCHS_ENTERPRISE_RACE): go-build-enterprise-race-%: $(TARGET_DIR)
|
||||
@mkdir -p $(TARGET_DIR)/$(OS)-$*
|
||||
@echo ">> building binary $(TARGET_DIR)/$(OS)-$*/$(NAME)"
|
||||
@if [ $* = "arm64" ]; then \
|
||||
CC=aarch64-linux-gnu-gcc CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
else \
|
||||
CGO_ENABLED=1 GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-linkmode external -extldflags '-static' -s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
GOARCH=$* GOOS=$(OS) go build -C $(GO_BUILD_CONTEXT_ENTERPRISE) -race -tags timetzdata -o $(TARGET_DIR)/$(OS)-$*/$(NAME) -ldflags "-s -w $(GO_BUILD_LDFLAGS_ENTERPRISE)"; \
|
||||
fi
|
||||
|
||||
##############################################################
|
||||
@@ -198,14 +202,40 @@ docker-buildx-enterprise: go-build-enterprise js-build
|
||||
##############################################################
|
||||
.PHONY: py-fmt
|
||||
py-fmt: ## Run black for integration tests
|
||||
@cd tests/integration && poetry run black .
|
||||
@cd tests/integration && uv run black .
|
||||
|
||||
.PHONY: py-lint
|
||||
py-lint: ## Run lint for integration tests
|
||||
@cd tests/integration && poetry run isort .
|
||||
@cd tests/integration && poetry run autoflake .
|
||||
@cd tests/integration && poetry run pylint .
|
||||
@cd tests/integration && uv run isort .
|
||||
@cd tests/integration && uv run autoflake .
|
||||
@cd tests/integration && uv run pylint .
|
||||
|
||||
.PHONY: py-test-setup
|
||||
py-test-setup: ## Runs integration tests
|
||||
@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --reuse --capture=no src/bootstrap/setup.py::test_setup
|
||||
|
||||
.PHONY: py-test-teardown
|
||||
py-test-teardown: ## Runs integration tests with teardown
|
||||
@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --teardown --capture=no src/bootstrap/setup.py::test_teardown
|
||||
|
||||
.PHONY: py-test
|
||||
py-test: ## Runs integration tests
|
||||
@cd tests/integration && poetry run pytest --basetemp=./tmp/ -vv --capture=no src/
|
||||
@cd tests/integration && uv run pytest --basetemp=./tmp/ -vv --capture=no src/
|
||||
|
||||
.PHONY: py-clean
|
||||
py-clean: ## Clear all pycache and pytest cache from tests directory recursively
|
||||
@echo ">> cleaning python cache files from tests directory"
|
||||
@find tests -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
|
||||
@find tests -type d -name .pytest_cache -exec rm -rf {} + 2>/dev/null || true
|
||||
@find tests -type f -name "*.pyc" -delete 2>/dev/null || true
|
||||
@find tests -type f -name "*.pyo" -delete 2>/dev/null || true
|
||||
@echo ">> python cache cleaned"
|
||||
|
||||
|
||||
##############################################################
|
||||
# generate commands
|
||||
##############################################################
|
||||
.PHONY: gen-mocks
|
||||
gen-mocks:
|
||||
@echo ">> Generating mocks"
|
||||
@mockery --config .mockery.yml
|
||||
13 README.md

@@ -66,6 +66,17 @@ Read [more](https://signoz.io/metrics-and-dashboards/).



### LLM Observability

Monitor and debug your LLM applications with comprehensive observability. Track LLM calls, analyze token usage, monitor performance, and gain insights into your AI application's behavior in production.

SigNoz LLM observability helps you understand how your language models are performing, identify issues with prompts and responses, track token usage and costs, and optimize your AI applications for better performance and reliability.

[Get started with LLM Observability →](https://signoz.io/docs/llm-observability/)


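As a hedged illustration of what sending LLM telemetry to SigNoz can look like in practice, here is a minimal Go sketch using the OpenTelemetry SDK with an OTLP exporter pointed at a local collector (the default localhost:4317 endpoint matches the signoz-otel-collector used by the dev setup in this diff). The model name, token counts, and the `gen_ai.*` attribute keys (borrowed from the OpenTelemetry GenAI semantic conventions) are illustrative, not a SigNoz-mandated schema.

```go
package main

import (
	"context"
	"log"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc"
	sdktrace "go.opentelemetry.io/otel/sdk/trace"
)

func main() {
	ctx := context.Background()

	// Export spans over OTLP/gRPC; defaults to localhost:4317, where the
	// dev signoz-otel-collector listens. WithInsecure skips TLS locally.
	exp, err := otlptracegrpc.New(ctx, otlptracegrpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(exp))
	defer func() { _ = tp.Shutdown(ctx) }()
	otel.SetTracerProvider(tp)

	tracer := otel.Tracer("llm-app")

	// Wrap a (hypothetical) LLM call in a span and record token usage as
	// attributes so it can be queried and dashboarded in SigNoz.
	ctx, span := tracer.Start(ctx, "chat.completion")
	// resp := callYourLLM(ctx, prompt) // placeholder for the actual call
	span.SetAttributes(
		attribute.String("gen_ai.request.model", "example-model"),
		attribute.Int("gen_ai.usage.input_tokens", 128),
		attribute.Int("gen_ai.usage.output_tokens", 256),
	)
	span.End()
}
```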
### Alerts

Use alerts in SigNoz to get notified when anything unusual happens in your application. You can set alerts on any type of telemetry signal (logs, metrics, traces), create thresholds and set up a notification channel to get notified. Advanced features like alert history and anomaly detection can help you create smarter alerts.

@@ -236,7 +247,7 @@ Not sure how to get started? Just ping us on `#contributing` in our [slack commu
#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/grandwizard28)
- [Vibhu Pandey](https://github.com/therealpandey)

<br /><br />
@@ -12,12 +12,6 @@ builds:
|
||||
- id: signoz
|
||||
binary: bin/signoz
|
||||
main: ./cmd/community
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- >-
|
||||
{{- if eq .Os "linux" }}
|
||||
{{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
|
||||
{{- end }}
|
||||
goos:
|
||||
- linux
|
||||
- darwin
|
||||
@@ -36,8 +30,6 @@ builds:
|
||||
- -X github.com/SigNoz/signoz/pkg/version.time={{ .CommitTimestamp }}
|
||||
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
|
||||
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
|
||||
- >-
|
||||
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
|
||||
mod_timestamp: "{{ .CommitTimestamp }}"
|
||||
tags:
|
||||
- timetzdata
|
||||
|
||||
@@ -13,6 +13,7 @@ func main() {

	// register a list of commands to the root command
	registerServer(cmd.RootCmd, logger)
	cmd.RegisterGenerate(cmd.RootCmd, logger)

	cmd.Execute(logger)
}
@@ -3,20 +3,29 @@ package main
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/pkg/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role/implrole"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
@@ -49,19 +58,9 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
// print the version
|
||||
version.Info.PrettyPrint(config.Version)
|
||||
|
||||
// add enterprise sqlstore factories to the community sqlstore factories
|
||||
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
|
||||
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)
|
||||
|
||||
signoz, err := signoz.New(
|
||||
ctx,
|
||||
config,
|
||||
jwt,
|
||||
zeus.Config{},
|
||||
noopzeus.NewProviderFactory(),
|
||||
licensing.Config{},
|
||||
@@ -76,13 +75,28 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
},
|
||||
signoz.NewSQLStoreProviderFactories(),
|
||||
signoz.NewTelemetryStoreProviderFactories(),
|
||||
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
|
||||
return signoz.NewAuthNs(ctx, providerSettings, store, licensing)
|
||||
},
|
||||
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
|
||||
},
|
||||
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, _ role.Setter, _ role.Granter, queryParser queryparser.QueryParser, _ querier.Querier, _ licensing.Licensing) dashboard.Module {
|
||||
return impldashboard.NewModule(impldashboard.NewStore(store), settings, analytics, orgGetter, queryParser)
|
||||
},
|
||||
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return noopgateway.NewProviderFactory()
|
||||
},
|
||||
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, _ []role.RegisterTypeable) role.Setter {
|
||||
return implrole.NewSetter(implrole.NewStore(store), authz)
|
||||
},
|
||||
)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
server, err := app.NewServer(config, signoz, jwt)
|
||||
server, err := app.NewServer(config, signoz)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create server", "error", err)
|
||||
return err
|
||||
|
||||
@@ -3,7 +3,6 @@ package cmd
import (
	"context"
	"log/slog"
	"os"

	"github.com/SigNoz/signoz/pkg/config"
	"github.com/SigNoz/signoz/pkg/config/envprovider"

@@ -30,12 +29,3 @@ func NewSigNozConfig(ctx context.Context, logger *slog.Logger, flags signoz.Depr

	return config, nil
}

func NewJWTSecret(ctx context.Context, logger *slog.Logger) string {
	jwtSecret := os.Getenv("SIGNOZ_JWT_SECRET")
	if len(jwtSecret) == 0 {
		logger.ErrorContext(ctx, "🚨 CRITICAL SECURITY ISSUE: No JWT secret key specified!", "error", "SIGNOZ_JWT_SECRET environment variable is not set. This has dire consequences for the security of the application. Without a JWT secret, user sessions are vulnerable to tampering and unauthorized access. Please set the SIGNOZ_JWT_SECRET environment variable immediately. For more information, please refer to https://github.com/SigNoz/signoz/issues/8400.")
	}

	return jwtSecret
}
@@ -12,12 +12,6 @@ builds:
|
||||
- id: signoz
|
||||
binary: bin/signoz
|
||||
main: ./cmd/enterprise
|
||||
env:
|
||||
- CGO_ENABLED=1
|
||||
- >-
|
||||
{{- if eq .Os "linux" }}
|
||||
{{- if eq .Arch "arm64" }}CC=aarch64-linux-gnu-gcc{{- end }}
|
||||
{{- end }}
|
||||
goos:
|
||||
- linux
|
||||
- darwin
|
||||
@@ -37,11 +31,8 @@ builds:
|
||||
- -X github.com/SigNoz/signoz/pkg/version.branch={{ .Branch }}
|
||||
- -X github.com/SigNoz/signoz/ee/zeus.url=https://api.signoz.cloud
|
||||
- -X github.com/SigNoz/signoz/ee/zeus.deprecatedURL=https://license.signoz.io
|
||||
- -X github.com/SigNoz/signoz/ee/query-service/constants.ZeusURL=https://api.signoz.cloud
|
||||
- -X github.com/SigNoz/signoz/ee/query-service/constants.LicenseSignozIo=https://license.signoz.io/api/v1
|
||||
- -X github.com/SigNoz/signoz/pkg/analytics.key=9kRrJ7oPCGPEJLF6QjMPLt5bljFhRQBr
|
||||
- >-
|
||||
{{- if eq .Os "linux" }}-linkmode external -extldflags '-static'{{- end }}
|
||||
mod_timestamp: "{{ .CommitTimestamp }}"
|
||||
tags:
|
||||
- timetzdata
|
||||
|
||||
@@ -1,11 +1,3 @@
|
||||
FROM node:18-bullseye AS build
|
||||
|
||||
WORKDIR /opt/
|
||||
COPY ./frontend/ ./
|
||||
ENV NODE_OPTIONS=--max-old-space-size=8192
|
||||
RUN CI=1 yarn install
|
||||
RUN CI=1 yarn build
|
||||
|
||||
FROM golang:1.24-bullseye
|
||||
|
||||
ARG OS="linux"
|
||||
@@ -40,8 +32,6 @@ COPY Makefile Makefile
|
||||
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
|
||||
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
|
||||
|
||||
COPY --from=build /opt/build ./web/
|
||||
|
||||
RUN chmod 755 /root /root/signoz
|
||||
|
||||
ENTRYPOINT ["/root/signoz", "server"]
|
||||
|
||||
47
cmd/enterprise/Dockerfile.with-web.integration
Normal file
@@ -0,0 +1,47 @@
|
||||
FROM node:18-bullseye AS build
|
||||
|
||||
WORKDIR /opt/
|
||||
COPY ./frontend/ ./
|
||||
ENV NODE_OPTIONS=--max-old-space-size=8192
|
||||
RUN CI=1 yarn install
|
||||
RUN CI=1 yarn build
|
||||
|
||||
FROM golang:1.24-bullseye
|
||||
|
||||
ARG OS="linux"
|
||||
ARG TARGETARCH
|
||||
ARG ZEUSURL
|
||||
|
||||
# This path is important for stacktraces
|
||||
WORKDIR $GOPATH/src/github.com/signoz/signoz
|
||||
WORKDIR /root
|
||||
|
||||
RUN set -eux; \
|
||||
apt-get update; \
|
||||
apt-get install -y --no-install-recommends \
|
||||
g++ \
|
||||
gcc \
|
||||
libc6-dev \
|
||||
make \
|
||||
pkg-config \
|
||||
; \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY go.mod go.sum ./
|
||||
|
||||
RUN go mod download
|
||||
|
||||
COPY ./cmd/ ./cmd/
|
||||
COPY ./ee/ ./ee/
|
||||
COPY ./pkg/ ./pkg/
|
||||
COPY ./templates/email /root/templates
|
||||
|
||||
COPY Makefile Makefile
|
||||
RUN TARGET_DIR=/root ARCHS=${TARGETARCH} ZEUS_URL=${ZEUSURL} LICENSE_URL=${ZEUSURL}/api/v1 make go-build-enterprise-race
|
||||
RUN mv /root/linux-${TARGETARCH}/signoz /root/signoz
|
||||
|
||||
COPY --from=build /opt/build ./web/
|
||||
|
||||
RUN chmod 755 /root /root/signoz
|
||||
|
||||
ENTRYPOINT ["/root/signoz", "server"]
|
||||
@@ -13,6 +13,7 @@ func main() {
|
||||
|
||||
// register a list of commands to the root command
|
||||
registerServer(cmd.RootCmd, logger)
|
||||
cmd.RegisterGenerate(cmd.RootCmd, logger)
|
||||
|
||||
cmd.Execute(logger)
|
||||
}
|
||||
|
||||
@@ -6,17 +6,33 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/cmd"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/ee/authz/openfgaschema"
|
||||
"github.com/SigNoz/signoz/ee/gateway/httpgateway"
|
||||
enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
|
||||
"github.com/SigNoz/signoz/ee/licensing/httplicensing"
|
||||
"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/ee/modules/role/implrole"
|
||||
enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
|
||||
"github.com/SigNoz/signoz/ee/sqlschema/postgressqlschema"
|
||||
"github.com/SigNoz/signoz/ee/sqlstore/postgressqlstore"
|
||||
enterprisezeus "github.com/SigNoz/signoz/ee/zeus"
|
||||
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
pkgimplrole "github.com/SigNoz/signoz/pkg/modules/role/implrole"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
@@ -54,17 +70,14 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
|
||||
// add enterprise sqlstore factories to the community sqlstore factories
|
||||
sqlstoreFactories := signoz.NewSQLStoreProviderFactories()
|
||||
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory())); err != nil {
|
||||
if err := sqlstoreFactories.Add(postgressqlstore.NewFactory(sqlstorehook.NewLoggingFactory(), sqlstorehook.NewInstrumentationFactory())); err != nil {
|
||||
logger.ErrorContext(ctx, "failed to add postgressqlstore factory", "error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
jwt := authtypes.NewJWT(cmd.NewJWTSecret(ctx, logger), 30*time.Minute, 30*24*time.Hour)
|
||||
|
||||
signoz, err := signoz.New(
|
||||
ctx,
|
||||
config,
|
||||
jwt,
|
||||
enterprisezeus.Config(),
|
||||
httpzeus.NewProviderFactory(),
|
||||
enterpriselicensing.Config(24*time.Hour, 3),
|
||||
@@ -84,13 +97,47 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
|
||||
},
|
||||
sqlstoreFactories,
|
||||
signoz.NewTelemetryStoreProviderFactories(),
|
||||
func(ctx context.Context, providerSettings factory.ProviderSettings, store authtypes.AuthNStore, licensing licensing.Licensing) (map[authtypes.AuthNProvider]authn.AuthN, error) {
|
||||
samlCallbackAuthN, err := samlcallbackauthn.New(ctx, store, licensing)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
oidcCallbackAuthN, err := oidccallbackauthn.New(store, licensing, providerSettings)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
authNs, err := signoz.NewAuthNs(ctx, providerSettings, store, licensing)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
authNs[authtypes.AuthNProviderSAML] = samlCallbackAuthN
|
||||
authNs[authtypes.AuthNProviderOIDC] = oidcCallbackAuthN
|
||||
|
||||
return authNs, nil
|
||||
},
|
||||
func(ctx context.Context, sqlstore sqlstore.SQLStore) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx))
|
||||
},
|
||||
func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, roleSetter, granter, queryParser, querier, licensing)
|
||||
},
|
||||
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return httpgateway.NewProviderFactory(licensing)
|
||||
},
|
||||
func(store sqlstore.SQLStore, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
|
||||
return implrole.NewSetter(pkgimplrole.NewStore(store), authz, licensing, registry)
|
||||
},
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create signoz", "error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
server, err := enterpriseapp.NewServer(config, signoz, jwt)
|
||||
server, err := enterpriseapp.NewServer(config, signoz)
|
||||
if err != nil {
|
||||
logger.ErrorContext(ctx, "failed to create server", "error", err)
|
||||
return err
|
||||
|
||||
21
cmd/generate.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func RegisterGenerate(parentCmd *cobra.Command, logger *slog.Logger) {
|
||||
var generateCmd = &cobra.Command{
|
||||
Use: "generate",
|
||||
Short: "Generate artifacts",
|
||||
SilenceUsage: true,
|
||||
SilenceErrors: true,
|
||||
CompletionOptions: cobra.CompletionOptions{DisableDefaultCmd: true},
|
||||
}
|
||||
|
||||
registerGenerateOpenAPI(generateCmd)
|
||||
|
||||
parentCmd.AddCommand(generateCmd)
|
||||
}
|
||||
41
cmd/openapi.go
Normal file
@@ -0,0 +1,41 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func registerGenerateOpenAPI(parentCmd *cobra.Command) {
|
||||
openapiCmd := &cobra.Command{
|
||||
Use: "openapi",
|
||||
Short: "Generate OpenAPI schema for SigNoz",
|
||||
RunE: func(currCmd *cobra.Command, args []string) error {
|
||||
return runGenerateOpenAPI(currCmd.Context())
|
||||
},
|
||||
}
|
||||
|
||||
parentCmd.AddCommand(openapiCmd)
|
||||
}
|
||||
|
||||
func runGenerateOpenAPI(ctx context.Context) error {
|
||||
instrumentation, err := instrumentation.New(ctx, instrumentation.Config{Logs: instrumentation.LogsConfig{Level: slog.LevelInfo}}, version.Info, "signoz")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
openapi, err := signoz.NewOpenAPI(ctx, instrumentation)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := openapi.CreateAndWrite("docs/api/openapi.yml"); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
97
cmd/zap.go
@@ -1,6 +1,10 @@
|
||||
package cmd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"go.uber.org/zap" //nolint:depguard
|
||||
"go.uber.org/zap/zapcore" //nolint:depguard
|
||||
)
|
||||
@@ -10,6 +14,97 @@ func newZapLogger() *zap.Logger {
|
||||
config := zap.NewProductionConfig()
|
||||
config.EncoderConfig.TimeKey = "timestamp"
|
||||
config.EncoderConfig.EncodeTime = zapcore.ISO8601TimeEncoder
|
||||
|
||||
// Extract sampling config before building the logger.
|
||||
// We need to disable sampling in the config and apply it manually later
|
||||
// to ensure correct core ordering. See filteringCore documentation for details.
|
||||
samplerConfig := config.Sampling
|
||||
config.Sampling = nil
|
||||
|
||||
logger, _ := config.Build()
|
||||
return logger
|
||||
|
||||
// Wrap with custom core wrapping to filter certain log entries.
|
||||
// The order of wrapping is important:
|
||||
// 1. First wrap with filteringCore
|
||||
// 2. Then wrap with sampler
|
||||
//
|
||||
// This creates the call chain: sampler -> filteringCore -> ioCore
|
||||
//
|
||||
// During logging:
|
||||
// - sampler.Check decides whether to sample the log entry
|
||||
// - If sampled, filteringCore.Check is called
|
||||
// - filteringCore adds itself to CheckedEntry.cores
|
||||
// - All cores in CheckedEntry.cores have their Write method called
|
||||
// - filteringCore.Write can now filter the entry before passing to ioCore
|
||||
//
|
||||
// If we didn't disable the sampler above, filteringCore would have wrapped
|
||||
// sampler. By calling sampler.Check we would have allowed it to call
|
||||
// ioCore.Check that adds itself to CheckedEntry.cores. Then ioCore.Write
|
||||
// would have bypassed our checks, making filtering impossible.
|
||||
return logger.WithOptions(zap.WrapCore(func(core zapcore.Core) zapcore.Core {
|
||||
core = &filteringCore{core}
|
||||
if samplerConfig != nil {
|
||||
core = zapcore.NewSamplerWithOptions(
|
||||
core,
|
||||
time.Second,
|
||||
samplerConfig.Initial,
|
||||
samplerConfig.Thereafter,
|
||||
)
|
||||
}
|
||||
return core
|
||||
}))
|
||||
}
|
||||
|
||||
// filteringCore wraps a zapcore.Core to filter out log entries based on a
|
||||
// custom logic.
|
||||
//
|
||||
// Note: This core must be positioned before the sampler in the core chain
|
||||
// to ensure Write is called. See newZapLogger for ordering details.
|
||||
type filteringCore struct {
|
||||
zapcore.Core
|
||||
}
|
||||
|
||||
// filter determines whether a log entry should be written based on its fields.
|
||||
// Returns false if the entry should be suppressed, true otherwise.
|
||||
//
|
||||
// Current filters:
|
||||
// - context.Canceled: These are expected errors from cancelled operations,
|
||||
// and create noise in logs.
|
||||
func (c *filteringCore) filter(fields []zapcore.Field) bool {
|
||||
for _, field := range fields {
|
||||
if field.Type == zapcore.ErrorType {
|
||||
if loggedErr, ok := field.Interface.(error); ok {
|
||||
// Suppress logs containing context.Canceled errors
|
||||
if errors.Is(loggedErr, context.Canceled) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// With implements zapcore.Core.With
|
||||
// It returns a new copy with the added context.
|
||||
func (c *filteringCore) With(fields []zapcore.Field) zapcore.Core {
|
||||
return &filteringCore{c.Core.With(fields)}
|
||||
}
|
||||
|
||||
// Check implements zapcore.Core.Check.
|
||||
// It adds this core to the CheckedEntry if the log level is enabled,
|
||||
// ensuring that Write will be called for this entry.
|
||||
func (c *filteringCore) Check(ent zapcore.Entry, ce *zapcore.CheckedEntry) *zapcore.CheckedEntry {
|
||||
if c.Enabled(ent.Level) {
|
||||
return ce.AddCore(ent, c)
|
||||
}
|
||||
return ce
|
||||
}
|
||||
|
||||
// Write implements zapcore.Core.Write.
|
||||
// It filters log entries based on their fields before delegating to the wrapped core.
|
||||
func (c *filteringCore) Write(ent zapcore.Entry, fields []zapcore.Field) error {
|
||||
if !c.filter(fields) {
|
||||
return nil
|
||||
}
|
||||
return c.Core.Write(ent, fields)
|
||||
}
|
||||
|
||||
@@ -1,8 +1,15 @@
|
||||
##################### SigNoz Configuration Example #####################
|
||||
#
|
||||
#
|
||||
# Do not modify this file
|
||||
#
|
||||
|
||||
##################### Global #####################
|
||||
global:
|
||||
# the url under which the signoz apiserver is externally reachable.
|
||||
external_url: <unset>
|
||||
# the url where the SigNoz backend receives telemetry data (traces, metrics, logs) from instrumented applications.
|
||||
ingestion_url: <unset>
|
||||
|
||||
##################### Version #####################
|
||||
version:
|
||||
banner:
|
||||
@@ -47,10 +54,10 @@ cache:
|
||||
provider: memory
|
||||
# memory: Uses in-memory caching.
|
||||
memory:
|
||||
# Time-to-live for cache entries in memory. Specify the duration in ns
|
||||
ttl: 60000000000
|
||||
# The interval at which the cache will be cleaned up
|
||||
cleanup_interval: 1m
|
||||
# Max items for the in-memory cache (10x the entries)
|
||||
num_counters: 100000
|
||||
# Total cost in bytes allocated to the bounded cache
|
||||
max_cost: 67108864
|
||||
# redis: Uses Redis as the caching backend.
|
||||
redis:
|
||||
# The hostname or IP address of the Redis server.
|
||||
@@ -58,7 +65,7 @@ cache:
|
||||
# The port on which the Redis server is running. Default is usually 6379.
|
||||
port: 6379
|
||||
# The password for authenticating with the Redis server, if required.
|
||||
password:
|
||||
password:
|
||||
# The Redis database number to use
|
||||
db: 0
|
||||
|
||||
@@ -71,6 +78,10 @@ sqlstore:
|
||||
sqlite:
|
||||
# The path to the SQLite database file.
|
||||
path: /var/lib/signoz/signoz.db
|
||||
# Mode is the mode to use for the sqlite database.
|
||||
mode: delete
|
||||
# BusyTimeout is the timeout for the sqlite database to wait for a lock.
|
||||
busy_timeout: 10s
|
||||
|
||||
##################### APIServer #####################
|
||||
apiserver:
|
||||
@@ -238,8 +249,54 @@ statsreporter:
|
||||
# Whether to collect identities and traits (emails).
|
||||
identities: true
|
||||
|
||||
|
||||
##################### Gateway (License only) #####################
|
||||
gateway:
|
||||
# The URL of the gateway's api.
|
||||
url: http://localhost:8080
|
||||
|
||||
##################### Tokenizer #####################
|
||||
tokenizer:
|
||||
# Specifies the tokenizer provider to use.
|
||||
provider: jwt
|
||||
lifetime:
|
||||
# The duration for which a user can be idle before being required to authenticate.
|
||||
idle: 168h
|
||||
# The duration for which a user can remain logged in before being asked to login.
|
||||
max: 720h
|
||||
rotation:
|
||||
# The interval to rotate tokens in.
|
||||
interval: 30m
|
||||
# The duration for which the previous token pair remains valid after a token pair is rotated.
|
||||
duration: 60s
|
||||
jwt:
|
||||
# The secret to sign the JWT tokens.
|
||||
secret: secret
|
||||
opaque:
|
||||
gc:
|
||||
# The interval to perform garbage collection.
|
||||
interval: 1h
|
||||
token:
|
||||
# The maximum number of tokens a user can have. This limits the number of concurrent sessions a user can have.
|
||||
max_per_user: 5
|
||||
|
||||
##################### Flagger #####################
|
||||
flagger:
|
||||
# Config are the overrides for the feature flags which come directly from the config file.
|
||||
config:
|
||||
boolean:
|
||||
use_span_metrics: true
|
||||
interpolation_enabled: false
|
||||
kafka_span_eval: false
|
||||
string:
|
||||
float:
|
||||
integer:
|
||||
object:
|
||||
|
||||
##################### User #####################
|
||||
user:
|
||||
password:
|
||||
reset:
|
||||
# Whether to allow users to reset their password themselves.
|
||||
allow_self: true
|
||||
# The duration within which a user can reset their password.
|
||||
max_token_lifetime: 6h
|
||||
|
||||
4
context7.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"url": "https://context7.com/signoz/signoz",
|
||||
"public_key": "pk_6g9GfjdkuPEIDuTGAxnol"
|
||||
}
|
||||
@@ -176,7 +176,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:v0.95.0
|
||||
image: signoz/signoz:v0.110.1
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
ports:
|
||||
@@ -195,7 +195,7 @@ services:
|
||||
- GODEBUG=netdns=go
|
||||
- TELEMETRY_ENABLED=true
|
||||
- DEPLOYMENT_TYPE=docker-swarm
|
||||
- SIGNOZ_JWT_SECRET=secret
|
||||
- SIGNOZ_TOKENIZER_JWT_SECRET=secret
|
||||
- DOT_METRICS_ENABLED=true
|
||||
healthcheck:
|
||||
test:
|
||||
@@ -209,7 +209,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:v0.129.5
|
||||
image: signoz/signoz-otel-collector:v0.129.13
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
@@ -233,7 +233,7 @@ services:
|
||||
- signoz
|
||||
schema-migrator:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:v0.129.5
|
||||
image: signoz/signoz-schema-migrator:v0.129.13
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -117,7 +117,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:v0.95.0
|
||||
image: signoz/signoz:v0.110.1
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
ports:
|
||||
@@ -150,7 +150,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:v0.129.5
|
||||
image: signoz/signoz-otel-collector:v0.129.13
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
- --manager-config=/etc/manager-config.yaml
|
||||
@@ -176,7 +176,7 @@ services:
|
||||
- signoz
|
||||
schema-migrator:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:v0.129.5
|
||||
image: signoz/signoz-schema-migrator:v0.129.13
|
||||
deploy:
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
connectors:
|
||||
signozmeter:
|
||||
metrics_flush_interval: 1h
|
||||
dimensions:
|
||||
- name: service.name
|
||||
- name: deployment.environment
|
||||
- name: host.name
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
@@ -21,6 +28,10 @@ processors:
|
||||
send_batch_size: 10000
|
||||
send_batch_max_size: 11000
|
||||
timeout: 10s
|
||||
batch/meter:
|
||||
send_batch_max_size: 25000
|
||||
send_batch_size: 20000
|
||||
timeout: 1s
|
||||
resourcedetection:
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
detectors: [env, system]
|
||||
@@ -66,6 +77,11 @@ exporters:
|
||||
dsn: tcp://clickhouse:9000/signoz_logs
|
||||
timeout: 10s
|
||||
use_new_schema: true
|
||||
signozclickhousemeter:
|
||||
dsn: tcp://clickhouse:9000/signoz_meter
|
||||
timeout: 45s
|
||||
sending_queue:
|
||||
enabled: false
|
||||
service:
|
||||
telemetry:
|
||||
logs:
|
||||
@@ -77,16 +93,20 @@ service:
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [signozspanmetrics/delta, batch]
|
||||
exporters: [clickhousetraces]
|
||||
exporters: [clickhousetraces, signozmeter]
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [signozclickhousemetrics]
|
||||
exporters: [signozclickhousemetrics, signozmeter]
|
||||
metrics/prometheus:
|
||||
receivers: [prometheus]
|
||||
processors: [batch]
|
||||
exporters: [signozclickhousemetrics]
|
||||
exporters: [signozclickhousemetrics, signozmeter]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [clickhouselogsexporter]
|
||||
exporters: [clickhouselogsexporter, signozmeter]
|
||||
metrics/meter:
|
||||
receivers: [signozmeter]
|
||||
processors: [batch/meter]
|
||||
exporters: [signozclickhousemeter]
|
||||
|
||||
@@ -179,7 +179,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.95.0}
|
||||
image: signoz/signoz:${VERSION:-v0.110.1}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -213,7 +213,7 @@ services:
|
||||
# TODO: support otel-collector multiple replicas. Nginx/Traefik for loadbalancing?
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.5}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.13}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -239,7 +239,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.5}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -250,7 +250,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.5}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -111,7 +111,7 @@ services:
|
||||
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
|
||||
signoz:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz:${VERSION:-v0.95.0}
|
||||
image: signoz/signoz:${VERSION:-v0.110.1}
|
||||
container_name: signoz
|
||||
command:
|
||||
- --config=/root/config/prometheus.yml
|
||||
@@ -144,7 +144,7 @@ services:
|
||||
retries: 3
|
||||
otel-collector:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.5}
|
||||
image: signoz/signoz-otel-collector:${OTELCOL_TAG:-v0.129.13}
|
||||
container_name: signoz-otel-collector
|
||||
command:
|
||||
- --config=/etc/otel-collector-config.yaml
|
||||
@@ -166,7 +166,7 @@ services:
|
||||
condition: service_healthy
|
||||
schema-migrator-sync:
|
||||
!!merge <<: *common
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.5}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
|
||||
container_name: schema-migrator-sync
|
||||
command:
|
||||
- sync
|
||||
@@ -178,7 +178,7 @@ services:
|
||||
restart: on-failure
|
||||
schema-migrator-async:
|
||||
!!merge <<: *db-depend
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.5}
|
||||
image: signoz/signoz-schema-migrator:${OTELCOL_TAG:-v0.129.13}
|
||||
container_name: schema-migrator-async
|
||||
command:
|
||||
- async
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
connectors:
|
||||
signozmeter:
|
||||
metrics_flush_interval: 1h
|
||||
dimensions:
|
||||
- name: service.name
|
||||
- name: deployment.environment
|
||||
- name: host.name
|
||||
receivers:
|
||||
otlp:
|
||||
protocols:
|
||||
@@ -21,6 +28,10 @@ processors:
|
||||
send_batch_size: 10000
|
||||
send_batch_max_size: 11000
|
||||
timeout: 10s
|
||||
batch/meter:
|
||||
send_batch_max_size: 25000
|
||||
send_batch_size: 20000
|
||||
timeout: 1s
|
||||
resourcedetection:
|
||||
# Using OTEL_RESOURCE_ATTRIBUTES envvar, env detector adds custom labels.
|
||||
detectors: [env, system]
|
||||
@@ -66,6 +77,11 @@ exporters:
|
||||
dsn: tcp://clickhouse:9000/signoz_logs
|
||||
timeout: 10s
|
||||
use_new_schema: true
|
||||
signozclickhousemeter:
|
||||
dsn: tcp://clickhouse:9000/signoz_meter
|
||||
timeout: 45s
|
||||
sending_queue:
|
||||
enabled: false
|
||||
service:
|
||||
telemetry:
|
||||
logs:
|
||||
@@ -77,16 +93,20 @@ service:
|
||||
traces:
|
||||
receivers: [otlp]
|
||||
processors: [signozspanmetrics/delta, batch]
|
||||
exporters: [clickhousetraces]
|
||||
exporters: [clickhousetraces, signozmeter]
|
||||
metrics:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [signozclickhousemetrics]
|
||||
exporters: [signozclickhousemetrics, signozmeter]
|
||||
metrics/prometheus:
|
||||
receivers: [prometheus]
|
||||
processors: [batch]
|
||||
exporters: [signozclickhousemetrics]
|
||||
exporters: [signozclickhousemetrics, signozmeter]
|
||||
logs:
|
||||
receivers: [otlp]
|
||||
processors: [batch]
|
||||
exporters: [clickhouselogsexporter]
|
||||
exporters: [clickhouselogsexporter, signozmeter]
|
||||
metrics/meter:
|
||||
receivers: [signozmeter]
|
||||
processors: [batch/meter]
|
||||
exporters: [signozclickhousemeter]
|
||||
|
||||
4344
docs/api/openapi.yml
Normal file
@@ -13,8 +13,6 @@ Before diving in, make sure you have these tools installed:
|
||||
- Download from [go.dev/dl](https://go.dev/dl/)
|
||||
- Check [go.mod](../../go.mod#L3) for the minimum version
|
||||
|
||||
- **GCC** - Required for CGO dependencies
|
||||
- Download from [gcc.gnu.org](https://gcc.gnu.org/)
|
||||
|
||||
- **Node** - Powers our frontend
|
||||
- Download from [nodejs.org](https://nodejs.org)
|
||||
|
||||
134
docs/contributing/go/flagger.md
Normal file
@@ -0,0 +1,134 @@
|
||||
# Flagger
|
||||
|
||||
Flagger is SigNoz's feature flagging system built on top of the [OpenFeature](https://openfeature.dev/) standard. It provides a unified interface for evaluating feature flags across the application, allowing features to be enabled, disabled, or configured dynamically without code changes.
|
||||
|
||||
> 💡 **Note**: OpenFeature is a CNCF project that provides a vendor-agnostic feature flagging API, making it easy to switch providers without changing application code.
|
||||
|
||||
## How does it work?
|
||||
|
||||
Flagger consists of three main components:
|
||||
|
||||
1. **Registry** (`pkg/flagger/registry.go`) - Contains all available feature flags with their metadata and default values
|
||||
2. **Flagger** (`pkg/flagger/flagger.go`) - The consumer-facing interface for evaluating feature flags
|
||||
3. **Providers** (`pkg/flagger/<provider>flagger/`) - Implementations that supply feature flag values (e.g., `configflagger` for config-based flags)
|
||||
|
||||
The evaluation flow works as follows:
|
||||
|
||||
1. The caller requests a feature flag value via the `Flagger` interface
|
||||
2. Flagger checks the registry to validate the flag exists and get its default value
|
||||
3. Each registered provider is queried for an override value
|
||||
4. If a provider returns a value different from the default, that value is returned
|
||||
5. Otherwise, the default value from the registry is returned
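
The sketch below illustrates this resolution order. It uses simplified, hypothetical stand-ins for the registry entry and provider types rather than the actual `pkg/flagger` interfaces:

```go
// Illustrative sketch of the flag resolution order described above.
// The types here are simplified stand-ins, not the real pkg/flagger API.
package main

import "fmt"

// provider is a hypothetical stand-in for a flagger provider (e.g. a config-based one).
type provider interface {
	// Boolean returns an override value and whether the provider has one for this flag.
	Boolean(flag string) (bool, bool)
}

// registryEntry is a hypothetical stand-in for a registry entry and its default value.
type registryEntry struct {
	name         string
	defaultValue bool
}

// resolveBoolean queries each provider in order and returns the first value that
// differs from the registry default; otherwise it falls back to the default.
func resolveBoolean(entry registryEntry, providers []provider) bool {
	for _, p := range providers {
		if v, ok := p.Boolean(entry.name); ok && v != entry.defaultValue {
			return v
		}
	}
	return entry.defaultValue
}

func main() {
	// With no providers registered, the registry default is returned.
	fmt.Println(resolveBoolean(registryEntry{name: "my_new_feature", defaultValue: false}, nil))
}
```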
|
||||
|
||||
## How to add a new feature flag?
|
||||
|
||||
### 1. Register the flag in the registry
|
||||
|
||||
Add your feature flag definition in `pkg/flagger/registry.go`:
|
||||
|
||||
```go
|
||||
var (
|
||||
// Export the feature name for use in evaluations
|
||||
FeatureMyNewFeature = featuretypes.MustNewName("my_new_feature")
|
||||
)
|
||||
|
||||
func MustNewRegistry() featuretypes.Registry {
|
||||
registry, err := featuretypes.NewRegistry(
|
||||
// ...existing features...
|
||||
&featuretypes.Feature{
|
||||
Name: FeatureMyNewFeature,
|
||||
Kind: featuretypes.KindBoolean, // or KindString, KindFloat, KindInt, KindObject
|
||||
Stage: featuretypes.StageStable, // or StageAlpha, StageBeta
|
||||
Description: "Controls whether my new feature is enabled",
|
||||
DefaultVariant: featuretypes.MustNewName("disabled"),
|
||||
Variants: featuretypes.NewBooleanVariants(),
|
||||
},
|
||||
)
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
> 💡 **Note**: Feature names must match the regex `^[a-z_]+$` (lowercase letters and underscores only).
|
||||
|
||||
### 2. Configure the feature flag value (optional)
|
||||
|
||||
To override the default value, add an entry in your configuration file:
|
||||
|
||||
```yaml
|
||||
flagger:
|
||||
config:
|
||||
boolean:
|
||||
my_new_feature: true
|
||||
```
|
||||
|
||||
Supported configuration types:
|
||||
|
||||
| Type | Config Key | Go Type |
|
||||
|------|------------|---------|
|
||||
| Boolean | `boolean` | `bool` |
|
||||
| String | `string` | `string` |
|
||||
| Float | `float` | `float64` |
|
||||
| Integer | `integer` | `int64` |
|
||||
| Object | `object` | `any` |
|
||||
|
||||
## How to evaluate a feature flag?
|
||||
|
||||
Use the `Flagger` interface to evaluate feature flags. The interface provides typed methods for each value type:
|
||||
|
||||
```go
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||
)
|
||||
|
||||
func DoSomething(ctx context.Context, flagger flagger.Flagger) error {
|
||||
// Create an evaluation context (typically with org ID)
|
||||
evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
|
||||
|
||||
// Evaluate with error handling
|
||||
enabled, err := flagger.Boolean(ctx, flagger.FeatureMyNewFeature, evalCtx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if enabled {
|
||||
// Feature is enabled
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
### Empty variants
|
||||
|
||||
For cases where you want to use a default value on error (and log the error), use the `*OrEmpty` methods:
|
||||
|
||||
```go
|
||||
func DoSomething(ctx context.Context, flagger flagger.Flagger) {
|
||||
evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
|
||||
|
||||
// Returns false on error and logs the error
|
||||
if flagger.BooleanOrEmpty(ctx, flagger.FeatureMyNewFeature, evalCtx) {
|
||||
// Feature is enabled
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Available evaluation methods
|
||||
|
||||
| Method | Return Type | Empty Variant Default |
|
||||
|--------|-------------|---------------------|
|
||||
| `Boolean()` | `(bool, error)` | `false` |
|
||||
| `String()` | `(string, error)` | `""` |
|
||||
| `Float()` | `(float64, error)` | `0.0` |
|
||||
| `Int()` | `(int64, error)` | `0` |
|
||||
| `Object()` | `(any, error)` | `struct{}{}` |
|
||||
|
||||
## What should I remember?
|
||||
|
||||
- Always define feature flags in the registry (`pkg/flagger/registry.go`) before using them
|
||||
- Use descriptive feature names that clearly indicate what the flag controls
|
||||
- Prefer `*OrEmpty` methods for non-critical features to avoid error handling overhead
|
||||
- Export feature name variables (e.g., `FeatureMyNewFeature`) for type-safe usage across packages
|
||||
- Consider the feature's lifecycle stage (`Alpha`, `Beta`, `Stable`) when defining it
|
||||
- Providers are evaluated in order; the first non-default value wins
|
||||
179
docs/contributing/go/handler.md
Normal file
@@ -0,0 +1,179 @@
|
||||
# Handler
|
||||
|
||||
Handlers in SigNoz are responsible for exposing module functionality over HTTP. They are thin adapters that:
|
||||
|
||||
- Decode incoming HTTP requests
|
||||
- Call the appropriate module layer
|
||||
- Return structured responses (or errors) in a consistent format
|
||||
- Describe themselves for OpenAPI generation
|
||||
|
||||
They are **not** the place for complex business logic; that belongs in modules (for example, `pkg/modules/user`, `pkg/modules/session`, etc).
|
||||
|
||||
## How are handlers structured?
|
||||
|
||||
At a high level, a typical flow looks like this:
|
||||
|
||||
1. A `Handler` interface is defined in the module (for example, `user.Handler`, `session.Handler`, `organization.Handler`).
|
||||
2. The `apiserver` provider wires those handlers into HTTP routes using Gorilla `mux.Router`.
|
||||
|
||||
Each route wraps a module handler method with the following:
|
||||
- Authorization middleware (from `pkg/http/middleware`)
|
||||
- A generic HTTP `handler.Handler` (from `pkg/http/handler`)
|
||||
- An `OpenAPIDef` that describes the operation for OpenAPI generation
|
||||
|
||||
For example, in `pkg/apiserver/signozapiserver`:
|
||||
|
||||
```go
|
||||
if err := router.Handle("/api/v1/invite", handler.New(
|
||||
provider.authZ.AdminAccess(provider.userHandler.CreateInvite),
|
||||
handler.OpenAPIDef{
|
||||
ID: "CreateInvite",
|
||||
Tags: []string{"users"},
|
||||
Summary: "Create invite",
|
||||
Description: "This endpoint creates an invite for a user",
|
||||
Request: new(types.PostableInvite),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(types.Invite),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusCreated,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
In this pattern:
|
||||
|
||||
- `provider.userHandler.CreateInvite` is a handler method.
|
||||
- `provider.authZ.AdminAccess(...)` wraps that method with authorization checks and context setup.
|
||||
- `handler.New` converts it into an HTTP handler and wires it to OpenAPI via the `OpenAPIDef`.
|
||||
|
||||
## How to write a new handler method?
|
||||
|
||||
When adding a new endpoint:
|
||||
|
||||
1. Add a method to the appropriate module `Handler` interface.
|
||||
2. Implement that method in the module.
|
||||
3. Register the method in `signozapiserver` with the correct route, HTTP method, auth, and `OpenAPIDef`.
|
||||
|
||||
### 1. Extend an existing `Handler` interface or create a new one
|
||||
|
||||
Find the module in `pkg/modules/<name>` and extend its `Handler` interface with a new method that receives an `http.ResponseWriter` and `*http.Request`. For example:
|
||||
|
||||
```go
|
||||
type Handler interface {
|
||||
// existing methods...
|
||||
CreateThing(rw http.ResponseWriter, req *http.Request)
|
||||
}
|
||||
```
|
||||
|
||||
Keep the method focused on HTTP concerns and delegate business logic to the module.
|
||||
|
||||
### 2. Implement the handler method
|
||||
|
||||
In the module implementation, implement the new method. A typical implementation:
|
||||
|
||||
- Extracts authentication and organization context from `req.Context()`
|
||||
- Decodes the request body into a `types.*` struct using the `binding` package
|
||||
- Calls module functions
|
||||
- Uses the `render` package to write responses or errors
|
||||
|
||||
```go
|
||||
func (h *handler) CreateThing(rw http.ResponseWriter, req *http.Request) {
|
||||
// Extract authentication and organization context from req.Context()
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Decode the request body into a `types.*` struct using the `binding` package
|
||||
var in types.PostableThing
|
||||
if err := binding.JSON.BindBody(req.Body, &in); err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Call module functions
|
||||
out, err := h.module.CreateThing(req.Context(), claims.OrgID, &in)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
// Use the `render` package to write responses or errors
|
||||
render.Success(rw, http.StatusCreated, out)
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Register the handler in `signozapiserver`
|
||||
|
||||
In `pkg/apiserver/signozapiserver`, add a route in the appropriate `add*Routes` function (`addUserRoutes`, `addSessionRoutes`, `addOrgRoutes`, etc.). The pattern is:
|
||||
|
||||
```go
|
||||
if err := router.Handle("/api/v1/things", handler.New(
|
||||
provider.authZ.AdminAccess(provider.thingHandler.CreateThing),
|
||||
handler.OpenAPIDef{
|
||||
ID: "CreateThing",
|
||||
Tags: []string{"things"},
|
||||
Summary: "Create thing",
|
||||
Description: "This endpoint creates a thing",
|
||||
Request: new(types.PostableThing),
|
||||
RequestContentType: "application/json",
|
||||
Response: new(types.GettableThing),
|
||||
ResponseContentType: "application/json",
|
||||
SuccessStatusCode: http.StatusCreated,
|
||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusConflict},
|
||||
Deprecated: false,
|
||||
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||
},
|
||||
)).Methods(http.MethodPost).GetError(); err != nil {
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### 4. Update the OpenAPI spec
|
||||
|
||||
Run the following command to update the OpenAPI spec:
|
||||
|
||||
```bash
|
||||
go run cmd/enterprise/*.go generate openapi
|
||||
```
|
||||
|
||||
This will update the OpenAPI spec in `docs/api/openapi.yml` to reflect the new endpoint.
|
||||
|
||||
## How does OpenAPI integration work?
|
||||
|
||||
The `handler.New` function ties the HTTP handler to OpenAPI metadata via `OpenAPIDef`. This drives the generated OpenAPI document.
|
||||
|
||||
- **ID**: A unique identifier for the operation (used as the `operationId`).
|
||||
- **Tags**: Logical grouping for the operation (for example, `"users"`, `"sessions"`, `"orgs"`).
|
||||
- **Summary / Description**: Human-friendly documentation.
|
||||
- **Request / RequestContentType**:
|
||||
- `Request` is a Go type that describes the request body or form.
|
||||
- `RequestContentType` is usually `"application/json"` or `"application/x-www-form-urlencoded"` (for callbacks like SAML).
|
||||
- **Response / ResponseContentType**:
|
||||
- `Response` is the Go type for the successful response payload.
|
||||
- `ResponseContentType` is usually `"application/json"`; use `""` for responses without a body.
|
||||
- **SuccessStatusCode**: The HTTP status for successful responses (for example, `http.StatusOK`, `http.StatusCreated`, `http.StatusNoContent`).
|
||||
- **ErrorStatusCodes**: Additional error status codes beyond the standard ones automatically added by `handler.New`.
|
||||
- **SecuritySchemes**: Auth mechanisms and scopes required by the operation.
|
||||
|
||||
The generic handler:
|
||||
|
||||
- Automatically appends `401`, `403`, and `500` to `ErrorStatusCodes` when appropriate.
|
||||
- Registers request and response schemas with the OpenAPI reflector so they appear in `docs/api/openapi.yml`.
|
||||
|
||||
See existing examples in:
|
||||
|
||||
- `addUserRoutes` (for typical JSON request/response)
|
||||
- `addSessionRoutes` (for form-encoded and redirect flows)
|
||||
|
||||
## What should I remember?
|
||||
|
||||
- **Keep handlers thin**: focus on HTTP concerns and delegate logic to modules/services.
|
||||
- **Always register routes through `signozapiserver`** using `handler.New` and a complete `OpenAPIDef`.
|
||||
- **Choose accurate request/response types** from the `types` packages so OpenAPI schemas are correct.
|
||||
@@ -9,7 +9,7 @@ SigNoz uses integration tests to verify that different components work together
|
||||
Before running integration tests, ensure you have the following installed:
|
||||
|
||||
- Python 3.13+
|
||||
- Poetry (for dependency management)
|
||||
- [uv](https://docs.astral.sh/uv/getting-started/installation/)
|
||||
- Docker (for containerized services)
|
||||
|
||||
### Initial Setup
|
||||
@@ -19,17 +19,19 @@ Before running integration tests, ensure you have the following installed:
|
||||
cd tests/integration
|
||||
```
|
||||
|
||||
2. Install dependencies using Poetry:
|
||||
2. Install dependencies using uv:
|
||||
```bash
|
||||
poetry install --no-root
|
||||
uv sync
|
||||
```
|
||||
|
||||
> **_NOTE:_** The build backend could throw an error while installing `psycopg2`; please see https://www.psycopg.org/docs/install.html#build-prerequisites
|
||||
|
||||
### Starting the Test Environment
|
||||
|
||||
To spin up all the containers necessary for writing integration tests and keep them running:
|
||||
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/setup.py::test_setup
|
||||
```
|
||||
|
||||
This command will:
|
||||
@@ -42,7 +44,7 @@ This command will:
|
||||
When you're done writing integration tests, clean up the environment:
|
||||
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
|
||||
uv run pytest --basetemp=./tmp/ -vv --teardown -s src/bootstrap/setup.py::test_teardown
|
||||
```
|
||||
|
||||
This will destroy the running integration test setup and clean up resources.
|
||||
@@ -72,20 +74,20 @@ Python and pytest form the foundation of the integration testing framework. Test
|
||||
│ ├── sqlite.py
|
||||
│ ├── types.py
|
||||
│ └── zookeeper.py
|
||||
├── poetry.lock
|
||||
├── uv.lock
|
||||
├── pyproject.toml
|
||||
└── src
|
||||
└── bootstrap
|
||||
├── __init__.py
|
||||
├── a_database.py
|
||||
├── b_register.py
|
||||
└── c_license.py
|
||||
├── 01_database.py
|
||||
├── 02_register.py
|
||||
└── 03_license.py
|
||||
```
|
||||
|
||||
Each test suite follows some important principles:
|
||||
|
||||
1. **Organization**: Test suites live under `src/` in self-contained packages. Fixtures (a pytest concept) live inside `fixtures/`.
|
||||
2. **Execution Order**: Files are prefixed with `a_`, `b_`, `c_` to ensure sequential execution.
|
||||
2. **Execution Order**: Files are prefixed with two-digit numbers (`01_`, `02_`, `03_`) to ensure sequential execution.
|
||||
3. **Time Constraints**: Each suite should complete in under 10 minutes (setup takes ~4 mins).
|
||||
|
||||
### Test Suite Design
|
||||
@@ -107,7 +109,7 @@ Other test suites can be **pipelines, auth, querier.**
|
||||
|
||||
## How to write an integration test?
|
||||
|
||||
Now start writing an integration test. Create a new file `src/bootstrap/e_version.py` and paste the following:
|
||||
Now start writing an integration test. Create a new file `src/bootstrap/05_version.py` and paste the following:
|
||||
|
||||
```python
|
||||
import requests
|
||||
@@ -125,7 +127,7 @@ def test_version(signoz: types.SigNoz) -> None:
|
||||
We have written a simple test which calls the `version` endpoint of the container from step 1. **To run just this function, run the following command:**
|
||||
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/e_version.py::test_version
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/bootstrap/05_version.py::test_version
|
||||
```
|
||||
|
||||
> Note: The `--reuse` flag is used to reuse the environment if it is already running. Always use this flag when writing and running integration tests. If you don't use this flag, the environment will be destroyed and recreated every time you run the test.
|
||||
@@ -153,7 +155,7 @@ def test_user_registration(signoz: types.SigNoz) -> None:
|
||||
},
|
||||
timeout=2,
|
||||
)
|
||||
|
||||
|
||||
assert response.status_code == HTTPStatus.OK
|
||||
assert response.json()["setupCompleted"] is True
|
||||
```
|
||||
@@ -163,27 +165,27 @@ def test_user_registration(signoz: types.SigNoz) -> None:
|
||||
### Running All Tests
|
||||
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/
|
||||
```
|
||||
|
||||
### Running Specific Test Categories
|
||||
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/<suite>
|
||||
|
||||
# Run querier tests
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/querier/
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/querier/
|
||||
# Run auth tests
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/auth/
|
||||
```
|
||||
|
||||
### Running Individual Tests
|
||||
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/<suite>/<file>.py::test_name
|
||||
|
||||
# Run test_register in file a_register.py in auth suite
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse src/auth/a_register.py::test_register
|
||||
# Run test_register in file 01_register.py in passwordauthn suite
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse src/passwordauthn/01_register.py::test_register
|
||||
```
|
||||
|
||||
## How to configure different options for integration tests?
|
||||
@@ -192,12 +194,12 @@ Tests can be configured using pytest options:
|
||||
|
||||
- `--sqlstore-provider` - Choose database provider (default: postgres)
|
||||
- `--postgres-version` - PostgreSQL version (default: 15)
|
||||
- `--clickhouse-version` - ClickHouse version (default: 24.1.2-alpine)
|
||||
- `--clickhouse-version` - ClickHouse version (default: 25.5.6)
|
||||
- `--zookeeper-version` - Zookeeper version (default: 3.7.1)
|
||||
|
||||
Example:
|
||||
```bash
|
||||
poetry run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
|
||||
uv run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
|
||||
```
|
||||
|
||||
|
||||
@@ -205,7 +207,7 @@ poetry run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --p
|
||||
|
||||
- **Always use the `--reuse` flag** when setting up the environment to keep containers running
|
||||
- **Use the `--teardown` flag** when cleaning up to avoid resource leaks
|
||||
- **Follow the naming convention** with alphabetical prefixes for test execution order
|
||||
- **Follow the naming convention** with two-digit numeric prefixes (`01_`, `02_`) for test execution order
|
||||
- **Use proper timeouts** in HTTP requests to avoid hanging tests
|
||||
- **Clean up test data** between tests to avoid interference
|
||||
- **Use descriptive test names** that clearly indicate what is being tested
|
||||
|
||||
301
docs/contributing/onboarding.md
Normal file
@@ -0,0 +1,301 @@
|
||||
# Onboarding Configuration Guide
|
||||
|
||||
This guide explains how to add new data sources to the SigNoz onboarding flow. The onboarding configuration controls the "New Source" / "Get Started" experience in SigNoz Cloud.
|
||||
|
||||
## Configuration File Location
|
||||
|
||||
The configuration is located at:
|
||||
|
||||
```
|
||||
frontend/src/container/OnboardingV2Container/onboarding-configs/onboarding-config-with-links.json
|
||||
```
|
||||
|
||||
## JSON Structure Overview
|
||||
|
||||
The configuration file is a JSON array containing data source objects. Each object represents a selectable option in the onboarding flow.
|
||||
|
||||
## Data Source Object Keys
|
||||
|
||||
### Required Keys
|
||||
|
||||
| Key | Type | Description |
|
||||
|-----|------|-------------|
|
||||
| `dataSource` | `string` | Unique identifier for the data source (kebab-case, e.g., `"aws-ec2"`) |
|
||||
| `label` | `string` | Display name shown to users (e.g., `"AWS EC2"`) |
|
||||
| `tags` | `string[]` | Array of category tags for grouping (e.g., `["AWS"]`, `["database"]`) |
|
||||
| `module` | `string` | Destination module after onboarding completion |
|
||||
| `imgUrl` | `string` | Path to the logo/icon **(SVG required)** (e.g., `"/Logos/ec2.svg"`) |
|
||||
|
||||
### Optional Keys
|
||||
|
||||
| Key | Type | Description |
|
||||
|-----|------|-------------|
|
||||
| `link` | `string` | Docs link to redirect to (e.g., `"/docs/aws-monitoring/ec2/"`) |
|
||||
| `relatedSearchKeywords` | `string[]` | Array of keywords for search functionality |
|
||||
| `question` | `object` | Nested question object for multi-step flows |
|
||||
| `internalRedirect` | `boolean` | When `true`, navigates within the app instead of showing docs |
|
||||
|
||||
## Module Values
|
||||
|
||||
The `module` key determines where users are redirected after completing onboarding:
|
||||
|
||||
| Value | Destination |
|
||||
|-------|-------------|
|
||||
| `apm` | APM / Traces |
|
||||
| `logs` | Logs Explorer |
|
||||
| `metrics` | Metrics Explorer |
|
||||
| `dashboards` | Dashboards |
|
||||
| `infra-monitoring-hosts` | Infrastructure Monitoring - Hosts |
|
||||
| `infra-monitoring-k8s` | Infrastructure Monitoring - Kubernetes |
|
||||
| `messaging-queues-kafka` | Messaging Queues - Kafka |
|
||||
| `messaging-queues-celery` | Messaging Queues - Celery |
|
||||
| `integrations` | Integrations page |
|
||||
| `home` | Home page |
|
||||
| `api-monitoring` | API Monitoring |
|
||||
|
||||
## Question Object Structure
|
||||
|
||||
The `question` object enables multi-step selection flows:
|
||||
|
||||
```json
|
||||
{
|
||||
"question": {
|
||||
"desc": "What would you like to monitor?",
|
||||
"type": "select",
|
||||
"helpText": "Choose the telemetry type you want to collect.",
|
||||
"helpLink": "/docs/azure-monitoring/overview/",
|
||||
"helpLinkText": "Read the guide →",
|
||||
"options": [
|
||||
{
|
||||
"key": "logging",
|
||||
"label": "Logs",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"link": "/docs/azure-monitoring/app-service/logging/"
|
||||
},
|
||||
{
|
||||
"key": "metrics",
|
||||
"label": "Metrics",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"link": "/docs/azure-monitoring/app-service/metrics/"
|
||||
},
|
||||
{
|
||||
"key": "tracing",
|
||||
"label": "Traces",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"link": "/docs/azure-monitoring/app-service/tracing/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Question Keys
|
||||
|
||||
| Key | Type | Description |
|
||||
|-----|------|-------------|
|
||||
| `desc` | `string` | Question text displayed to the user |
|
||||
| `type` | `string` | Currently only `"select"` is supported |
|
||||
| `helpText` | `string` | (Optional) Additional help text below the question |
|
||||
| `helpLink` | `string` | (Optional) Docs link for the help section |
|
||||
| `helpLinkText` | `string` | (Optional) Text for the help link (default: "Learn more →") |
|
||||
| `options` | `array` | Array of option objects |
|
||||
|
||||
## Option Object Structure
|
||||
|
||||
Options can be simple (direct link) or nested (with another question):
|
||||
|
||||
### Simple Option (Direct Link)
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "aws-ec2-logs",
|
||||
"label": "Logs",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"link": "/docs/userguide/collect_logs_from_file/"
|
||||
}
|
||||
```
|
||||
|
||||
### Option with Internal Redirect
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "aws-ec2-metrics-one-click",
|
||||
"label": "One Click AWS",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"link": "/integrations?integration=aws-integration&service=ec2",
|
||||
"internalRedirect": true
|
||||
}
|
||||
```
|
||||
|
||||
> **Important**: Set `internalRedirect: true` only for internal app routes (like `/integrations?...`). Docs links should NOT have this flag.
|
||||
|
||||
### Nested Option (Multi-step Flow)
|
||||
|
||||
```json
|
||||
{
|
||||
"key": "aws-ec2-metrics",
|
||||
"label": "Metrics",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"question": {
|
||||
"desc": "How would you like to set up monitoring?",
|
||||
"helpText": "Choose your setup method.",
|
||||
"options": [...]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Examples
|
||||
|
||||
### Simple Data Source (Direct Link)
|
||||
|
||||
```json
|
||||
{
|
||||
"dataSource": "aws-elb",
|
||||
"label": "AWS ELB",
|
||||
"tags": ["AWS"],
|
||||
"module": "logs",
|
||||
"relatedSearchKeywords": [
|
||||
"aws",
|
||||
"aws elb",
|
||||
"elb logs",
|
||||
"elastic load balancer"
|
||||
],
|
||||
"imgUrl": "/Logos/elb.svg",
|
||||
"link": "/docs/aws-monitoring/elb/"
|
||||
}
|
||||
```
|
||||
|
||||
### Data Source with Single Question Level
|
||||
|
||||
```json
|
||||
{
|
||||
"dataSource": "app-service",
|
||||
"label": "App Service",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"tags": ["Azure"],
|
||||
"module": "apm",
|
||||
"relatedSearchKeywords": ["azure", "app service"],
|
||||
"question": {
|
||||
"desc": "What telemetry data do you want to visualise?",
|
||||
"type": "select",
|
||||
"options": [
|
||||
{
|
||||
"key": "logging",
|
||||
"label": "Logs",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"link": "/docs/azure-monitoring/app-service/logging/"
|
||||
},
|
||||
{
|
||||
"key": "metrics",
|
||||
"label": "Metrics",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"link": "/docs/azure-monitoring/app-service/metrics/"
|
||||
},
|
||||
{
|
||||
"key": "tracing",
|
||||
"label": "Traces",
|
||||
"imgUrl": "/Logos/azure-vm.svg",
|
||||
"link": "/docs/azure-monitoring/app-service/tracing/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Data Source with Nested Questions (2-3 Levels)
|
||||
|
||||
```json
|
||||
{
|
||||
"dataSource": "aws-ec2",
|
||||
"label": "AWS EC2",
|
||||
"tags": ["AWS"],
|
||||
"module": "logs",
|
||||
"relatedSearchKeywords": ["aws", "aws ec2", "ec2 logs", "ec2 metrics"],
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"question": {
|
||||
"desc": "What would you like to monitor for AWS EC2?",
|
||||
"type": "select",
|
||||
"helpText": "Choose the type of telemetry data you want to collect.",
|
||||
"options": [
|
||||
{
|
||||
"key": "aws-ec2-logs",
|
||||
"label": "Logs",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"link": "/docs/userguide/collect_logs_from_file/"
|
||||
},
|
||||
{
|
||||
"key": "aws-ec2-metrics",
|
||||
"label": "Metrics",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"question": {
|
||||
"desc": "How would you like to set up EC2 Metrics monitoring?",
|
||||
"helpText": "One Click uses AWS CloudWatch integration. Manual setup uses OpenTelemetry.",
|
||||
"helpLink": "/docs/aws-monitoring/one-click-vs-manual/",
|
||||
"helpLinkText": "Read the comparison guide →",
|
||||
"options": [
|
||||
{
|
||||
"key": "aws-ec2-metrics-one-click",
|
||||
"label": "One Click AWS",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"link": "/integrations?integration=aws-integration&service=ec2",
|
||||
"internalRedirect": true
|
||||
},
|
||||
{
|
||||
"key": "aws-ec2-metrics-manual",
|
||||
"label": "Manual Setup",
|
||||
"imgUrl": "/Logos/ec2.svg",
|
||||
"link": "/docs/tutorial/opentelemetry-binary-usage-in-virtual-machine/"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### 1. Tags
|
||||
|
||||
- Use existing tags when possible: `AWS`, `Azure`, `GCP`, `database`, `logs`, `apm/traces`, `infrastructure monitoring`, `LLM Monitoring`
|
||||
- Tags are used for grouping in the sidebar
|
||||
- Every data source must have at least one tag
|
||||
|
||||
### 2. Search Keywords
|
||||
|
||||
- Include variations of the name (e.g., `"aws ec2"`, `"ec2"`, `"ec2 logs"`)
|
||||
- Include common misspellings or alternative names
|
||||
- Keep keywords lowercase and alphabetically sorted
|
||||
|
||||
### 3. Logos
|
||||
|
||||
- Place logo files in `public/Logos/`
|
||||
- Use SVG format
|
||||
- Reference as `"/Logos/your-logo.svg"`
|
||||
|
||||
### 4. Links
|
||||
|
||||
- Docs links should start with `/docs/` (will be prefixed with DOCS_BASE_URL)
|
||||
- Internal app links should start with `/integrations`, `/services`, etc.
|
||||
- Only use `internalRedirect: true` for internal app routes
|
||||
|
||||
### 5. Keys
|
||||
|
||||
- Use kebab-case for `dataSource` and option `key` values
|
||||
- Make keys descriptive and unique
|
||||
- Follow the pattern: `{service}-{subtype}-{action}` (e.g., `aws-ec2-metrics-one-click`)
|
||||
|
||||
## Adding a New Data Source

1. Add your data source object to the JSON array
2. Ensure the logo exists in `public/Logos/`
3. Test the flow locally with `yarn dev`
4. Validate by navigating to the [onboarding page](http://localhost:3301/get-started-with-signoz-cloud) on your local machine and confirming that:
   - The data source appears in the list
   - Search keywords work correctly
   - All links redirect to the correct pages
   - Questions display the correct help text and links
   - Tags are used for grouping in the UI sidebar
   - Clicking on Configure redirects to the correct page

A minimal entry that follows these conventions is sketched below.

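For illustration only: `my-service`, its logo path, and the keywords below are placeholders rather than a real SigNoz integration, and the logs doc link is reused from the EC2 example above.

```json
{
  "dataSource": "my-service",
  "label": "My Service",
  "tags": ["logs"],
  "module": "logs",
  "relatedSearchKeywords": ["my service", "my-service logs"],
  "imgUrl": "/Logos/my-service.svg",
  "question": {
    "desc": "What would you like to monitor for My Service?",
    "type": "select",
    "helpText": "Choose the type of telemetry data you want to collect.",
    "options": [
      {
        "key": "my-service-logs",
        "label": "Logs",
        "imgUrl": "/Logos/my-service.svg",
        "link": "/docs/userguide/collect_logs_from_file/"
      }
    ]
  }
}
```
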
@@ -103,9 +103,19 @@ Remember to replace the region and ingestion key with proper values as obtained

Both SigNoz and the OTel demo app (the frontend-proxy service, to be accurate) default to port 8080. To prevent a port allocation conflict, configure the OTel demo application to use port 8081 as the `ENVOY_PORT` value, as shown below, and run the docker compose command.

Both SigNoz and the OTel demo app also configure the same `PROMETHEUS_PORT`; by default both try to start on `9090`, which may cause either of them to fail depending on which one acquires the port first. To prevent this, we need to modify the value of `PROMETHEUS_PORT` too.

```sh
ENVOY_PORT=8081 docker compose up -d
ENVOY_PORT=8081 PROMETHEUS_PORT=9091 docker compose up -d
```

Alternatively, we can set these values in the `.env` file, which reduces the command to just:

```sh
docker compose up -d
```

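For reference, the corresponding `.env` entries would look something like this (docker compose reads the file from the same directory as the compose file; 8081 and 9091 are just the example ports used above, any other free ports work too):

```sh
# .env used by docker compose for the OTel demo app
ENVOY_PORT=8081
PROMETHEUS_PORT=9091
```
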
This spins up multiple microservices with OpenTelemetry instrumentation enabled. You can verify this by running:

```sh
docker compose ps -a
```

@@ -232,7 +232,7 @@ func (p *BaseSeasonalProvider) getPredictedSeries(
|
||||
// moving avg of the previous period series + z score threshold * std dev of the series
|
||||
// moving avg of the previous period series - z score threshold * std dev of the series
|
||||
func (p *BaseSeasonalProvider) getBounds(
|
||||
series, predictedSeries *qbtypes.TimeSeries,
|
||||
series, predictedSeries, weekSeries *qbtypes.TimeSeries,
|
||||
zScoreThreshold float64,
|
||||
) (*qbtypes.TimeSeries, *qbtypes.TimeSeries) {
|
||||
upperBoundSeries := &qbtypes.TimeSeries{
|
||||
@@ -246,8 +246,8 @@ func (p *BaseSeasonalProvider) getBounds(
|
||||
}
|
||||
|
||||
for idx, curr := range series.Values {
|
||||
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(series)
|
||||
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(series)
|
||||
upperBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) + zScoreThreshold*p.getStdDev(weekSeries)
|
||||
lowerBound := p.getMovingAvg(predictedSeries, movingAvgWindowSize, idx) - zScoreThreshold*p.getStdDev(weekSeries)
|
||||
upperBoundSeries.Values = append(upperBoundSeries.Values, &qbtypes.TimeSeriesValue{
|
||||
Timestamp: curr.Timestamp,
|
||||
Value: upperBound,
|
||||
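Spelled out, the bounds computed in this hunk are the moving average of the predicted series shifted up and down by a z-score band, where the standard deviation is now taken over `weekSeries` rather than `series`:

```math
\mathrm{upper}_t = \mathrm{MA}_k(\mathrm{predicted})_t + z \cdot \sigma(\mathrm{weekSeries}), \qquad
\mathrm{lower}_t = \mathrm{MA}_k(\mathrm{predicted})_t - z \cdot \sigma(\mathrm{weekSeries})
```

Here `k` is `movingAvgWindowSize` and `z` is `zScoreThreshold`.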
@@ -398,8 +398,6 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
||||
aggOfInterest := result.Aggregations[0]
|
||||
|
||||
for _, series := range aggOfInterest.Series {
|
||||
stdDev := p.getStdDev(series)
|
||||
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
|
||||
|
||||
pastPeriodSeries := p.getMatchingSeries(ctx, pastPeriodResult, series)
|
||||
currentSeasonSeries := p.getMatchingSeries(ctx, currentSeasonResult, series)
|
||||
@@ -407,6 +405,9 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
||||
past2SeasonSeries := p.getMatchingSeries(ctx, past2SeasonResult, series)
|
||||
past3SeasonSeries := p.getMatchingSeries(ctx, past3SeasonResult, series)
|
||||
|
||||
stdDev := p.getStdDev(currentSeasonSeries)
|
||||
p.logger.InfoContext(ctx, "calculated standard deviation for series", "anomaly_std_dev", stdDev, "anomaly_labels", series.Labels)
|
||||
|
||||
prevSeriesAvg := p.getAvg(pastPeriodSeries)
|
||||
currentSeasonSeriesAvg := p.getAvg(currentSeasonSeries)
|
||||
pastSeasonSeriesAvg := p.getAvg(pastSeasonSeries)
|
||||
@@ -435,6 +436,7 @@ func (p *BaseSeasonalProvider) getAnomalies(ctx context.Context, orgID valuer.UU
|
||||
upperBoundSeries, lowerBoundSeries := p.getBounds(
|
||||
series,
|
||||
predictedSeries,
|
||||
currentSeasonSeries,
|
||||
zScoreThreshold,
|
||||
)
|
||||
aggOfInterest.UpperBoundSeries = append(aggOfInterest.UpperBoundSeries, upperBoundSeries)
|
||||
|
||||
240
ee/authn/callbackauthn/oidccallbackauthn/authn.go
Normal file
@@ -0,0 +1,240 @@
|
||||
package oidccallbackauthn
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/coreos/go-oidc/v3/oidc"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
const (
|
||||
redirectPath string = "/api/v1/complete/oidc"
|
||||
)
|
||||
|
||||
var defaultScopes []string = []string{"email", "profile", oidc.ScopeOpenID}
|
||||
|
||||
var _ authn.CallbackAuthN = (*AuthN)(nil)
|
||||
|
||||
type AuthN struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
store authtypes.AuthNStore
|
||||
licensing licensing.Licensing
|
||||
httpClient *client.Client
|
||||
}
|
||||
|
||||
func New(store authtypes.AuthNStore, licensing licensing.Licensing, providerSettings factory.ProviderSettings) (*AuthN, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn")
|
||||
|
||||
httpClient, err := client.New(providerSettings.Logger, providerSettings.TracerProvider, providerSettings.MeterProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &AuthN{
|
||||
settings: settings,
|
||||
store: store,
|
||||
licensing: licensing,
|
||||
httpClient: httpClient,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
|
||||
if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderOIDC {
|
||||
return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "domain type is not oidc")
|
||||
}
|
||||
|
||||
_, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, siteURL, authDomain)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return oauth2Config.AuthCodeURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String()), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) HandleCallback(ctx context.Context, query url.Values) (*authtypes.CallbackIdentity, error) {
|
||||
if err := query.Get("error"); err != "" {
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: error while authenticating").WithAdditional(query.Get("error_description"))
|
||||
}
|
||||
|
||||
state, err := authtypes.NewStateFromString(query.Get("state"))
|
||||
if err != nil {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "oidc: invalid state").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
oidcProvider, oauth2Config, err := a.oidcProviderAndoauth2Config(ctx, state.URL, authDomain)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx = context.WithValue(ctx, oauth2.HTTPClient, a.httpClient.Client())
|
||||
token, err := oauth2Config.Exchange(ctx, query.Get("code"))
|
||||
if err != nil {
|
||||
var retrieveError *oauth2.RetrieveError
|
||||
if errors.As(err, &retrieveError) {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to get token").WithAdditional(retrieveError.ErrorDescription).WithAdditional(string(retrieveError.Body))
|
||||
}
|
||||
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get token").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
claims, err := a.claimsFromIDToken(ctx, authDomain, oidcProvider, token)
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if claims == nil && authDomain.AuthDomainConfig().OIDC.GetUserInfo {
|
||||
claims, err = a.claimsFromUserInfo(ctx, oidcProvider, token)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
emailClaim, ok := claims[authDomain.AuthDomainConfig().OIDC.ClaimMapping.Email].(string)
|
||||
if !ok {
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email in claims")
|
||||
}
|
||||
|
||||
email, err := valuer.NewEmail(emailClaim)
|
||||
if err != nil {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to parse email").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if !authDomain.AuthDomainConfig().OIDC.InsecureSkipEmailVerified {
|
||||
emailVerifiedClaim, ok := claims["email_verified"].(bool)
|
||||
if !ok {
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: missing email_verified in claims")
|
||||
}
|
||||
|
||||
if !emailVerifiedClaim {
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "oidc: email is not verified")
|
||||
}
|
||||
}
|
||||
|
||||
name := ""
|
||||
if nameClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Name; nameClaim != "" {
|
||||
if n, ok := claims[nameClaim].(string); ok {
|
||||
name = n
|
||||
}
|
||||
}
|
||||
|
||||
var groups []string
|
||||
if groupsClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Groups; groupsClaim != "" {
|
||||
if claimValue, exists := claims[groupsClaim]; exists {
|
||||
switch g := claimValue.(type) {
|
||||
case []any:
|
||||
for _, group := range g {
|
||||
if gs, ok := group.(string); ok {
|
||||
groups = append(groups, gs)
|
||||
}
|
||||
}
|
||||
case string:
|
||||
// Some IDPs return a single group as a string instead of an array
|
||||
groups = append(groups, g)
|
||||
default:
|
||||
a.settings.Logger().WarnContext(ctx, "oidc: unsupported groups type", "type", fmt.Sprintf("%T", claimValue))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
role := ""
|
||||
if roleClaim := authDomain.AuthDomainConfig().OIDC.ClaimMapping.Role; roleClaim != "" {
|
||||
if r, ok := claims[roleClaim].(string); ok {
|
||||
role = r
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
return &authtypes.AuthNProviderInfo{
|
||||
RelayStatePath: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *AuthN) oidcProviderAndoauth2Config(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (*oidc.Provider, *oauth2.Config, error) {
|
||||
if authDomain.AuthDomainConfig().OIDC.IssuerAlias != "" {
|
||||
ctx = oidc.InsecureIssuerURLContext(ctx, authDomain.AuthDomainConfig().OIDC.IssuerAlias)
|
||||
}
|
||||
|
||||
oidcProvider, err := oidc.NewProvider(ctx, authDomain.AuthDomainConfig().OIDC.Issuer)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
scopes := make([]string, len(defaultScopes))
|
||||
copy(scopes, defaultScopes)
|
||||
|
||||
if authDomain.AuthDomainConfig().RoleMapping != nil && len(authDomain.AuthDomainConfig().RoleMapping.GroupMappings) > 0 {
|
||||
scopes = append(scopes, "groups")
|
||||
}
|
||||
|
||||
return oidcProvider, &oauth2.Config{
|
||||
ClientID: authDomain.AuthDomainConfig().OIDC.ClientID,
|
||||
ClientSecret: authDomain.AuthDomainConfig().OIDC.ClientSecret,
|
||||
Endpoint: oidcProvider.Endpoint(),
|
||||
Scopes: scopes,
|
||||
RedirectURL: (&url.URL{
|
||||
Scheme: siteURL.Scheme,
|
||||
Host: siteURL.Host,
|
||||
Path: redirectPath,
|
||||
}).String(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *AuthN) claimsFromIDToken(ctx context.Context, authDomain *authtypes.AuthDomain, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) {
|
||||
rawIDToken, ok := token.Extra("id_token").(string)
|
||||
if !ok {
|
||||
return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, "oidc: no id_token in token response")
|
||||
}
|
||||
|
||||
verifier := provider.Verifier(&oidc.Config{ClientID: authDomain.AuthDomainConfig().OIDC.ClientID})
|
||||
idToken, err := verifier.Verify(ctx, rawIDToken)
|
||||
if err != nil {
|
||||
return nil, errors.Newf(errors.TypeForbidden, errors.CodeForbidden, "oidc: failed to verify token").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
var claims map[string]any
|
||||
if err := idToken.Claims(&claims); err != nil {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return claims, nil
|
||||
}
|
||||
|
||||
func (a *AuthN) claimsFromUserInfo(ctx context.Context, provider *oidc.Provider, token *oauth2.Token) (map[string]any, error) {
|
||||
var claims map[string]any
|
||||
|
||||
userInfo, err := provider.UserInfo(ctx, oauth2.StaticTokenSource(&oauth2.Token{
|
||||
AccessToken: token.AccessToken,
|
||||
TokenType: "Bearer", // The UserInfo endpoint requires a bearer token as per RFC6750
|
||||
}))
|
||||
if err != nil {
|
||||
return nil, errors.Newf(errors.TypeInternal, errors.CodeInternal, "oidc: failed to get user info").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
if err := userInfo.Claims(&claims); err != nil {
|
||||
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "oidc: failed to decode claims").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return claims, nil
|
||||
}
|
||||
182
ee/authn/callbackauthn/samlcallbackauthn/authn.go
Normal file
@@ -0,0 +1,182 @@
|
||||
package samlcallbackauthn
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/x509"
|
||||
"encoding/base64"
|
||||
"encoding/pem"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authn"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
saml2 "github.com/russellhaering/gosaml2"
|
||||
dsig "github.com/russellhaering/goxmldsig"
|
||||
)
|
||||
|
||||
const (
|
||||
redirectPath string = "/api/v1/complete/saml"
|
||||
)
|
||||
|
||||
var _ authn.CallbackAuthN = (*AuthN)(nil)
|
||||
|
||||
type AuthN struct {
|
||||
store authtypes.AuthNStore
|
||||
licensing licensing.Licensing
|
||||
}
|
||||
|
||||
func New(ctx context.Context, store authtypes.AuthNStore, licensing licensing.Licensing) (*AuthN, error) {
|
||||
return &AuthN{
|
||||
store: store,
|
||||
licensing: licensing,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *AuthN) LoginURL(ctx context.Context, siteURL *url.URL, authDomain *authtypes.AuthDomain) (string, error) {
|
||||
if authDomain.AuthDomainConfig().AuthNProvider != authtypes.AuthNProviderSAML {
|
||||
return "", errors.Newf(errors.TypeInternal, authtypes.ErrCodeAuthDomainMismatch, "saml: domain type is not saml")
|
||||
}
|
||||
|
||||
sp, err := a.serviceProvider(siteURL, authDomain)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
url, err := sp.BuildAuthURL(authtypes.NewState(siteURL, authDomain.StorableAuthDomain().ID).URL.String())
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return url, nil
|
||||
}
|
||||
|
||||
func (a *AuthN) HandleCallback(ctx context.Context, formValues url.Values) (*authtypes.CallbackIdentity, error) {
|
||||
state, err := authtypes.NewStateFromString(formValues.Get("RelayState"))
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeInvalidInput, authtypes.ErrCodeInvalidState, "saml: invalid state").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
authDomain, err := a.store.GetAuthDomainFromID(ctx, state.DomainID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, err = a.licensing.GetActive(ctx, authDomain.StorableAuthDomain().OrgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
sp, err := a.serviceProvider(state.URL, authDomain)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
assertionInfo, err := sp.RetrieveAssertionInfo(formValues.Get("SAMLResponse"))
|
||||
if err != nil {
|
||||
if errors.As(err, &saml2.ErrVerification{}) {
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, err.Error())
|
||||
}
|
||||
|
||||
if errors.As(err, &saml2.ErrMissingElement{}) {
|
||||
return nil, errors.New(errors.TypeNotFound, errors.CodeNotFound, err.Error())
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if assertionInfo.WarningInfo.InvalidTime {
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "saml: expired saml response")
|
||||
}
|
||||
|
||||
email, err := valuer.NewEmail(assertionInfo.NameID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "saml: invalid email").WithAdditional("The nameID assertion is used to retrieve the email address, please check your IDP configuration and try again.")
|
||||
}
|
||||
|
||||
name := ""
|
||||
if nameAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Name; nameAttribute != "" {
|
||||
if val := assertionInfo.Values.Get(nameAttribute); val != "" {
|
||||
name = val
|
||||
}
|
||||
}
|
||||
|
||||
var groups []string
|
||||
if groupAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Groups; groupAttribute != "" {
|
||||
groups = assertionInfo.Values.GetAll(groupAttribute)
|
||||
}
|
||||
|
||||
role := ""
|
||||
if roleAttribute := authDomain.AuthDomainConfig().SAML.AttributeMapping.Role; roleAttribute != "" {
|
||||
if val := assertionInfo.Values.Get(roleAttribute); val != "" {
|
||||
role = val
|
||||
}
|
||||
}
|
||||
|
||||
return authtypes.NewCallbackIdentity(name, email, authDomain.StorableAuthDomain().OrgID, state, groups, role), nil
|
||||
}
|
||||
|
||||
func (a *AuthN) ProviderInfo(ctx context.Context, authDomain *authtypes.AuthDomain) *authtypes.AuthNProviderInfo {
|
||||
state := authtypes.NewState(&url.URL{Path: "login"}, authDomain.StorableAuthDomain().ID).URL.String()
|
||||
|
||||
return &authtypes.AuthNProviderInfo{
|
||||
RelayStatePath: &state,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *AuthN) serviceProvider(siteURL *url.URL, authDomain *authtypes.AuthDomain) (*saml2.SAMLServiceProvider, error) {
|
||||
certStore, err := a.getCertificateStore(authDomain)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
acsURL := &url.URL{Scheme: siteURL.Scheme, Host: siteURL.Host, Path: redirectPath}
|
||||
|
||||
// Note:
|
||||
// The ServiceProviderIssuer is the client id in case of keycloak. Since we set it to the host here, we need to set the client id == host in keycloak.
|
||||
// For AWSSSO, this is the value of Application SAML audience.
|
||||
return &saml2.SAMLServiceProvider{
|
||||
IdentityProviderSSOURL: authDomain.AuthDomainConfig().SAML.SamlIdp,
|
||||
IdentityProviderIssuer: authDomain.AuthDomainConfig().SAML.SamlEntity,
|
||||
ServiceProviderIssuer: siteURL.Host,
|
||||
AssertionConsumerServiceURL: acsURL.String(),
|
||||
SignAuthnRequests: !authDomain.AuthDomainConfig().SAML.InsecureSkipAuthNRequestsSigned,
|
||||
AllowMissingAttributes: true,
|
||||
IDPCertificateStore: certStore,
|
||||
SPKeyStore: dsig.RandomKeyStoreForTest(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *AuthN) getCertificateStore(authDomain *authtypes.AuthDomain) (dsig.X509CertificateStore, error) {
|
||||
certStore := &dsig.MemoryX509CertificateStore{
|
||||
Roots: []*x509.Certificate{},
|
||||
}
|
||||
|
||||
var certBytes []byte
|
||||
if strings.Contains(authDomain.AuthDomainConfig().SAML.SamlCert, "-----BEGIN CERTIFICATE-----") {
|
||||
block, _ := pem.Decode([]byte(authDomain.AuthDomainConfig().SAML.SamlCert))
|
||||
if block == nil {
|
||||
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no valid pem cert found")
|
||||
}
|
||||
|
||||
certBytes = block.Bytes
|
||||
} else {
|
||||
certData, err := base64.StdEncoding.DecodeString(authDomain.AuthDomainConfig().SAML.SamlCert)
|
||||
if err != nil {
|
||||
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to read certificate: %s", err.Error())
|
||||
}
|
||||
|
||||
certBytes = certData
|
||||
}
|
||||
|
||||
idpCert, err := x509.ParseCertificate(certBytes)
|
||||
if err != nil {
|
||||
return certStore, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to prepare saml request, invalid cert: %s", err.Error())
|
||||
}
|
||||
|
||||
certStore.Roots = append(certStore.Roots, idpCert)
|
||||
|
||||
return certStore, nil
|
||||
}
|
||||
98
ee/authz/openfgaauthz/provider.go
Normal file
@@ -0,0 +1,98 @@
|
||||
package openfgaauthz
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
pkgopenfgaauthz "github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
pkgAuthzService authz.AuthZ
|
||||
}
|
||||
|
||||
func NewProviderFactory(sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) factory.ProviderFactory[authz.AuthZ, authz.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("openfga"), func(ctx context.Context, ps factory.ProviderSettings, config authz.Config) (authz.AuthZ, error) {
|
||||
return newOpenfgaProvider(ctx, ps, config, sqlstore, openfgaSchema)
|
||||
})
|
||||
}
|
||||
|
||||
func newOpenfgaProvider(ctx context.Context, settings factory.ProviderSettings, config authz.Config, sqlstore sqlstore.SQLStore, openfgaSchema []openfgapkgtransformer.ModuleFile) (authz.AuthZ, error) {
|
||||
pkgOpenfgaAuthzProvider := pkgopenfgaauthz.NewProviderFactory(sqlstore, openfgaSchema)
|
||||
pkgAuthzService, err := pkgOpenfgaAuthzProvider.New(ctx, settings, config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &provider{
|
||||
pkgAuthzService: pkgAuthzService,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *provider) Start(ctx context.Context) error {
|
||||
return provider.pkgAuthzService.Start(ctx)
|
||||
}
|
||||
|
||||
func (provider *provider) Stop(ctx context.Context) error {
|
||||
return provider.pkgAuthzService.Stop(ctx)
|
||||
}
|
||||
|
||||
func (provider *provider) Check(ctx context.Context, tuple *openfgav1.TupleKey) error {
|
||||
return provider.pkgAuthzService.Check(ctx, tuple)
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreation(ctx context.Context, claims authtypes.Claims, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableUser, claims.UserID, orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) CheckWithTupleCreationWithoutClaims(ctx context.Context, orgID valuer.UUID, relation authtypes.Relation, typeable authtypes.Typeable, selectors []authtypes.Selector, _ []authtypes.Selector) error {
|
||||
subject, err := authtypes.NewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.String(), orgID, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
tuples, err := typeable.Tuples(subject, relation, selectors, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = provider.BatchCheck(ctx, tuples)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *provider) BatchCheck(ctx context.Context, tuples []*openfgav1.TupleKey) error {
|
||||
return provider.pkgAuthzService.BatchCheck(ctx, tuples)
|
||||
}
|
||||
|
||||
func (provider *provider) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {
|
||||
return provider.pkgAuthzService.ListObjects(ctx, subject, relation, typeable)
|
||||
}
|
||||
|
||||
func (provider *provider) Write(ctx context.Context, additions []*openfgav1.TupleKey, deletions []*openfgav1.TupleKey) error {
|
||||
return provider.pkgAuthzService.Write(ctx, additions, deletions)
|
||||
}
|
||||
@@ -15,22 +15,18 @@ type anonymous
|
||||
|
||||
type role
|
||||
relations
|
||||
define assignee: [user]
|
||||
define assignee: [user, anonymous]
|
||||
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
type resources
|
||||
type metaresources
|
||||
relations
|
||||
define create: [user, role#assignee]
|
||||
define list: [user, role#assignee]
|
||||
|
||||
define read: [user, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
define delete: [user, role#assignee]
|
||||
|
||||
type resource
|
||||
type metaresource
|
||||
relations
|
||||
define read: [user, anonymous, role#assignee]
|
||||
define update: [user, role#assignee]
|
||||
@@ -39,6 +35,6 @@ type resource
|
||||
define block: [user, role#assignee]
|
||||
|
||||
|
||||
type telemetry
|
||||
type telemetryresource
|
||||
relations
|
||||
define read: [user, anonymous, role#assignee]
|
||||
define read: [user, role#assignee]
|
||||
|
||||
282
ee/gateway/httpgateway/provider.go
Normal file
@@ -0,0 +1,282 @@
|
||||
package httpgateway
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/gateway"
|
||||
"github.com/SigNoz/signoz/pkg/http/client"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/types/gatewaytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/tidwall/gjson"
|
||||
)
|
||||
|
||||
type Provider struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
config gateway.Config
|
||||
httpClient *client.Client
|
||||
licensing licensing.Licensing
|
||||
}
|
||||
|
||||
func NewProviderFactory(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
|
||||
return factory.NewProviderFactory(factory.MustNewName("http"), func(ctx context.Context, ps factory.ProviderSettings, c gateway.Config) (gateway.Gateway, error) {
|
||||
return New(ctx, ps, c, licensing)
|
||||
})
|
||||
}
|
||||
|
||||
func New(ctx context.Context, providerSettings factory.ProviderSettings, config gateway.Config, licensing licensing.Licensing) (gateway.Gateway, error) {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/ee/gateway/httpgateway")
|
||||
|
||||
httpClient, err := client.New(
|
||||
settings.Logger(),
|
||||
providerSettings.TracerProvider,
|
||||
providerSettings.MeterProvider,
|
||||
client.WithRequestResponseLog(true),
|
||||
client.WithRetryCount(3),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &Provider{
|
||||
settings: settings,
|
||||
config: config,
|
||||
httpClient: httpClient,
|
||||
licensing: licensing,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) GetIngestionKeys(ctx context.Context, orgID valuer.UUID, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
|
||||
qParams := url.Values{}
|
||||
qParams.Add("page", strconv.Itoa(page))
|
||||
qParams.Add("per_page", strconv.Itoa(perPage))
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys", qParams, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var ingestionKeys []gatewaytypes.IngestionKey
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pagination gatewaytypes.Pagination
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &gatewaytypes.GettableIngestionKeys{
|
||||
Keys: ingestionKeys,
|
||||
Pagination: pagination,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) SearchIngestionKeysByName(ctx context.Context, orgID valuer.UUID, name string, page, perPage int) (*gatewaytypes.GettableIngestionKeys, error) {
|
||||
qParams := url.Values{}
|
||||
qParams.Add("name", name)
|
||||
qParams.Add("page", strconv.Itoa(page))
|
||||
qParams.Add("per_page", strconv.Itoa(perPage))
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodGet, "/v1/workspaces/me/keys/search", qParams, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var ingestionKeys []gatewaytypes.IngestionKey
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &ingestionKeys); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var pagination gatewaytypes.Pagination
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "_pagination").String()), &pagination); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &gatewaytypes.GettableIngestionKeys{
|
||||
Keys: ingestionKeys,
|
||||
Pagination: pagination,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) CreateIngestionKey(ctx context.Context, orgID valuer.UUID, name string, tags []string, expiresAt time.Time) (*gatewaytypes.GettableCreatedIngestionKey, error) {
|
||||
requestBody := gatewaytypes.PostableIngestionKey{
|
||||
Name: name,
|
||||
Tags: tags,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys", nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var createdKeyResponse gatewaytypes.GettableCreatedIngestionKey
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdKeyResponse); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &createdKeyResponse, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) UpdateIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string, name string, tags []string, expiresAt time.Time) error {
|
||||
requestBody := gatewaytypes.PostableIngestionKey{
|
||||
Name: name,
|
||||
Tags: tags,
|
||||
ExpiresAt: expiresAt,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/keys/"+keyID, nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) DeleteIngestionKey(ctx context.Context, orgID valuer.UUID, keyID string) error {
|
||||
_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/keys/"+keyID, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) CreateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, keyID string, signal string, limitConfig gatewaytypes.LimitConfig, tags []string) (*gatewaytypes.GettableCreatedIngestionKeyLimit, error) {
|
||||
requestBody := gatewaytypes.PostableIngestionKeyLimit{
|
||||
Signal: signal,
|
||||
Config: limitConfig,
|
||||
Tags: tags,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
responseBody, err := provider.do(ctx, orgID, http.MethodPost, "/v1/workspaces/me/keys/"+keyID+"/limits", nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var createdIngestionKeyLimitResponse gatewaytypes.GettableCreatedIngestionKeyLimit
|
||||
if err := json.Unmarshal([]byte(gjson.GetBytes(responseBody, "data").String()), &createdIngestionKeyLimitResponse); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &createdIngestionKeyLimitResponse, nil
|
||||
}
|
||||
|
||||
func (provider *Provider) UpdateIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string, limitConfig gatewaytypes.LimitConfig, tags []string) error {
|
||||
requestBody := gatewaytypes.UpdatableIngestionKeyLimit{
|
||||
Config: limitConfig,
|
||||
Tags: tags,
|
||||
}
|
||||
requestBodyBytes, err := json.Marshal(requestBody)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = provider.do(ctx, orgID, http.MethodPatch, "/v1/workspaces/me/limits/"+limitID, nil, requestBodyBytes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) DeleteIngestionKeyLimit(ctx context.Context, orgID valuer.UUID, limitID string) error {
|
||||
_, err := provider.do(ctx, orgID, http.MethodDelete, "/v1/workspaces/me/limits/"+limitID, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (provider *Provider) do(ctx context.Context, orgID valuer.UUID, method string, path string, queryParams url.Values, body []byte) ([]byte, error) {
|
||||
license, err := provider.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "no valid license found").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
// build url
|
||||
requestURL := provider.config.URL.JoinPath(path)
|
||||
|
||||
// add query params to the url
|
||||
if queryParams != nil {
|
||||
requestURL.RawQuery = queryParams.Encode()
|
||||
}
|
||||
|
||||
// build request
|
||||
request, err := http.NewRequestWithContext(ctx, method, requestURL.String(), bytes.NewBuffer(body))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// add headers needed to call gateway
|
||||
request.Header.Set("Content-Type", "application/json")
|
||||
request.Header.Set("X-Signoz-Cloud-Api-Key", license.Key)
|
||||
request.Header.Set("X-Consumer-Username", "lid:00000000-0000-0000-0000-000000000000")
|
||||
request.Header.Set("X-Consumer-Groups", "ns:default")
|
||||
|
||||
// execute request
|
||||
response, err := provider.httpClient.Do(request)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// read response
|
||||
defer response.Body.Close()
|
||||
responseBody, err := io.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// only 2XX
|
||||
if response.StatusCode/100 == 2 {
|
||||
return responseBody, nil
|
||||
}
|
||||
|
||||
errorMessage := gjson.GetBytes(responseBody, "error").String()
|
||||
if errorMessage == "" {
|
||||
errorMessage = "an unknown error occurred"
|
||||
}
|
||||
|
||||
// return error for non 2XX
|
||||
return nil, provider.errFromStatusCode(response.StatusCode, errorMessage)
|
||||
}
|
||||
|
||||
func (provider *Provider) errFromStatusCode(code int, errorMessage string) error {
|
||||
switch code {
|
||||
case http.StatusBadRequest:
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, errorMessage)
|
||||
case http.StatusUnauthorized:
|
||||
return errors.New(errors.TypeUnauthenticated, errors.CodeUnauthenticated, errorMessage)
|
||||
case http.StatusForbidden:
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, errorMessage)
|
||||
case http.StatusNotFound:
|
||||
return errors.New(errors.TypeNotFound, errors.CodeNotFound, errorMessage)
|
||||
case http.StatusConflict:
|
||||
return errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, errorMessage)
|
||||
}
|
||||
|
||||
return errors.New(errors.TypeInternal, errors.CodeInternal, errorMessage)
|
||||
}
|
||||
@@ -1,132 +0,0 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"log/slog"
|
||||
"net/http"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/authz"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
const (
|
||||
authzDeniedMessage string = "::AUTHZ-DENIED::"
|
||||
)
|
||||
|
||||
type AuthZ struct {
|
||||
logger *slog.Logger
|
||||
authzService authz.AuthZ
|
||||
}
|
||||
|
||||
func NewAuthZ(logger *slog.Logger) *AuthZ {
|
||||
if logger == nil {
|
||||
panic("cannot build authz middleware, logger is empty")
|
||||
}
|
||||
|
||||
return &AuthZ{logger: logger}
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) ViewAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsViewer(); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) EditAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsEditor(); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) AdminAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
if err := claims.IsAdmin(); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) SelfAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
id := mux.Vars(req)["id"]
|
||||
if err := claims.IsSelfAccess(id); err != nil {
|
||||
middleware.logger.WarnContext(req.Context(), authzDeniedMessage, "claims", claims)
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
func (middleware *AuthZ) OpenAccess(next http.HandlerFunc) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
|
||||
// Check middleware accepts the relation, typeable, parentTypeable (for direct access + group relations) and a callback function to derive selector and parentSelectors on per request basis.
|
||||
func (middleware *AuthZ) Check(next http.HandlerFunc, relation authtypes.Relation, translation authtypes.Relation, typeable authtypes.Typeable, parentTypeable authtypes.Typeable, cb authtypes.SelectorCallbackFn) http.HandlerFunc {
|
||||
return http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) {
|
||||
claims, err := authtypes.ClaimsFromContext(req.Context())
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
selector, parentSelectors, err := cb(req)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
err = middleware.authzService.CheckWithTupleCreation(req.Context(), claims, relation, typeable, selector, parentTypeable, parentSelectors...)
|
||||
if err != nil {
|
||||
render.Error(rw, err)
|
||||
return
|
||||
}
|
||||
|
||||
next(rw, req)
|
||||
})
|
||||
}
|
||||
@@ -1,10 +1,10 @@
|
||||
package licensing
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
)
|
||||
|
||||
@@ -18,7 +18,7 @@ func Config(pollInterval time.Duration, failureThreshold int) licensing.Config {
|
||||
once.Do(func() {
|
||||
config = licensing.Config{PollInterval: pollInterval, FailureThreshold: failureThreshold}
|
||||
if err := config.Validate(); err != nil {
|
||||
panic(fmt.Errorf("invalid licensing config: %w", err))
|
||||
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid licensing config"))
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
299
ee/modules/dashboard/impldashboard/module.go
Normal file
@@ -0,0 +1,299 @@
|
||||
package impldashboard
|
||||
|
||||
import (
|
||||
"context"
|
||||
"maps"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/analytics"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/licensing"
|
||||
"github.com/SigNoz/signoz/pkg/modules/dashboard"
|
||||
pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/role"
|
||||
"github.com/SigNoz/signoz/pkg/querier"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/roletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type module struct {
|
||||
pkgDashboardModule dashboard.Module
|
||||
store dashboardtypes.Store
|
||||
settings factory.ScopedProviderSettings
|
||||
roleSetter role.Setter
|
||||
granter role.Granter
|
||||
querier querier.Querier
|
||||
licensing licensing.Licensing
|
||||
}
|
||||
|
||||
func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, roleSetter role.Setter, granter role.Granter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
|
||||
scopedProviderSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard")
|
||||
pkgDashboardModule := pkgimpldashboard.NewModule(store, settings, analytics, orgGetter, queryParser)
|
||||
|
||||
return &module{
|
||||
pkgDashboardModule: pkgDashboardModule,
|
||||
store: store,
|
||||
settings: scopedProviderSettings,
|
||||
roleSetter: roleSetter,
|
||||
granter: granter,
|
||||
querier: querier,
|
||||
licensing: licensing,
|
||||
}
|
||||
}
|
||||
|
||||
func (module *module) CreatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
|
||||
_, err := module.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
storablePublicDashboard, err := module.store.GetPublic(ctx, publicDashboard.DashboardID.StringValue())
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
return err
|
||||
}
|
||||
if storablePublicDashboard != nil {
|
||||
return errors.Newf(errors.TypeAlreadyExists, dashboardtypes.ErrCodePublicDashboardAlreadyExists, "dashboard with id %s is already public", storablePublicDashboard.DashboardID)
|
||||
}
|
||||
|
||||
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.granter.Grant(ctx, orgID, roletypes.SigNozAnonymousRoleName, authtypes.MustNewSubject(authtypes.TypeableAnonymous, authtypes.AnonymousUser.StringValue(), orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
additionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, []*authtypes.Object{additionObject}, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.CreatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) GetPublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) (*dashboardtypes.PublicDashboard, error) {
|
||||
_, err := module.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
storablePublicDashboard, err := module.store.GetPublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return dashboardtypes.NewPublicDashboardFromStorablePublicDashboard(storablePublicDashboard), nil
|
||||
}
|
||||
|
||||
func (module *module) GetDashboardByPublicID(ctx context.Context, id valuer.UUID) (*dashboardtypes.Dashboard, error) {
|
||||
storableDashboard, err := module.store.GetDashboardByPublicID(ctx, id.StringValue())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return dashboardtypes.NewDashboardFromStorableDashboard(storableDashboard), nil
|
||||
}
|
||||
|
||||
func (module *module) GetPublicDashboardSelectorsAndOrg(ctx context.Context, id valuer.UUID, orgs []*types.Organization) ([]authtypes.Selector, valuer.UUID, error) {
|
||||
orgIDs := make([]string, len(orgs))
|
||||
for idx, org := range orgs {
|
||||
orgIDs[idx] = org.ID.StringValue()
|
||||
}
|
||||
|
||||
storableDashboard, err := module.store.GetDashboardByOrgsAndPublicID(ctx, orgIDs, id.StringValue())
|
||||
if err != nil {
|
||||
return nil, valuer.UUID{}, err
|
||||
}
|
||||
|
||||
return []authtypes.Selector{
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, id.StringValue()),
|
||||
}, storableDashboard.OrgID, nil
|
||||
}
|
||||
|
||||
func (module *module) GetPublicWidgetQueryRange(ctx context.Context, id valuer.UUID, widgetIdx, startTime, endTime uint64) (*querybuildertypesv5.QueryRangeResponse, error) {
|
||||
dashboard, err := module.GetDashboardByPublicID(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
query, err := dashboard.GetWidgetQuery(startTime, endTime, widgetIdx, module.settings.Logger())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return module.querier.QueryRange(ctx, dashboard.OrgID, query)
|
||||
}
|
||||
|
||||
func (module *module) UpdatePublic(ctx context.Context, orgID valuer.UUID, publicDashboard *dashboardtypes.PublicDashboard) error {
|
||||
_, err := module.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
return module.store.UpdatePublic(ctx, dashboardtypes.NewStorablePublicDashboardFromPublicDashboard(publicDashboard))
|
||||
}
|
||||
|
||||
func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
|
||||
dashboard, err := module.Get(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if dashboard.Locked {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "dashboard is locked, please unlock the dashboard to be delete it")
|
||||
}
|
||||
|
||||
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
err := module.deletePublic(ctx, orgID, id)
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.Delete(ctx, orgID, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) DeletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
|
||||
_, err := module.licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
|
||||
}
|
||||
|
||||
publicDashboard, err := module.GetPublic(ctx, orgID, dashboardID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *module) Collect(ctx context.Context, orgID valuer.UUID) (map[string]any, error) {
|
||||
dashboards, err := module.store.List(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
publicDashboards, err := module.store.ListPublic(ctx, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
stats := make(map[string]any)
|
||||
maps.Copy(stats, dashboardtypes.NewStatsFromStorableDashboards(dashboards))
|
||||
maps.Copy(stats, dashboardtypes.NewStatsFromStorablePublicDashboards(publicDashboards))
|
||||
return stats, nil
|
||||
}
|
||||
|
||||
func (module *module) Create(ctx context.Context, orgID valuer.UUID, createdBy string, creator valuer.UUID, data dashboardtypes.PostableDashboard) (*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.Create(ctx, orgID, createdBy, creator, data)
|
||||
}
|
||||
|
||||
func (module *module) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.Get(ctx, orgID, id)
|
||||
}
|
||||
|
||||
func (module *module) GetByMetricNames(ctx context.Context, orgID valuer.UUID, metricNames []string) (map[string][]map[string]string, error) {
|
||||
return module.pkgDashboardModule.GetByMetricNames(ctx, orgID, metricNames)
|
||||
}
|
||||
|
||||
func (module *module) MustGetTypeables() []authtypes.Typeable {
|
||||
return module.pkgDashboardModule.MustGetTypeables()
|
||||
}
|
||||
|
||||
func (module *module) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.List(ctx, orgID)
|
||||
}
|
||||
|
||||
func (module *module) Update(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, data dashboardtypes.UpdatableDashboard, diff int) (*dashboardtypes.Dashboard, error) {
|
||||
return module.pkgDashboardModule.Update(ctx, orgID, id, updatedBy, data, diff)
|
||||
}
|
||||
|
||||
func (module *module) LockUnlock(ctx context.Context, orgID valuer.UUID, id valuer.UUID, updatedBy string, role types.Role, lock bool) error {
|
||||
return module.pkgDashboardModule.LockUnlock(ctx, orgID, id, updatedBy, role, lock)
|
||||
}
|
||||
|
||||
func (module *module) deletePublic(ctx context.Context, orgID valuer.UUID, dashboardID valuer.UUID) error {
|
||||
publicDashboard, err := module.store.GetPublic(ctx, dashboardID.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
role, err := module.roleSetter.GetOrCreate(ctx, orgID, roletypes.NewRole(roletypes.SigNozAnonymousRoleName, roletypes.SigNozAnonymousRoleDescription, roletypes.RoleTypeManaged, orgID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
deletionObject := authtypes.MustNewObject(
|
||||
authtypes.Resource{
|
||||
Name: dashboardtypes.TypeableMetaResourcePublicDashboard.Name(),
|
||||
Type: authtypes.TypeMetaResource,
|
||||
},
|
||||
authtypes.MustNewSelector(authtypes.TypeMetaResource, publicDashboard.ID.String()),
|
||||
)
|
||||
|
||||
err = module.roleSetter.PatchObjects(ctx, orgID, role.Name, authtypes.RelationRead, nil, []*authtypes.Object{deletionObject})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.store.DeletePublic(ctx, dashboardID.StringValue())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
165
ee/modules/role/implrole/setter.go
Normal file
@@ -0,0 +1,165 @@
package implrole

import (
	"context"
	"slices"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/role"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type setter struct {
	store     roletypes.Store
	authz     authz.AuthZ
	licensing licensing.Licensing
	registry  []role.RegisterTypeable
}

func NewSetter(store roletypes.Store, authz authz.AuthZ, licensing licensing.Licensing, registry []role.RegisterTypeable) role.Setter {
	return &setter{
		store:     store,
		authz:     authz,
		licensing: licensing,
		registry:  registry,
	}
}

func (setter *setter) Create(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
}

func (setter *setter) GetOrCreate(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) (*roletypes.Role, error) {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	existingRole, err := setter.store.GetByOrgIDAndName(ctx, role.OrgID, role.Name)
	if err != nil {
		if !errors.Ast(err, errors.TypeNotFound) {
			return nil, err
		}
	}

	if existingRole != nil {
		return roletypes.NewRoleFromStorableRole(existingRole), nil
	}

	err = setter.store.Create(ctx, roletypes.NewStorableRoleFromRole(role))
	if err != nil {
		return nil, err
	}

	return role, nil
}

func (setter *setter) GetResources(_ context.Context) []*authtypes.Resource {
	typeables := make([]authtypes.Typeable, 0)
	for _, register := range setter.registry {
		typeables = append(typeables, register.MustGetTypeables()...)
	}
	// role module cannot self register itself!
	typeables = append(typeables, setter.MustGetTypeables()...)

	resources := make([]*authtypes.Resource, 0)
	for _, typeable := range typeables {
		resources = append(resources, &authtypes.Resource{Name: typeable.Name(), Type: typeable.Type()})
	}

	return resources
}

func (setter *setter) GetObjects(ctx context.Context, orgID valuer.UUID, id valuer.UUID, relation authtypes.Relation) ([]*authtypes.Object, error) {
	storableRole, err := setter.store.Get(ctx, orgID, id)
	if err != nil {
		return nil, err
	}

	objects := make([]*authtypes.Object, 0)
	for _, resource := range setter.GetResources(ctx) {
		if slices.Contains(authtypes.TypeableRelations[resource.Type], relation) {
			resourceObjects, err := setter.authz.ListObjects(
				ctx,
				authtypes.MustNewSubject(authtypes.TypeableRole, storableRole.ID.String(), orgID, &authtypes.RelationAssignee),
				relation,
				authtypes.MustNewTypeableFromType(resource.Type, resource.Name),
			)
			if err != nil {
				return nil, err
			}

			objects = append(objects, resourceObjects...)
		}
	}

	return objects, nil
}

func (setter *setter) Patch(ctx context.Context, orgID valuer.UUID, role *roletypes.Role) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return setter.store.Update(ctx, orgID, roletypes.NewStorableRoleFromRole(role))
}

func (setter *setter) PatchObjects(ctx context.Context, orgID valuer.UUID, name string, relation authtypes.Relation, additions, deletions []*authtypes.Object) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	additionTuples, err := roletypes.GetAdditionTuples(name, orgID, relation, additions)
	if err != nil {
		return err
	}

	deletionTuples, err := roletypes.GetDeletionTuples(name, orgID, relation, deletions)
	if err != nil {
		return err
	}

	err = setter.authz.Write(ctx, additionTuples, deletionTuples)
	if err != nil {
		return err
	}

	return nil
}

func (setter *setter) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUID) error {
	_, err := setter.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storableRole, err := setter.store.Get(ctx, orgID, id)
	if err != nil {
		return err
	}

	role := roletypes.NewRoleFromStorableRole(storableRole)
	err = role.CanEditDelete()
	if err != nil {
		return err
	}

	return setter.store.Delete(ctx, orgID, id)
}

func (setter *setter) MustGetTypeables() []authtypes.Typeable {
	return []authtypes.Typeable{authtypes.TypeableRole, roletypes.TypeableResourcesRoles}
}
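For orientation, a minimal usage sketch of this Setter, mirroring the deletePublic flow shown earlier in this diff: get or create a managed role, then patch its readable objects. The function name, the resource parameter, and the surrounding wiring are invented for illustration; only the Setter methods and the roletypes/authtypes constructors come from the diff above.

```go
// Sketch only: assumes a role.Setter and an orgID are already available.
func revokeAnonymousRead(ctx context.Context, setter role.Setter, orgID valuer.UUID, resource authtypes.Resource, resourceID string) error {
	// Get (or lazily create) the managed anonymous role for this org.
	anonymous, err := setter.GetOrCreate(ctx, orgID, roletypes.NewRole(
		roletypes.SigNozAnonymousRoleName,
		roletypes.SigNozAnonymousRoleDescription,
		roletypes.RoleTypeManaged,
		orgID,
	))
	if err != nil {
		return err
	}

	// Build the object whose read relation should be removed from the role.
	object := authtypes.MustNewObject(resource, authtypes.MustNewSelector(resource.Type, resourceID))

	// nil additions, one deletion: drop read access for the anonymous role.
	return setter.PatchObjects(ctx, orgID, anonymous.Name, authtypes.RelationRead, nil, []*authtypes.Object{object})
}
```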
@@ -50,7 +50,7 @@ type GetAnomaliesResponse struct {
//
// ^ ^
// | |
// (rounded value for past peiod) + (seasonal growth)
// (rounded value for past period) + (seasonal growth)
//
// score = abs(value - prediction) / stddev (current_season_query)
type anomalyQueryParams struct {
@@ -74,12 +74,12 @@ type anomalyQueryParams struct {
// : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
// : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
PastSeasonQuery *v3.QueryRangeParamsV3
// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
// Past2SeasonQuery is the query range params for past 2 seasonal periods to the current season
// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
// : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
// : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
Past2SeasonQuery *v3.QueryRangeParamsV3
// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
// Past3SeasonQuery is the query range params for past 3 seasonal periods to the current season
// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
// : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
// : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)
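The comments in the hunk above describe the anomaly detector's prediction and score: the expected value for the current period is the rounded value for the past period plus the seasonal growth, and the score is the absolute deviation from that prediction divided by the standard deviation of the current season. A minimal Go sketch of that arithmetic follows; all names are invented for illustration, and how seasonal growth is derived (difference of season means) is an assumption, since the actual provider assembles these inputs from the several query-range results listed above.

```go
package anomaly

import "math"

// predict estimates the expected value for the current period:
// rounded value for the past period plus an assumed seasonal growth
// (mean of the current season minus mean of the past season).
func predict(pastPeriodValue, currentSeasonMean, pastSeasonMean float64) float64 {
	seasonalGrowth := currentSeasonMean - pastSeasonMean
	return math.Round(pastPeriodValue) + seasonalGrowth
}

// anomalyScore follows score = abs(value - prediction) / stddev(current season).
func anomalyScore(value, prediction, currentSeasonStdDev float64) float64 {
	if currentSeasonStdDev == 0 {
		return 0 // guard against a flat season; the real provider may handle this differently
	}
	return math.Abs(value-prediction) / currentSeasonStdDev
}
```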
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/SigNoz/signoz/ee/query-service/usage"
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
"github.com/SigNoz/signoz/pkg/apis/fields"
|
||||
"github.com/SigNoz/signoz/pkg/global"
|
||||
"github.com/SigNoz/signoz/pkg/http/middleware"
|
||||
querierAPI "github.com/SigNoz/signoz/pkg/querier"
|
||||
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
|
||||
@@ -19,8 +20,8 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/version"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
@@ -35,10 +36,8 @@ type APIHandlerOptions struct {
|
||||
Gateway *httputil.ReverseProxy
|
||||
GatewayUrl string
|
||||
// Querier Influx Interval
|
||||
FluxInterval time.Duration
|
||||
UseLogsNewSchema bool
|
||||
UseTraceNewSchema bool
|
||||
JWT *authtypes.JWT
|
||||
FluxInterval time.Duration
|
||||
GlobalConfig global.Config
|
||||
}
type APIHandler struct {
|
||||
@@ -60,6 +59,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
|
||||
FieldsAPI: fields.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.TelemetryStore),
|
||||
Signoz: signoz,
|
||||
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
|
||||
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
@@ -92,9 +92,6 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
|
||||
// routes available only in ee version
|
||||
router.HandleFunc("/api/v1/features", am.ViewAccess(ah.getFeatureFlags)).Methods(http.MethodGet)
|
||||
|
||||
// paid plans specific routes
|
||||
router.HandleFunc("/api/v1/complete/saml", am.OpenAccess(ah.receiveSAML)).Methods(http.MethodPost)
|
||||
|
||||
// base overrides
|
||||
router.HandleFunc("/api/v1/version", am.OpenAccess(ah.getVersion)).Methods(http.MethodGet)
@@ -1,107 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"go.uber.org/zap"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
func handleSsoError(w http.ResponseWriter, r *http.Request, redirectURL string) {
|
||||
ssoError := []byte("Login failed. Please contact your system administrator")
|
||||
dst := make([]byte, base64.StdEncoding.EncodedLen(len(ssoError)))
|
||||
base64.StdEncoding.Encode(dst, ssoError)
|
||||
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectURL, string(dst)), http.StatusSeeOther)
|
||||
}
|
||||
|
||||
// receiveSAML completes a SAML request and gets user logged in
|
||||
func (ah *APIHandler) receiveSAML(w http.ResponseWriter, r *http.Request) {
|
||||
// this is the source url that initiated the login request
|
||||
redirectUri := constants.GetDefaultSiteURL()
|
||||
ctx := context.Background()
|
||||
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
// the relay state is sent when a login request is submitted to
|
||||
// Idp.
|
||||
relayState := r.FormValue("RelayState")
|
||||
zap.L().Debug("[receiveML] relay state", zap.String("relayState", relayState))
|
||||
|
||||
parsedState, err := url.Parse(relayState)
|
||||
if err != nil || relayState == "" {
|
||||
zap.L().Error("[receiveSAML] failed to process response - invalid response from IDP", zap.Error(err), zap.Any("request", r))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
// upgrade redirect url from the relay state for better accuracy
|
||||
redirectUri = fmt.Sprintf("%s://%s%s", parsedState.Scheme, parsedState.Host, "/login")
|
||||
|
||||
// fetch domain by parsing relay state.
|
||||
domain, err := ah.Signoz.Modules.User.GetDomainFromSsoResponse(ctx, parsedState)
|
||||
if err != nil {
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(domain.OrgID)
|
||||
if err != nil {
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
_, err = ah.Signoz.Licensing.GetActive(ctx, orgID)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] sso requested but feature unavailable in org domain")
|
||||
http.Redirect(w, r, fmt.Sprintf("%s?ssoerror=%s", redirectUri, "feature unavailable, please upgrade your billing plan to access this feature"), http.StatusMovedPermanently)
|
||||
return
|
||||
}
|
||||
|
||||
sp, err := domain.PrepareSamlRequest(parsedState)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to prepare saml request for domain", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
assertionInfo, err := sp.RetrieveAssertionInfo(r.FormValue("SAMLResponse"))
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to retrieve assertion info from saml response", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
if assertionInfo.WarningInfo.InvalidTime {
|
||||
zap.L().Error("[receiveSAML] expired saml response", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
email := assertionInfo.NameID
|
||||
if email == "" {
|
||||
zap.L().Error("[receiveSAML] invalid email in the SSO response", zap.String("domain", domain.String()))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
nextPage, err := ah.Signoz.Modules.User.PrepareSsoRedirect(ctx, redirectUri, email)
|
||||
if err != nil {
|
||||
zap.L().Error("[receiveSAML] failed to generate redirect URI after successful login ", zap.String("domain", domain.String()), zap.Error(err))
|
||||
handleSsoError(w, r, redirectUri)
|
||||
return
|
||||
}
|
||||
|
||||
http.Redirect(w, r, nextPage, http.StatusSeeOther)
|
||||
}
|
||||
@@ -10,7 +10,6 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
@@ -77,7 +76,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
return
|
||||
}
|
||||
|
||||
ingestionUrl, signozApiUrl, apiErr := getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
|
||||
signozApiUrl, apiErr := ah.getIngestionUrlAndSigNozAPIUrl(r.Context(), license.Key)
|
||||
if apiErr != nil {
|
||||
RespondError(w, basemodel.WrapApiError(
|
||||
apiErr, "couldn't deduce ingestion url and signoz api url",
|
||||
@@ -85,7 +84,7 @@ func (ah *APIHandler) CloudIntegrationsGenerateConnectionParams(w http.ResponseW
|
||||
return
|
||||
}
|
||||
|
||||
result.IngestionUrl = ingestionUrl
|
||||
result.IngestionUrl = ah.opts.GlobalConfig.IngestionURL.String()
|
||||
result.SigNozAPIUrl = signozApiUrl
|
||||
|
||||
gatewayUrl := ah.opts.GatewayUrl
|
||||
@@ -168,95 +167,66 @@ func (ah *APIHandler) getOrCreateCloudIntegrationPAT(ctx context.Context, orgId
|
||||
func (ah *APIHandler) getOrCreateCloudIntegrationUser(
|
||||
ctx context.Context, orgId string, cloudProvider string,
|
||||
) (*types.User, *basemodel.ApiError) {
|
||||
cloudIntegrationUser := fmt.Sprintf("%s-integration", cloudProvider)
|
||||
email := fmt.Sprintf("%s@signoz.io", cloudIntegrationUser)
|
||||
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
|
||||
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
|
||||
|
||||
integrationUserResult, err := ah.Signoz.Modules.User.GetUserByEmailInOrg(ctx, orgId, email)
|
||||
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
|
||||
return nil, basemodel.NotFoundError(fmt.Errorf("couldn't look for integration user: %w", err))
|
||||
}
|
||||
|
||||
if integrationUserResult != nil {
|
||||
return &integrationUserResult.User, nil
|
||||
}
|
||||
|
||||
zap.L().Info(
|
||||
"cloud integration user not found. Attempting to create the user",
|
||||
zap.String("cloudProvider", cloudProvider),
|
||||
)
|
||||
|
||||
newUser, err := types.NewUser(cloudIntegrationUser, email, types.RoleViewer.String(), orgId)
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't create cloud integration user: %w", err,
|
||||
))
|
||||
}
|
||||
|
||||
password := types.MustGenerateFactorPassword(newUser.ID.StringValue())
|
||||
|
||||
err = ah.Signoz.Modules.User.CreateUser(ctx, newUser, user.WithFactorPassword(password))
|
||||
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
|
||||
}
|
||||
|
||||
return newUser, nil
|
||||
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
|
||||
|
||||
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
|
||||
if err != nil {
|
||||
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
|
||||
}
|
||||
|
||||
return cloudIntegrationUser, nil
|
||||
}
|
||||
|
||||
func getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
|
||||
string, string, *basemodel.ApiError,
|
||||
func (ah *APIHandler) getIngestionUrlAndSigNozAPIUrl(ctx context.Context, licenseKey string) (
|
||||
string, *basemodel.ApiError,
|
||||
) {
|
||||
url := fmt.Sprintf(
|
||||
"%s%s",
|
||||
strings.TrimSuffix(constants.ZeusURL, "/"),
|
||||
"/v2/deployments/me",
|
||||
)
|
||||
|
||||
// TODO: remove this struct from here
|
||||
type deploymentResponse struct {
|
||||
Status string `json:"status"`
|
||||
Error string `json:"error"`
|
||||
Data struct {
|
||||
Name string `json:"name"`
|
||||
|
||||
ClusterInfo struct {
|
||||
Region struct {
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
} `json:"data"`
|
||||
Name string `json:"name"`
|
||||
ClusterInfo struct {
|
||||
Region struct {
|
||||
DNS string `json:"dns"`
|
||||
} `json:"region"`
|
||||
} `json:"cluster"`
|
||||
}
|
||||
|
||||
resp, apiErr := requestAndParseResponse[deploymentResponse](
|
||||
ctx, url, map[string]string{"X-Signoz-Cloud-Api-Key": licenseKey}, nil,
|
||||
)
|
||||
|
||||
if apiErr != nil {
|
||||
return "", "", basemodel.WrapApiError(
|
||||
apiErr, "couldn't query for deployment info",
|
||||
)
|
||||
}
|
||||
|
||||
if resp.Status != "success" {
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't query for deployment info: status: %s, error: %s",
|
||||
resp.Status, resp.Error,
|
||||
respBytes, err := ah.Signoz.Zeus.GetDeployment(ctx, licenseKey)
|
||||
if err != nil {
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't query for deployment info: error: %w", err,
|
||||
))
|
||||
}
|
||||
|
||||
regionDns := resp.Data.ClusterInfo.Region.DNS
|
||||
deploymentName := resp.Data.Name
|
||||
resp := new(deploymentResponse)
|
||||
|
||||
err = json.Unmarshal(respBytes, resp)
|
||||
if err != nil {
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"couldn't unmarshal deployment info response: error: %w", err,
|
||||
))
|
||||
}
|
||||
|
||||
regionDns := resp.ClusterInfo.Region.DNS
|
||||
deploymentName := resp.Name
|
||||
|
||||
if len(regionDns) < 1 || len(deploymentName) < 1 {
|
||||
// Fail early if actual response structure and expectation here ever diverge
|
||||
return "", "", basemodel.InternalError(fmt.Errorf(
|
||||
return "", basemodel.InternalError(fmt.Errorf(
|
||||
"deployment info response not in expected shape. couldn't determine region dns and deployment name",
|
||||
))
|
||||
}
|
||||
|
||||
ingestionUrl := fmt.Sprintf("https://ingest.%s", regionDns)
|
||||
|
||||
signozApiUrl := fmt.Sprintf("https://%s.%s", deploymentName, regionDns)
|
||||
|
||||
return ingestionUrl, signozApiUrl, nil
|
||||
return signozApiUrl, nil
|
||||
}
type ingestionKey struct {
@@ -9,9 +9,10 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/constants"
|
||||
pkgError "github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/http/render"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/licensetypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"go.uber.org/zap"
|
||||
@@ -25,11 +26,7 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
orgID, err := valuer.NewUUID(claims.OrgID)
|
||||
if err != nil {
|
||||
render.Error(w, pkgError.Newf(pkgError.TypeInvalidInput, pkgError.CodeInvalidInput, "orgId is invalid"))
|
||||
return
|
||||
}
|
||||
orgID := valuer.MustNewUUID(claims.OrgID)
|
||||
|
||||
featureSet, err := ah.Signoz.Licensing.GetFeatureFlags(r.Context(), orgID)
|
||||
if err != nil {
|
||||
@@ -59,13 +56,16 @@ func (ah *APIHandler) getFeatureFlags(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
}
|
||||
|
||||
if constants.IsPreferSpanMetrics {
|
||||
for idx, feature := range featureSet {
|
||||
if feature.Name == licensetypes.UseSpanMetrics {
|
||||
featureSet[idx].Active = true
|
||||
}
|
||||
}
|
||||
}
|
||||
evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
|
||||
useSpanMetrics := ah.Signoz.Flagger.BooleanOrEmpty(ctx, flagger.FeatureUseSpanMetrics, evalCtx)
|
||||
|
||||
featureSet = append(featureSet, &licensetypes.Feature{
|
||||
Name: valuer.NewString(flagger.FeatureUseSpanMetrics.String()),
|
||||
Active: useSpanMetrics,
|
||||
Usage: 0,
|
||||
UsageLimit: -1,
|
||||
Route: "",
|
||||
})
|
||||
|
||||
if constants.IsDotMetricsEnabled {
|
||||
for idx, feature := range featureSet {
|
||||
|
||||
@@ -257,18 +257,20 @@ func (aH *APIHandler) queryRangeV5(rw http.ResponseWriter, req *http.Request) {
|
||||
results = append(results, item)
|
||||
}
|
||||
|
||||
// Build step intervals from the anomaly query
|
||||
stepIntervals := make(map[string]uint64)
|
||||
if anomalyQuery.StepInterval.Duration > 0 {
|
||||
stepIntervals[anomalyQuery.Name] = uint64(anomalyQuery.StepInterval.Duration.Seconds())
|
||||
}
|
||||
|
||||
finalResp := &qbtypes.QueryRangeResponse{
|
||||
Type: queryRangeRequest.RequestType,
|
||||
Data: struct {
|
||||
Results []any `json:"results"`
|
||||
}{
|
||||
Data: qbtypes.QueryData{
|
||||
Results: results,
|
||||
},
|
||||
Meta: struct {
|
||||
RowsScanned uint64 `json:"rowsScanned"`
|
||||
BytesScanned uint64 `json:"bytesScanned"`
|
||||
DurationMS uint64 `json:"durationMs"`
|
||||
}{},
|
||||
Meta: qbtypes.ExecStats{
|
||||
StepIntervals: stepIntervals,
|
||||
},
|
||||
}
|
||||
|
||||
render.Success(rw, http.StatusOK, finalResp)
|
||||
|
||||
@@ -3,12 +3,18 @@ package app
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net"
|
||||
"net/http"
|
||||
_ "net/http/pprof" // http profiler
|
||||
"slices"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/cache/memorycache"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/ruler/rulestore/sqlrulestore"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux"
|
||||
"go.opentelemetry.io/otel/propagation"
|
||||
|
||||
"github.com/gorilla/handlers"
|
||||
|
||||
@@ -25,7 +31,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/signoz"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/types/authtypes"
|
||||
"github.com/SigNoz/signoz/pkg/web"
|
||||
"github.com/rs/cors"
|
||||
"github.com/soheilhy/cmux"
|
||||
@@ -50,7 +55,6 @@ import (
|
||||
type Server struct {
|
||||
config signoz.Config
|
||||
signoz *signoz.SigNoz
|
||||
jwt *authtypes.JWT
|
||||
ruleManager *baserules.Manager
|
||||
|
||||
// public http router
|
||||
@@ -67,19 +71,32 @@ type Server struct {
|
||||
}
|
||||
|
||||
// NewServer creates and initializes Server
|
||||
func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT) (*Server, error) {
|
||||
func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
|
||||
gatewayProxy, err := gateway.NewProxy(config.Gateway.URL.String(), gateway.RoutePrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cacheForTraceDetail, err := memorycache.New(context.TODO(), signoz.Instrumentation.ToProviderSettings(), cache.Config{
|
||||
Provider: "memory",
|
||||
Memory: cache.Memory{
|
||||
NumCounters: 10 * 10000,
|
||||
MaxCost: 1 << 27, // 128 MB
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
reader := clickhouseReader.NewReader(
|
||||
signoz.SQLStore,
|
||||
signoz.TelemetryStore,
|
||||
signoz.Prometheus,
|
||||
signoz.TelemetryStore.Cluster(),
|
||||
config.Querier.FluxInterval,
|
||||
cacheForTraceDetail,
|
||||
signoz.Cache,
|
||||
nil,
|
||||
)
|
||||
|
||||
rm, err := makeRulesManager(
|
||||
@@ -88,10 +105,12 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
|
||||
signoz.Alertmanager,
|
||||
signoz.SQLStore,
|
||||
signoz.TelemetryStore,
|
||||
signoz.TelemetryMetadataStore,
|
||||
signoz.Prometheus,
|
||||
signoz.Modules.OrgGetter,
|
||||
signoz.Querier,
|
||||
signoz.Instrumentation.Logger(),
|
||||
signoz.Instrumentation.ToProviderSettings(),
|
||||
signoz.QueryParser,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -153,7 +172,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
|
||||
FluxInterval: config.Querier.FluxInterval,
|
||||
Gateway: gatewayProxy,
|
||||
GatewayUrl: config.Gateway.URL.String(),
|
||||
JWT: jwt,
|
||||
GlobalConfig: config.Global,
|
||||
}
|
||||
|
||||
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
|
||||
@@ -164,7 +183,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz, jwt *authtypes.JWT)
|
||||
s := &Server{
|
||||
config: config,
|
||||
signoz: signoz,
|
||||
jwt: jwt,
|
||||
ruleManager: rm,
|
||||
httpHostPort: baseconst.HTTPHostPort,
|
||||
unavailableChannel: make(chan healthcheck.Status),
|
||||
@@ -193,9 +211,19 @@ func (s Server) HealthCheckStatus() chan healthcheck.Status {
|
||||
|
||||
func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*http.Server, error) {
|
||||
r := baseapp.NewRouter()
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger())
|
||||
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz, s.signoz.Modules.RoleGetter)
|
||||
|
||||
r.Use(middleware.NewAuth(s.jwt, []string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Instrumentation.Logger()).Wrap)
|
||||
r.Use(otelmux.Middleware(
|
||||
"apiserver",
|
||||
otelmux.WithMeterProvider(s.signoz.Instrumentation.MeterProvider()),
|
||||
otelmux.WithTracerProvider(s.signoz.Instrumentation.TracerProvider()),
|
||||
otelmux.WithPropagators(propagation.NewCompositeTextMapPropagator(propagation.Baggage{}, propagation.TraceContext{})),
|
||||
otelmux.WithFilter(func(r *http.Request) bool {
|
||||
return !slices.Contains([]string{"/api/v1/health"}, r.URL.Path)
|
||||
}),
|
||||
otelmux.WithPublicEndpoint(),
|
||||
))
|
||||
r.Use(middleware.NewAuthN([]string{"Authorization", "Sec-WebSocket-Protocol"}, s.signoz.Sharder, s.signoz.Tokenizer, s.signoz.Instrumentation.Logger()).Wrap)
|
||||
r.Use(middleware.NewAPIKey(s.signoz.SQLStore, []string{"SIGNOZ-API-KEY"}, s.signoz.Instrumentation.Logger(), s.signoz.Sharder).Wrap)
|
||||
r.Use(middleware.NewTimeout(s.signoz.Instrumentation.Logger(),
|
||||
s.config.APIServer.Timeout.ExcludedRoutes,
|
||||
@@ -220,6 +248,11 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
apiHandler.MetricExplorerRoutes(r, am)
|
||||
apiHandler.RegisterTraceFunnelsRoutes(r, am)
|
||||
|
||||
err := s.signoz.APIServer.AddToRouter(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
c := cors.New(cors.Options{
|
||||
AllowedOrigins: []string{"*"},
|
||||
AllowedMethods: []string{"GET", "DELETE", "POST", "PUT", "PATCH", "OPTIONS"},
|
||||
@@ -230,7 +263,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
|
||||
|
||||
handler = handlers.CompressHandler(handler)
|
||||
|
||||
err := web.AddToRouter(r)
|
||||
err = web.AddToRouter(r)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -325,28 +358,19 @@ func (s *Server) Stop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func makeRulesManager(
|
||||
ch baseint.Reader,
|
||||
cache cache.Cache,
|
||||
alertmanager alertmanager.Alertmanager,
|
||||
sqlstore sqlstore.SQLStore,
|
||||
telemetryStore telemetrystore.TelemetryStore,
|
||||
prometheus prometheus.Prometheus,
|
||||
orgGetter organization.Getter,
|
||||
querier querier.Querier,
|
||||
logger *slog.Logger,
|
||||
) (*baserules.Manager, error) {
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlstore)
|
||||
func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, metadataStore telemetrytypes.MetadataStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
|
||||
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
|
||||
maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
|
||||
// create manager opts
|
||||
managerOpts := &baserules.ManagerOptions{
|
||||
TelemetryStore: telemetryStore,
|
||||
MetadataStore: metadataStore,
|
||||
Prometheus: prometheus,
|
||||
Context: context.Background(),
|
||||
Logger: zap.L(),
|
||||
Reader: ch,
|
||||
Querier: querier,
|
||||
SLogger: logger,
|
||||
SLogger: providerSettings.Logger,
|
||||
Cache: cache,
|
||||
EvalDelay: baseconst.GetEvalDelay(),
|
||||
PrepareTaskFunc: rules.PrepareTaskFunc,
|
||||
@@ -356,6 +380,7 @@ func makeRulesManager(
|
||||
RuleStore: ruleStore,
|
||||
MaintenanceStore: maintenanceStore,
|
||||
SqlStore: sqlstore,
|
||||
QueryParser: queryParser,
|
||||
}
|
||||
|
||||
// create Manager
|
||||
|
||||
@@ -4,19 +4,12 @@ import (
|
||||
"os"
|
||||
)
|
||||
|
||||
const (
|
||||
DefaultSiteURL = "https://localhost:8080"
|
||||
)
|
||||
|
||||
var LicenseSignozIo = "https://license.signoz.io/api/v1"
|
||||
var LicenseAPIKey = GetOrDefaultEnv("SIGNOZ_LICENSE_API_KEY", "")
|
||||
var SaasSegmentKey = GetOrDefaultEnv("SIGNOZ_SAAS_SEGMENT_KEY", "")
|
||||
var FetchFeatures = GetOrDefaultEnv("FETCH_FEATURES", "false")
|
||||
var ZeusFeaturesURL = GetOrDefaultEnv("ZEUS_FEATURES_URL", "ZeusFeaturesURL")
|
||||
|
||||
// this is set via build time variable
|
||||
var ZeusURL = "https://api.signoz.cloud"
|
||||
|
||||
func GetOrDefaultEnv(key string, fallback string) string {
|
||||
v := os.Getenv(key)
|
||||
if len(v) == 0 {
|
||||
@@ -27,24 +20,12 @@ func GetOrDefaultEnv(key string, fallback string) string {
|
||||
|
||||
// constant functions that override env vars
|
||||
|
||||
// GetDefaultSiteURL returns default site url, primarily
|
||||
// used to send saml request and allowing backend to
|
||||
// handle http redirect
|
||||
func GetDefaultSiteURL() string {
|
||||
return GetOrDefaultEnv("SIGNOZ_SITE_URL", DefaultSiteURL)
|
||||
}
|
||||
|
||||
const DotMetricsEnabled = "DOT_METRICS_ENABLED"
|
||||
|
||||
var IsDotMetricsEnabled = false
|
||||
var IsPreferSpanMetrics = false
|
||||
|
||||
func init() {
|
||||
if GetOrDefaultEnv(DotMetricsEnabled, "true") == "true" {
|
||||
IsDotMetricsEnabled = true
|
||||
}
|
||||
|
||||
if GetOrDefaultEnv("USE_SPAN_METRICS", "false") == "true" {
|
||||
IsPreferSpanMetrics = true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,11 +78,6 @@ func NewAnomalyRule(
|
||||
|
||||
opts = append(opts, baserules.WithLogger(logger))
|
||||
|
||||
if p.RuleCondition.CompareOp == ruletypes.ValueIsBelow {
|
||||
target := -1 * *p.RuleCondition.Target
|
||||
p.RuleCondition.Target = &target
|
||||
}
|
||||
|
||||
baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -239,19 +234,25 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
|
||||
}
|
||||
}
|
||||
|
||||
hasData := len(queryResult.AnomalyScores) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
|
||||
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
|
||||
|
||||
for _, series := range queryResult.AnomalyScores {
|
||||
if r.Condition() != nil && r.Condition().RequireMinPoints {
|
||||
if len(series.Points) < r.Condition().RequiredNumPoints {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
if !r.Condition().ShouldEval(series) {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
results, err := r.Threshold.ShouldAlert(*series)
|
||||
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
|
||||
ActiveAlerts: r.ActiveAlertsLabelFP(),
|
||||
SendUnmatched: r.ShouldSendUnmatched(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -289,19 +290,37 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
|
||||
|
||||
queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)
|
||||
|
||||
hasData := len(queryResult.AnomalyScores) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
|
||||
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
|
||||
|
||||
for _, series := range queryResult.AnomalyScores {
|
||||
if r.Condition().RequireMinPoints {
|
||||
if len(series.Points) < r.Condition().RequiredNumPoints {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
// Filter out new series if newGroupEvalDelay is configured
|
||||
seriesToProcess := queryResult.AnomalyScores
|
||||
if r.ShouldSkipNewGroups() {
|
||||
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
|
||||
// In case of error we log the error and continue with the original series
|
||||
if filterErr != nil {
|
||||
r.logger.ErrorContext(ctx, "Error filtering new series, ", "error", filterErr, "rule_name", r.Name())
|
||||
} else {
|
||||
seriesToProcess = filteredSeries
|
||||
}
|
||||
results, err := r.Threshold.ShouldAlert(*series)
|
||||
}
|
||||
|
||||
for _, series := range seriesToProcess {
|
||||
if !r.Condition().ShouldEval(series) {
|
||||
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
|
||||
continue
|
||||
}
|
||||
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
|
||||
ActiveAlerts: r.ActiveAlertsLabelFP(),
|
||||
SendUnmatched: r.ShouldSendUnmatched(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -310,7 +329,7 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,
|
||||
return resultVector, nil
|
||||
}
|
||||
|
||||
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, error) {
|
||||
func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
||||
|
||||
prevState := r.State()
|
||||
|
||||
@@ -327,7 +346,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
r.mtx.Lock()
|
||||
@@ -336,14 +355,19 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
resultFPs := map[uint64]struct{}{}
|
||||
var alerts = make(map[uint64]*ruletypes.Alert, len(res))
|
||||
|
||||
ruleReceivers := r.Threshold.GetRuleReceivers()
|
||||
ruleReceiverMap := make(map[string][]string)
|
||||
for _, value := range ruleReceivers {
|
||||
ruleReceiverMap[value.Name] = value.Channels
|
||||
}
|
||||
|
||||
for _, smpl := range res {
|
||||
l := make(map[string]string, len(smpl.Metric))
|
||||
for _, lbl := range smpl.Metric {
|
||||
l[lbl.Name] = lbl.Value
|
||||
}
|
||||
|
||||
value := valueFormatter.Format(smpl.V, r.Unit())
|
||||
threshold := valueFormatter.Format(r.TargetVal(), r.Unit())
|
||||
threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
|
||||
r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)
|
||||
|
||||
tmplData := ruletypes.AlertTemplateData(l, value, threshold)
|
||||
@@ -387,6 +411,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
}
|
||||
if smpl.IsMissing {
|
||||
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
|
||||
lb.Set(labels.NoDataLabel, "true")
|
||||
}
|
||||
|
||||
lbs := lb.Labels()
|
||||
@@ -396,7 +421,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
if _, ok := alerts[h]; ok {
|
||||
r.logger.ErrorContext(ctx, "the alert query returns duplicate records", "rule_id", r.ID(), "alert", alerts[h])
|
||||
err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
|
||||
return nil, err
|
||||
return 0, err
|
||||
}
|
||||
|
||||
alerts[h] = &ruletypes.Alert{
|
||||
@@ -407,13 +432,13 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
State: model.StatePending,
|
||||
Value: smpl.V,
|
||||
GeneratorURL: r.GeneratorURL(),
|
||||
Receivers: r.PreferredChannels(),
|
||||
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
|
||||
Missing: smpl.IsMissing,
|
||||
IsRecovering: smpl.IsRecovering,
|
||||
}
|
||||
}
|
||||
|
||||
r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))
|
||||
|
||||
// alerts[h] is ready, add or update active list now
|
||||
for h, a := range alerts {
|
||||
// Check whether we already have alerting state for the identifying label set.
|
||||
@@ -422,7 +447,12 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
|
||||
alert.Value = a.Value
|
||||
alert.Annotations = a.Annotations
|
||||
alert.Receivers = r.PreferredChannels()
|
||||
// Update the recovering and missing state of existing alert
|
||||
alert.IsRecovering = a.IsRecovering
|
||||
alert.Missing = a.Missing
|
||||
if v, ok := alert.Labels.Map()[ruletypes.LabelThresholdName]; ok {
|
||||
alert.Receivers = ruleReceiverMap[v]
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -478,6 +508,30 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (interface{}, erro
|
||||
Value: a.Value,
|
||||
})
|
||||
}
|
||||
|
||||
// We need to change firing alert to recovering if the returned sample meets recovery threshold
|
||||
changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
|
||||
// We need to change recovering alerts to firing if the returned sample meets target threshold
|
||||
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
|
||||
// in any of the above case we need to update the status of alert
|
||||
if changeFiringToRecovering || changeRecoveringToFiring {
|
||||
state := model.StateRecovering
|
||||
if changeRecoveringToFiring {
|
||||
state = model.StateFiring
|
||||
}
|
||||
a.State = state
|
||||
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
|
||||
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
|
||||
RuleID: r.ID(),
|
||||
RuleName: r.Name(),
|
||||
State: state,
|
||||
StateChanged: true,
|
||||
UnixMilli: ts.UnixMilli(),
|
||||
Labels: model.LabelsString(labelsJSON),
|
||||
Fingerprint: a.QueryResultLables.Hash(),
|
||||
Value: a.Value,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
currentState := r.State()
|
||||
|
||||
ee/query-service/rules/anomaly_test.go (new file)
@@ -0,0 +1,268 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/SigNoz/signoz/ee/query-service/anomaly"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
|
||||
// We need this because the anomaly provider makes 6 different queries for various
|
||||
// time periods (current, past period, current season, past season, past 2 seasons,
|
||||
// past 3 seasons), making it cumbersome to create mock data.
|
||||
type mockAnomalyProvider struct {
|
||||
responses []*anomaly.GetAnomaliesResponse
|
||||
callCount int
|
||||
}
|
||||
|
||||
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
|
||||
if m.callCount >= len(m.responses) {
|
||||
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
|
||||
}
|
||||
resp := m.responses[m.callCount]
|
||||
m.callCount++
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
|
||||
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)
|
||||
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalWindow := 5 * time.Minute
|
||||
evalTime := baseTime.Add(5 * time.Minute)
|
||||
|
||||
target := 500.0
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test anomaly no data",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: RuleTypeAnomaly,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(evalWindow),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
Target: &target,
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"A": {
|
||||
QueryName: "A",
|
||||
Expression: "A",
|
||||
DataSource: v3.DataSourceMetrics,
|
||||
Temporality: v3.Unspecified,
|
||||
},
|
||||
},
|
||||
},
|
||||
SelectedQuery: "A",
|
||||
Seasonality: "daily",
|
||||
Thresholds: &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{{
|
||||
Name: "Test anomaly no data",
|
||||
TargetValue: &target,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
responseNoData := &anomaly.GetAnomaliesResponse{
|
||||
Results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
AnomalyScores: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
description string
|
||||
alertOnAbsent bool
|
||||
expectAlerts int
|
||||
}{
|
||||
{
|
||||
description: "AlertOnAbsent=false",
|
||||
alertOnAbsent: false,
|
||||
expectAlerts: 0,
|
||||
},
|
||||
{
|
||||
description: "AlertOnAbsent=true",
|
||||
alertOnAbsent: true,
|
||||
expectAlerts: 1,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.description, func(t *testing.T) {
|
||||
postableRule.RuleCondition.AlertOnAbsent = c.alertOnAbsent
|
||||
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
||||
|
||||
rule, err := NewAnomalyRule(
|
||||
"test-anomaly-rule",
|
||||
valuer.GenerateUUID(),
|
||||
&postableRule,
|
||||
reader,
|
||||
nil,
|
||||
logger,
|
||||
nil,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule.provider = &mockAnomalyProvider{
|
||||
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
|
||||
}
|
||||
|
||||
alertsFound, err := rule.Eval(context.Background(), evalTime)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, c.expectAlerts, alertsFound)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
|
||||
// Test missing data alert with AbsentFor grace period
|
||||
// 1. Call Eval with data at time t1, to populate lastTimestampWithDatapoints
|
||||
// 2. Call Eval without data at time t2
|
||||
// 3. Alert fires only if t2 - t1 > AbsentFor
|
||||
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalWindow := 5 * time.Minute
|
||||
|
||||
// Set target higher than test data so regular threshold alerts don't fire
|
||||
target := 500.0
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test anomaly no data with AbsentFor",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: RuleTypeAnomaly,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(evalWindow),
|
||||
Frequency: ruletypes.Duration(time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
AlertOnAbsent: true,
|
||||
Target: &target,
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypeBuilder,
|
||||
BuilderQueries: map[string]*v3.BuilderQuery{
|
||||
"A": {
|
||||
QueryName: "A",
|
||||
Expression: "A",
|
||||
DataSource: v3.DataSourceMetrics,
|
||||
Temporality: v3.Unspecified,
|
||||
},
|
||||
},
|
||||
},
|
||||
SelectedQuery: "A",
|
||||
Seasonality: "daily",
|
||||
Thresholds: &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{{
|
||||
Name: "Test anomaly no data with AbsentFor",
|
||||
TargetValue: &target,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
responseNoData := &anomaly.GetAnomaliesResponse{
|
||||
Results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
AnomalyScores: []*v3.Series{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
description string
|
||||
absentFor uint64
|
||||
timeBetweenEvals time.Duration
|
||||
expectAlertOnEval2 int
|
||||
}{
|
||||
{
|
||||
description: "WithinGracePeriod",
|
||||
absentFor: 5,
|
||||
timeBetweenEvals: 4 * time.Minute,
|
||||
expectAlertOnEval2: 0,
|
||||
},
|
||||
{
|
||||
description: "AfterGracePeriod",
|
||||
absentFor: 5,
|
||||
timeBetweenEvals: 6 * time.Minute,
|
||||
expectAlertOnEval2: 1,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.description, func(t *testing.T) {
|
||||
postableRule.RuleCondition.AbsentFor = c.absentFor
|
||||
|
||||
t1 := baseTime.Add(5 * time.Minute)
|
||||
t2 := t1.Add(c.timeBetweenEvals)
|
||||
|
||||
responseWithData := &anomaly.GetAnomaliesResponse{
|
||||
Results: []*v3.Result{
|
||||
{
|
||||
QueryName: "A",
|
||||
AnomalyScores: []*v3.Series{
|
||||
{
|
||||
Labels: map[string]string{"test": "label"},
|
||||
Points: []v3.Point{
|
||||
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
|
||||
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
|
||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)
|
||||
|
||||
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
rule.provider = &mockAnomalyProvider{
|
||||
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
|
||||
}
|
||||
|
||||
alertsFound1, err := rule.Eval(context.Background(), t1)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 0, alertsFound1, "First eval with data should not alert")
|
||||
|
||||
alertsFound2, err := rule.Eval(context.Background(), t2)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, c.expectAlertOnEval2, alertsFound2)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -10,7 +10,7 @@ import (
|
||||
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/google/uuid"
|
||||
"go.uber.org/zap"
|
||||
@@ -37,6 +37,8 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
opts.SLogger,
|
||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -45,7 +47,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
|
||||
rules = append(rules, tr)
|
||||
|
||||
// create ch rule task for evalution
|
||||
// create ch rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {
|
||||
@@ -59,6 +61,8 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
opts.Reader,
|
||||
opts.ManagerOpts.Prometheus,
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -67,7 +71,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
|
||||
rules = append(rules, pr)
|
||||
|
||||
// create promql rule task for evalution
|
||||
// create promql rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
|
||||
@@ -82,6 +86,8 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
opts.Cache,
|
||||
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||
)
|
||||
if err != nil {
|
||||
return task, err
|
||||
@@ -89,7 +95,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
|
||||
|
||||
rules = append(rules, ar)
|
||||
|
||||
// create anomaly rule task for evalution
|
||||
// create anomaly rule task for evaluation
|
||||
task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
|
||||
|
||||
} else {
|
||||
@@ -126,7 +132,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
if parsedRule.RuleType == ruletypes.RuleTypeThreshold {
|
||||
|
||||
// add special labels for test alerts
|
||||
parsedRule.Annotations[labels.AlertSummaryLabel] = fmt.Sprintf("The rule threshold is set to %.4f, and the observed metric value is {{$value}}.", *parsedRule.RuleCondition.Target)
|
||||
parsedRule.Labels[labels.RuleSourceLabel] = ""
|
||||
parsedRule.Labels[labels.AlertRuleIdLabel] = ""
|
||||
|
||||
@@ -141,6 +146,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
baserules.WithSendAlways(),
|
||||
baserules.WithSendUnmatched(),
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -161,6 +168,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
baserules.WithSendAlways(),
|
||||
baserules.WithSendUnmatched(),
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
@@ -180,6 +189,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
baserules.WithSendAlways(),
|
||||
baserules.WithSendUnmatched(),
|
||||
baserules.WithSQLStore(opts.SQLStore),
|
||||
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
|
||||
baserules.WithMetadataStore(opts.ManagerOpts.MetadataStore),
|
||||
)
|
||||
if err != nil {
|
||||
zap.L().Error("failed to prepare a new anomaly rule for test", zap.String("name", alertname), zap.Error(err))
|
||||
@@ -192,16 +203,12 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
|
||||
// set timestamp to current utc time
|
||||
ts := time.Now().UTC()
|
||||
|
||||
count, err := rule.Eval(ctx, ts)
|
||||
alertsFound, err := rule.Eval(ctx, ts)
|
||||
if err != nil {
|
||||
zap.L().Error("evaluating rule failed", zap.String("rule", rule.Name()), zap.Error(err))
|
||||
return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
|
||||
}
|
||||
alertsFound, ok := count.(int)
|
||||
if !ok {
|
||||
return 0, basemodel.InternalError(fmt.Errorf("something went wrong"))
|
||||
}
|
||||
rule.SendAlerts(ctx, ts, 0, time.Duration(1*time.Minute), opts.NotifyFunc)
|
||||
rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)
|
||||
|
||||
return alertsFound, nil
|
||||
}
|
||||
|
||||
ee/query-service/rules/manager_test.go (new file)
@@ -0,0 +1,283 @@
|
||||
package rules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"math"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/alertmanager"
|
||||
alertmanagermock "github.com/SigNoz/signoz/pkg/alertmanager/alertmanagertest"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||
"github.com/SigNoz/signoz/pkg/prometheus/prometheustest"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/rules"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore/sqlstoretest"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
|
||||
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
cmock "github.com/srikanthccv/ClickHouse-go-mock"
|
||||
)
|
||||
|
||||
func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
|
||||
target := 10.0
|
||||
recovery := 5.0
|
||||
|
||||
for _, tc := range rules.TcTestNotiSendUnmatchedThresholdRule {
|
||||
t.Run(tc.Name, func(t *testing.T) {
|
||||
rule := rules.ThresholdRuleAtLeastOnceValueAbove(target, &recovery)
|
||||
|
||||
// Marshal rule to JSON as TestNotification expects
|
||||
ruleBytes, err := json.Marshal(rule)
|
||||
require.NoError(t, err)
|
||||
|
||||
orgID := valuer.GenerateUUID()
|
||||
|
||||
// for saving temp alerts that are triggered via TestNotification
|
||||
triggeredTestAlerts := []map[*alertmanagertypes.PostableAlert][]string{}
|
||||
|
||||
// Create manager using test factory with hooks
|
||||
mgr := rules.NewTestManager(t, &rules.TestManagerOptions{
|
||||
AlertmanagerHook: func(am alertmanager.Alertmanager) {
|
||||
fAlert := am.(*alertmanagermock.MockAlertmanager)
|
||||
// mock set notification config
|
||||
fAlert.On("SetNotificationConfig", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
|
||||
// for saving temp alerts that are triggered via TestNotification
|
||||
if tc.ExpectAlerts > 0 {
|
||||
fAlert.On("TestAlert", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
|
||||
triggeredTestAlerts = append(triggeredTestAlerts, args.Get(3).(map[*alertmanagertypes.PostableAlert][]string))
|
||||
}).Return(nil).Times(tc.ExpectAlerts)
|
||||
}
|
||||
},
|
||||
ManagerOptionsHook: func(opts *rules.ManagerOptions) {
|
||||
opts.PrepareTestRuleFunc = TestNotification
|
||||
},
|
||||
SqlStoreHook: func(store sqlstore.SQLStore) {
|
||||
mockStore := store.(*sqlstoretest.Provider)
|
||||
// Mock the organizations query that SendAlerts makes
|
||||
// Bun generates: SELECT id FROM organizations LIMIT 1 (or SELECT "id" FROM "organizations" LIMIT 1)
|
||||
orgRows := mockStore.Mock().NewRows([]string{"id"}).AddRow(orgID.StringValue())
|
||||
// Match bun's generated query pattern - bun may quote identifiers
|
||||
mockStore.Mock().ExpectQuery("SELECT (.+) FROM (.+)organizations(.+) LIMIT (.+)").WillReturnRows(orgRows)
|
||||
},
|
||||
TelemetryStoreHook: func(store telemetrystore.TelemetryStore) {
|
||||
telemetryStore := store.(*telemetrystoretest.Provider)
|
||||
// Set up mock data for telemetry store
|
||||
cols := make([]cmock.ColumnType, 0)
|
||||
cols = append(cols, cmock.ColumnType{Name: "value", Type: "Float64"})
|
||||
cols = append(cols, cmock.ColumnType{Name: "attr", Type: "String"})
|
||||
cols = append(cols, cmock.ColumnType{Name: "ts", Type: "DateTime"})
|
||||
|
||||
alertDataRows := cmock.NewRows(cols, tc.Values)
|
||||
|
||||
mock := telemetryStore.Mock()
|
||||
|
||||
// Generate query arguments for the metric query
|
||||
evalTime := time.Now().UTC()
|
||||
evalWindow := 5 * time.Minute
|
||||
evalDelay := time.Duration(0)
|
||||
queryArgs := rules.GenerateMetricQueryCHArgs(
|
||||
evalTime,
|
||||
evalWindow,
|
||||
evalDelay,
|
||||
"probe_success",
|
||||
metrictypes.Unspecified,
|
||||
)
|
||||
|
||||
mock.ExpectQuery("*WITH __temporal_aggregation_cte*").
|
||||
WithArgs(queryArgs...).
|
||||
WillReturnRows(alertDataRows)
|
||||
},
|
||||
})
|
||||
|
||||
count, apiErr := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
|
||||
if apiErr != nil {
|
||||
t.Logf("TestNotification error: %v, type: %s", apiErr.Err, apiErr.Typ)
|
||||
}
|
||||
require.Nil(t, apiErr)
|
||||
assert.Equal(t, tc.ExpectAlerts, count)
|
||||
|
||||
if tc.ExpectAlerts > 0 {
|
||||
// check if the alert has been triggered
|
||||
require.Len(t, triggeredTestAlerts, 1)
|
||||
var gotAlerts []*alertmanagertypes.PostableAlert
|
||||
for a := range triggeredTestAlerts[0] {
|
||||
gotAlerts = append(gotAlerts, a)
|
||||
}
|
||||
require.Len(t, gotAlerts, tc.ExpectAlerts)
|
||||
// check if the alert has triggered with correct threshold value
|
||||
if tc.ExpectValue != 0 {
|
||||
assert.Equal(t, strconv.FormatFloat(tc.ExpectValue, 'f', -1, 64), gotAlerts[0].Annotations["value"])
|
||||
}
|
||||
} else {
|
||||
// check if no alerts have been triggered
|
||||
assert.Empty(t, triggeredTestAlerts)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestManager_TestNotification_SendUnmatched_PromRule(t *testing.T) {
|
||||
target := 10.0
|
||||
|
||||
for _, tc := range rules.TcTestNotificationSendUnmatchedPromRule {
|
||||
t.Run(tc.Name, func(t *testing.T) {
|
||||
// Capture base time once per test case to ensure consistent timestamps
|
||||
baseTime := time.Now().UTC()
|
||||
|
||||
rule := rules.BuildPromAtLeastOnceValueAbove(target, nil)
|
||||
|
||||
// Marshal rule to JSON as TestNotification expects
|
||||
ruleBytes, err := json.Marshal(rule)
|
||||
require.NoError(t, err)
|
||||
|
||||
orgID := valuer.GenerateUUID()
|
||||
|
||||
// for saving temp alerts that are triggered via TestNotification
|
||||
triggeredTestAlerts := []map[*alertmanagertypes.PostableAlert][]string{}
|
||||
|
||||
// Variable to store promProvider for cleanup
|
||||
var promProvider *prometheustest.Provider
|
||||
|
||||
// Create manager using test factory with hooks
|
||||
mgr := rules.NewTestManager(t, &rules.TestManagerOptions{
|
||||
AlertmanagerHook: func(am alertmanager.Alertmanager) {
|
||||
mockAM := am.(*alertmanagermock.MockAlertmanager)
|
||||
// mock set notification config
|
||||
mockAM.On("SetNotificationConfig", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil)
|
||||
// for saving temp alerts that are triggered via TestNotification
|
||||
if tc.ExpectAlerts > 0 {
|
||||
mockAM.On("TestAlert", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
|
||||
triggeredTestAlerts = append(triggeredTestAlerts, args.Get(3).(map[*alertmanagertypes.PostableAlert][]string))
|
||||
}).Return(nil).Times(tc.ExpectAlerts)
|
||||
}
|
||||
},
|
||||
SqlStoreHook: func(store sqlstore.SQLStore) {
|
||||
mockStore := store.(*sqlstoretest.Provider)
|
||||
// Mock the organizations query that SendAlerts makes
|
||||
orgRows := mockStore.Mock().NewRows([]string{"id"}).AddRow(orgID.StringValue())
|
||||
mockStore.Mock().ExpectQuery("SELECT (.+) FROM (.+)organizations(.+) LIMIT (.+)").WillReturnRows(orgRows)
|
||||
},
|
||||
TelemetryStoreHook: func(store telemetrystore.TelemetryStore) {
|
||||
mockStore := store.(*telemetrystoretest.Provider)
|
||||
|
||||
// Set up Prometheus-specific mock data
|
||||
// Fingerprint columns for Prometheus queries
|
||||
fingerprintCols := []cmock.ColumnType{
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "any(labels)", Type: "String"},
|
||||
}
|
||||
|
||||
// Samples columns for Prometheus queries
|
||||
samplesCols := []cmock.ColumnType{
|
||||
{Name: "metric_name", Type: "String"},
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "unix_milli", Type: "Int64"},
|
||||
{Name: "value", Type: "Float64"},
|
||||
{Name: "flags", Type: "UInt32"},
|
||||
}
|
||||
|
||||
// Calculate query time range similar to Prometheus rule tests
|
||||
// TestNotification uses time.Now().UTC() for evaluation
|
||||
// We calculate the query window based on current time to match what the actual evaluation will use
|
||||
evalTime := baseTime
|
||||
evalWindowMs := int64(5 * 60 * 1000) // 5 minutes in ms
|
||||
evalTimeMs := evalTime.UnixMilli()
|
||||
queryStart := ((evalTimeMs-2*evalWindowMs)/60000)*60000 + 1 // truncate to minute + 1ms
|
||||
queryEnd := (evalTimeMs / 60000) * 60000 // truncate to minute
|
||||
|
||||
// Create fingerprint data
|
||||
fingerprint := uint64(12345)
|
||||
labelsJSON := `{"__name__":"test_metric"}`
|
||||
fingerprintData := [][]interface{}{
|
||||
{fingerprint, labelsJSON},
|
||||
}
|
||||
fingerprintRows := cmock.NewRows(fingerprintCols, fingerprintData)
|
||||
|
||||
// Create samples data from test case values, calculating timestamps relative to baseTime
|
||||
validSamplesData := make([][]interface{}, 0)
|
||||
for _, v := range tc.Values {
|
||||
// Skip NaN and Inf values in the samples data
|
||||
if math.IsNaN(v.Value) || math.IsInf(v.Value, 0) {
|
||||
continue
|
||||
}
|
||||
// Calculate timestamp relative to baseTime
|
||||
sampleTimestamp := baseTime.Add(v.Offset).UnixMilli()
|
||||
validSamplesData = append(validSamplesData, []interface{}{
|
||||
"test_metric",
|
||||
fingerprint,
|
||||
sampleTimestamp,
|
||||
v.Value,
|
||||
uint32(0), // flags - 0 means normal value
|
||||
})
|
||||
}
|
||||
samplesRows := cmock.NewRows(samplesCols, validSamplesData)
|
||||
|
||||
mock := mockStore.Mock()
|
||||
|
||||
// Mock the fingerprint query (for Prometheus label matching)
|
||||
mock.ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(fingerprintRows)
|
||||
|
||||
// Mock the samples query (for Prometheus metric data)
|
||||
mock.ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
|
||||
WithArgs(
|
||||
"test_metric",
|
||||
"test_metric",
|
||||
"__name__",
|
||||
"test_metric",
|
||||
queryStart,
|
||||
queryEnd,
|
||||
).
|
||||
WillReturnRows(samplesRows)
|
||||
|
||||
// Create Prometheus provider for this test
|
||||
promProvider = prometheustest.New(context.Background(), instrumentationtest.New().ToProviderSettings(), prometheus.Config{}, store)
|
||||
},
|
||||
ManagerOptionsHook: func(opts *rules.ManagerOptions) {
|
||||
// Set Prometheus provider for PromQL queries
|
||||
if promProvider != nil {
|
||||
opts.Prometheus = promProvider
|
||||
}
|
||||
opts.PrepareTestRuleFunc = TestNotification
|
||||
},
|
||||
})
|
||||
|
||||
count, apiErr := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
|
||||
if apiErr != nil {
|
||||
t.Logf("TestNotification error: %v, type: %s", apiErr.Err, apiErr.Typ)
|
||||
}
|
||||
require.Nil(t, apiErr)
|
||||
assert.Equal(t, tc.ExpectAlerts, count)
|
||||
|
||||
if tc.ExpectAlerts > 0 {
|
||||
// check if the alert has been triggered
|
||||
require.Len(t, triggeredTestAlerts, 1)
|
||||
var gotAlerts []*alertmanagertypes.PostableAlert
|
||||
for a := range triggeredTestAlerts[0] {
|
||||
gotAlerts = append(gotAlerts, a)
|
||||
}
|
||||
require.Len(t, gotAlerts, tc.ExpectAlerts)
|
||||
// check if the alert has triggered with correct threshold value
|
||||
if tc.ExpectValue != 0 && !math.IsNaN(tc.ExpectValue) && !math.IsInf(tc.ExpectValue, 0) {
|
||||
assert.Equal(t, strconv.FormatFloat(tc.ExpectValue, 'f', -1, 64), gotAlerts[0].Annotations["value"])
|
||||
}
|
||||
} else {
|
||||
// check if no alerts have been triggered
|
||||
assert.Empty(t, triggeredTestAlerts)
|
||||
}
|
||||
|
||||
promProvider.Close()
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -30,6 +30,8 @@ func (formatter Formatter) DataTypeOf(dataType string) sqlschema.DataType {
|
||||
return sqlschema.DataTypeBoolean
|
||||
case "VARCHAR", "CHARACTER VARYING", "CHARACTER":
|
||||
return sqlschema.DataTypeText
|
||||
case "BYTEA":
|
||||
return sqlschema.DataTypeBytea
|
||||
}
|
||||
|
||||
return formatter.Formatter.DataTypeOf(dataType)
|
||||
|
||||
@@ -2,6 +2,7 @@ package postgressqlschema
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||
@@ -47,50 +48,45 @@ func (provider *provider) Operator() sqlschema.SQLOperator {
|
||||
}
|
||||
|
||||
func (provider *provider) GetTable(ctx context.Context, tableName sqlschema.TableName) (*sqlschema.Table, []*sqlschema.UniqueConstraint, error) {
|
||||
rows, err := provider.
|
||||
columns := []struct {
|
||||
ColumnName string `bun:"column_name"`
|
||||
Nullable bool `bun:"nullable"`
|
||||
SQLDataType string `bun:"udt_name"`
|
||||
DefaultVal *string `bun:"column_default"`
|
||||
}{}
|
||||
|
||||
err := provider.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
QueryContext(ctx, `
|
||||
NewRaw(`
|
||||
SELECT
|
||||
c.column_name,
|
||||
c.is_nullable = 'YES',
|
||||
c.is_nullable = 'YES' as nullable,
|
||||
c.udt_name,
|
||||
c.column_default
|
||||
FROM
|
||||
information_schema.columns AS c
|
||||
WHERE
|
||||
c.table_name = ?`, string(tableName))
|
||||
c.table_name = ?`, string(tableName)).
|
||||
Scan(ctx, &columns)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if len(columns) == 0 {
|
||||
return nil, nil, sql.ErrNoRows
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := rows.Close(); err != nil {
|
||||
provider.settings.Logger().ErrorContext(ctx, "error closing rows", "error", err)
|
||||
}
|
||||
}()
|
||||
|
||||
columns := make([]*sqlschema.Column, 0)
|
||||
for rows.Next() {
|
||||
var (
|
||||
name string
|
||||
sqlDataType string
|
||||
nullable bool
|
||||
defaultVal *string
|
||||
)
|
||||
if err := rows.Scan(&name, &nullable, &sqlDataType, &defaultVal); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
sqlschemaColumns := make([]*sqlschema.Column, 0)
|
||||
for _, column := range columns {
|
||||
columnDefault := ""
|
||||
if defaultVal != nil {
|
||||
columnDefault = *defaultVal
|
||||
if column.DefaultVal != nil {
|
||||
columnDefault = *column.DefaultVal
|
||||
}
|
||||
|
||||
columns = append(columns, &sqlschema.Column{
|
||||
Name: sqlschema.ColumnName(name),
|
||||
Nullable: nullable,
|
||||
DataType: provider.fmter.DataTypeOf(sqlDataType),
|
||||
sqlschemaColumns = append(sqlschemaColumns, &sqlschema.Column{
|
||||
Name: sqlschema.ColumnName(column.ColumnName),
|
||||
Nullable: column.Nullable,
|
||||
DataType: provider.fmter.DataTypeOf(column.SQLDataType),
|
||||
Default: columnDefault,
|
||||
})
|
||||
}
|
||||
@@ -208,7 +204,7 @@ WHERE
|
||||
|
||||
return &sqlschema.Table{
|
||||
Name: tableName,
|
||||
Columns: columns,
|
||||
Columns: sqlschemaColumns,
|
||||
PrimaryKeyConstraint: primaryKeyConstraint,
|
||||
ForeignKeyConstraints: foreignKeyConstraints,
|
||||
}, uniqueConstraints, nil
|
||||
|
||||
ee/sqlstore/postgressqlstore/formatter.go (new file, 153 lines added)
@@ -0,0 +1,153 @@
|
||||
package postgressqlstore
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
"github.com/uptrace/bun/schema"
|
||||
)
|
||||
|
||||
type formatter struct {
|
||||
bunf schema.Formatter
|
||||
}
|
||||
|
||||
func newFormatter(dialect schema.Dialect) sqlstore.SQLFormatter {
|
||||
return &formatter{bunf: schema.NewFormatter(dialect)}
|
||||
}
|
||||
|
||||
func (f *formatter) JSONExtractString(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, f.convertJSONPathToPostgres(path)...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONType(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "jsonb_typeof("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONIsArray(column, path string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, f.JSONType(column, path)...)
|
||||
sql = append(sql, " = "...)
|
||||
sql = schema.Append(f.bunf, sql, "array")
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayElements(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "jsonb_array_elements("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, []byte(alias)
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayOfStrings(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "jsonb_array_elements_text("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, append([]byte(alias), "::text"...)
|
||||
}
|
||||
|
||||
func (f *formatter) JSONKeys(column, path, alias string) ([]byte, []byte) {
|
||||
var sql []byte
|
||||
sql = append(sql, "jsonb_each("...)
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, f.convertJSONPathToPostgresWithMode(path, false)...)
|
||||
sql = append(sql, ") AS "...)
|
||||
sql = f.bunf.AppendIdent(sql, alias)
|
||||
|
||||
return sql, append([]byte(alias), ".key"...)
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayAgg(expression string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "jsonb_agg("...)
|
||||
sql = append(sql, expression...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) JSONArrayLiteral(values ...string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "jsonb_build_array("...)
|
||||
for idx, value := range values {
|
||||
if idx > 0 {
|
||||
sql = append(sql, ", "...)
|
||||
}
|
||||
sql = schema.Append(f.bunf, sql, value)
|
||||
}
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) TextToJsonColumn(column string) []byte {
|
||||
var sql []byte
|
||||
sql = f.bunf.AppendIdent(sql, column)
|
||||
sql = append(sql, "::jsonb"...)
|
||||
return sql
|
||||
}
|
||||
|
||||
func (f *formatter) convertJSONPathToPostgres(jsonPath string) []byte {
|
||||
return f.convertJSONPathToPostgresWithMode(jsonPath, true)
|
||||
}
|
||||
|
||||
func (f *formatter) convertJSONPathToPostgresWithMode(jsonPath string, asText bool) []byte {
|
||||
path := strings.TrimPrefix(strings.TrimPrefix(jsonPath, "$"), ".")
|
||||
|
||||
if path == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
parts := strings.Split(path, ".")
|
||||
|
||||
var validParts []string
|
||||
for _, part := range parts {
|
||||
if part != "" {
|
||||
validParts = append(validParts, part)
|
||||
}
|
||||
}
|
||||
|
||||
if len(validParts) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var result []byte
|
||||
|
||||
for idx, part := range validParts {
|
||||
if idx == len(validParts)-1 {
|
||||
if asText {
|
||||
result = append(result, "->>"...)
|
||||
} else {
|
||||
result = append(result, "->"...)
|
||||
}
|
||||
result = schema.Append(f.bunf, result, part)
|
||||
return result
|
||||
}
|
||||
|
||||
result = append(result, "->"...)
|
||||
result = schema.Append(f.bunf, result, part)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (f *formatter) LowerExpression(expression string) []byte {
|
||||
var sql []byte
|
||||
sql = append(sql, "lower("...)
|
||||
sql = append(sql, expression...)
|
||||
sql = append(sql, ')')
|
||||
return sql
|
||||
}
|
||||
ee/sqlstore/postgressqlstore/formatter_test.go (new file, 500 lines added)
@@ -0,0 +1,500 @@
|
||||
package postgressqlstore
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/uptrace/bun/dialect/pgdialect"
|
||||
)
|
||||
|
||||
func TestJSONExtractString(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `"data"->>'field'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.name",
|
||||
expected: `"metadata"->'user'->>'name'`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.level1.level2.level3",
|
||||
expected: `"json_col"->'level1'->'level2'->>'level3'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `"json_col"`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `"data"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONExtractString(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONType(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.field",
|
||||
expected: `jsonb_typeof("data"->'field')`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.age",
|
||||
expected: `jsonb_typeof("metadata"->'user'->'age')`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `jsonb_typeof("json_col")`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `jsonb_typeof("data")`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONType(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONIsArray(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path",
|
||||
column: "data",
|
||||
path: "$.items",
|
||||
expected: `jsonb_typeof("data"->'items') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.user.tags",
|
||||
expected: `jsonb_typeof("metadata"->'user'->'tags') = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
column: "json_col",
|
||||
path: "$",
|
||||
expected: `jsonb_typeof("json_col") = 'array'`,
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
column: "data",
|
||||
path: "",
|
||||
expected: `jsonb_typeof("data") = 'array'`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONIsArray(tt.column, tt.path))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayElements(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "elem",
|
||||
expected: `jsonb_array_elements("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "elem",
|
||||
expected: `jsonb_array_elements("data") AS "elem"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.items",
|
||||
alias: "item",
|
||||
expected: `jsonb_array_elements("metadata"->'items') AS "item"`,
|
||||
},
|
||||
{
|
||||
name: "deeply nested path",
|
||||
column: "json_col",
|
||||
path: "$.user.tags",
|
||||
alias: "tag",
|
||||
expected: `jsonb_array_elements("json_col"->'user'->'tags') AS "tag"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONArrayElements(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayOfStrings(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "str",
|
||||
expected: `jsonb_array_elements_text("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "str",
|
||||
expected: `jsonb_array_elements_text("data") AS "str"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.strings",
|
||||
alias: "s",
|
||||
expected: `jsonb_array_elements_text("metadata"->'strings') AS "s"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONArrayOfStrings(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONKeys(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
path string
|
||||
alias string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "root path with dollar sign",
|
||||
column: "data",
|
||||
path: "$",
|
||||
alias: "k",
|
||||
expected: `jsonb_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "root path empty",
|
||||
column: "data",
|
||||
path: "",
|
||||
alias: "k",
|
||||
expected: `jsonb_each("data") AS "k"`,
|
||||
},
|
||||
{
|
||||
name: "nested path",
|
||||
column: "metadata",
|
||||
path: "$.object",
|
||||
alias: "key",
|
||||
expected: `jsonb_each("metadata"->'object') AS "key"`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got, _ := f.JSONKeys(tt.column, tt.path, tt.alias)
|
||||
assert.Equal(t, tt.expected, string(got))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayAgg(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expression string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column",
|
||||
expression: "id",
|
||||
expected: "jsonb_agg(id)",
|
||||
},
|
||||
{
|
||||
name: "expression with function",
|
||||
expression: "DISTINCT name",
|
||||
expected: "jsonb_agg(DISTINCT name)",
|
||||
},
|
||||
{
|
||||
name: "complex expression",
|
||||
expression: "data->>'field'",
|
||||
expected: "jsonb_agg(data->>'field')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONArrayAgg(tt.expression))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONArrayLiteral(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
values []string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "empty array",
|
||||
values: []string{},
|
||||
expected: "jsonb_build_array()",
|
||||
},
|
||||
{
|
||||
name: "single value",
|
||||
values: []string{"value1"},
|
||||
expected: "jsonb_build_array('value1')",
|
||||
},
|
||||
{
|
||||
name: "multiple values",
|
||||
values: []string{"value1", "value2", "value3"},
|
||||
expected: "jsonb_build_array('value1', 'value2', 'value3')",
|
||||
},
|
||||
{
|
||||
name: "values with special characters",
|
||||
values: []string{"test", "with space", "with-dash"},
|
||||
expected: "jsonb_build_array('test', 'with space', 'with-dash')",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.JSONArrayLiteral(tt.values...))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestConvertJSONPathToPostgresWithMode(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
jsonPath string
|
||||
asText bool
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple path as text",
|
||||
jsonPath: "$.field",
|
||||
asText: true,
|
||||
expected: "->>'field'",
|
||||
},
|
||||
{
|
||||
name: "simple path as json",
|
||||
jsonPath: "$.field",
|
||||
asText: false,
|
||||
expected: "->'field'",
|
||||
},
|
||||
{
|
||||
name: "nested path as text",
|
||||
jsonPath: "$.user.name",
|
||||
asText: true,
|
||||
expected: "->'user'->>'name'",
|
||||
},
|
||||
{
|
||||
name: "nested path as json",
|
||||
jsonPath: "$.user.name",
|
||||
asText: false,
|
||||
expected: "->'user'->'name'",
|
||||
},
|
||||
{
|
||||
name: "deeply nested as text",
|
||||
jsonPath: "$.a.b.c.d",
|
||||
asText: true,
|
||||
expected: "->'a'->'b'->'c'->>'d'",
|
||||
},
|
||||
{
|
||||
name: "root path",
|
||||
jsonPath: "$",
|
||||
asText: true,
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "empty path",
|
||||
jsonPath: "",
|
||||
asText: true,
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New()).(*formatter)
|
||||
got := string(f.convertJSONPathToPostgresWithMode(tt.jsonPath, tt.asText))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestTextToJsonColumn(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
column string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
column: "data",
|
||||
expected: `"data"::jsonb`,
|
||||
},
|
||||
{
|
||||
name: "column with underscore",
|
||||
column: "user_data",
|
||||
expected: `"user_data"::jsonb`,
|
||||
},
|
||||
{
|
||||
name: "column with special characters",
|
||||
column: "json-col",
|
||||
expected: `"json-col"::jsonb`,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.TextToJsonColumn(tt.column))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLowerExpression(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
expr string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "simple column name",
|
||||
expr: "name",
|
||||
expected: "lower(name)",
|
||||
},
|
||||
{
|
||||
name: "quoted column identifier",
|
||||
expr: `"column_name"`,
|
||||
expected: `lower("column_name")`,
|
||||
},
|
||||
{
|
||||
name: "jsonb text extraction",
|
||||
expr: "data->>'field'",
|
||||
expected: "lower(data->>'field')",
|
||||
},
|
||||
{
|
||||
name: "nested jsonb extraction",
|
||||
expr: "metadata->'user'->>'name'",
|
||||
expected: "lower(metadata->'user'->>'name')",
|
||||
},
|
||||
{
|
||||
name: "jsonb_typeof expression",
|
||||
expr: "jsonb_typeof(data->'field')",
|
||||
expected: "lower(jsonb_typeof(data->'field'))",
|
||||
},
|
||||
{
|
||||
name: "string concatenation",
|
||||
expr: "first_name || ' ' || last_name",
|
||||
expected: "lower(first_name || ' ' || last_name)",
|
||||
},
|
||||
{
|
||||
name: "CAST expression",
|
||||
expr: "CAST(value AS TEXT)",
|
||||
expected: "lower(CAST(value AS TEXT))",
|
||||
},
|
||||
{
|
||||
name: "COALESCE expression",
|
||||
expr: "COALESCE(name, 'default')",
|
||||
expected: "lower(COALESCE(name, 'default'))",
|
||||
},
|
||||
{
|
||||
name: "subquery column",
|
||||
expr: "users.email",
|
||||
expected: "lower(users.email)",
|
||||
},
|
||||
{
|
||||
name: "quoted identifier with special chars",
|
||||
expr: `"user-name"`,
|
||||
expected: `lower("user-name")`,
|
||||
},
|
||||
{
|
||||
name: "jsonb to text cast",
|
||||
expr: "data::text",
|
||||
expected: "lower(data::text)",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
f := newFormatter(pgdialect.New())
|
||||
got := string(f.LowerExpression(tt.expr))
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -15,10 +15,11 @@ import (
|
||||
)
|
||||
|
||||
type provider struct {
|
||||
settings factory.ScopedProviderSettings
|
||||
sqldb *sql.DB
|
||||
bundb *sqlstore.BunDB
|
||||
dialect *dialect
|
||||
settings factory.ScopedProviderSettings
|
||||
sqldb *sql.DB
|
||||
bundb *sqlstore.BunDB
|
||||
dialect *dialect
|
||||
formatter sqlstore.SQLFormatter
|
||||
}
|
||||
|
||||
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
|
||||
@@ -55,11 +56,14 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
||||
|
||||
sqldb := stdlib.OpenDBFromPool(pool)
|
||||
|
||||
pgDialect := pgdialect.New()
|
||||
bunDB := sqlstore.NewBunDB(settings, sqldb, pgDialect, hooks)
|
||||
return &provider{
|
||||
settings: settings,
|
||||
sqldb: sqldb,
|
||||
bundb: sqlstore.NewBunDB(settings, sqldb, pgdialect.New(), hooks),
|
||||
dialect: new(dialect),
|
||||
settings: settings,
|
||||
sqldb: sqldb,
|
||||
bundb: bunDB,
|
||||
dialect: new(dialect),
|
||||
formatter: newFormatter(bunDB.Dialect()),
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -75,6 +79,10 @@ func (provider *provider) Dialect() sqlstore.SQLDialect {
|
||||
return provider.dialect
|
||||
}
|
||||
|
||||
func (provider *provider) Formatter() sqlstore.SQLFormatter {
|
||||
return provider.formatter
|
||||
}
|
||||
|
||||
func (provider *provider) BunDBCtx(ctx context.Context) bun.IDB {
|
||||
return provider.bundb.BunDBCtx(ctx)
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
package zeus
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
neturl "net/url"
|
||||
"sync"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/zeus"
|
||||
)
|
||||
|
||||
@@ -24,17 +24,17 @@ func Config() zeus.Config {
|
||||
once.Do(func() {
|
||||
parsedURL, err := neturl.Parse(url)
|
||||
if err != nil {
|
||||
panic(fmt.Errorf("invalid zeus URL: %w", err))
|
||||
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus URL"))
|
||||
}
|
||||
|
||||
deprecatedParsedURL, err := neturl.Parse(deprecatedURL)
|
||||
if err != nil {
|
||||
panic(fmt.Errorf("invalid zeus deprecated URL: %w", err))
|
||||
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus deprecated URL"))
|
||||
}
|
||||
|
||||
config = zeus.Config{URL: parsedURL, DeprecatedURL: deprecatedParsedURL}
|
||||
if err := config.Validate(); err != nil {
|
||||
panic(fmt.Errorf("invalid zeus config: %w", err))
|
||||
panic(errors.WrapInternalf(err, errors.CodeInternal, "invalid zeus config"))
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
@@ -2,4 +2,6 @@ node_modules
build
*.typegen.ts
i18-generate-hash.js
src/parser/TraceOperatorParser/**
src/parser/TraceOperatorParser/**

orval.config.ts
|
||||
@@ -1,5 +1,8 @@
|
||||
/**
|
||||
* ESLint Configuration for SigNoz Frontend
|
||||
*/
|
||||
module.exports = {
|
||||
ignorePatterns: ['src/parser/*.ts'],
|
||||
ignorePatterns: ['src/parser/*.ts', 'scripts/update-registry.js'],
|
||||
env: {
|
||||
browser: true,
|
||||
es2021: true,
|
||||
@@ -7,16 +10,12 @@ module.exports = {
|
||||
'jest/globals': true,
|
||||
},
|
||||
extends: [
|
||||
'airbnb',
|
||||
'airbnb-typescript',
|
||||
'eslint:recommended',
|
||||
'plugin:react/recommended',
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
'plugin:@typescript-eslint/eslint-recommended',
|
||||
'plugin:react-hooks/recommended',
|
||||
'plugin:prettier/recommended',
|
||||
'plugin:sonarjs/recommended',
|
||||
'plugin:import/errors',
|
||||
'plugin:import/warnings',
|
||||
'plugin:react/jsx-runtime',
|
||||
],
|
||||
parser: '@typescript-eslint/parser',
|
||||
@@ -25,16 +24,21 @@ module.exports = {
|
||||
ecmaFeatures: {
|
||||
jsx: true,
|
||||
},
|
||||
ecmaVersion: 12,
|
||||
ecmaVersion: 2021,
|
||||
sourceType: 'module',
|
||||
},
|
||||
plugins: [
|
||||
'react',
|
||||
'@typescript-eslint',
|
||||
'simple-import-sort',
|
||||
'react-hooks',
|
||||
'prettier',
|
||||
'jest',
|
||||
'react', // React-specific rules
|
||||
'@typescript-eslint', // TypeScript linting
|
||||
'simple-import-sort', // Auto-sort imports
|
||||
'react-hooks', // React Hooks rules
|
||||
'prettier', // Code formatting
|
||||
'jest', // Jest test rules
|
||||
'jsx-a11y', // Accessibility rules
|
||||
'import', // Import/export linting
|
||||
'sonarjs', // Code quality/complexity
|
||||
// TODO: Uncomment after running: yarn add -D eslint-plugin-spellcheck
|
||||
// 'spellcheck', // Correct spellings
|
||||
],
|
||||
settings: {
|
||||
react: {
|
||||
@@ -48,77 +52,109 @@ module.exports = {
|
||||
},
|
||||
},
|
||||
rules: {
|
||||
// Code quality rules
|
||||
'prefer-const': 'error', // Enforces const for variables never reassigned
|
||||
'no-var': 'error', // Disallows var, enforces let/const
|
||||
'no-else-return': ['error', { allowElseIf: false }], // Reduces nesting by disallowing else after return
|
||||
'no-cond-assign': 'error', // Prevents accidental assignment in conditions (if (x = 1) instead of if (x === 1))
|
||||
'no-debugger': 'error', // Disallows debugger statements in production code
|
||||
curly: 'error', // Requires curly braces for all control statements
|
||||
eqeqeq: ['error', 'always', { null: 'ignore' }], // Enforces === and !== (allows == null for null/undefined check)
|
||||
'no-console': ['error', { allow: ['warn', 'error'] }], // Warns on console.log, allows console.warn/error
|
||||
|
||||
// TypeScript rules
|
||||
'@typescript-eslint/explicit-function-return-type': 'error', // Requires explicit return types on functions
|
||||
'@typescript-eslint/no-unused-vars': [
|
||||
// Disallows unused variables/args
|
||||
'error',
|
||||
{
|
||||
argsIgnorePattern: '^_', // Allows unused args prefixed with _ (e.g., _unusedParam)
|
||||
varsIgnorePattern: '^_', // Allows unused vars prefixed with _ (e.g., _unusedVar)
|
||||
},
|
||||
],
|
||||
'@typescript-eslint/no-explicit-any': 'warn', // Warns when using 'any' type (consider upgrading to error)
|
||||
// TODO: Change to 'error' after fixing ~80 empty function placeholders in providers/contexts
|
||||
'@typescript-eslint/no-empty-function': 'off', // Disallows empty function bodies
|
||||
'@typescript-eslint/no-var-requires': 'error', // Disallows require() in TypeScript (use import instead)
|
||||
'@typescript-eslint/ban-ts-comment': 'off', // Allows @ts-ignore comments (sometimes needed for third-party libs)
|
||||
'no-empty-function': 'off', // Disabled in favor of TypeScript version above
|
||||
|
||||
// React rules
|
||||
'react/jsx-filename-extension': [
|
||||
'error',
|
||||
{
|
||||
extensions: ['.tsx', '.js', '.jsx'],
|
||||
extensions: ['.tsx', '.jsx'], // Warns if JSX is used in non-.jsx/.tsx files
|
||||
},
|
||||
],
|
||||
'react/prop-types': 'off',
|
||||
'@typescript-eslint/explicit-function-return-type': 'error',
|
||||
'@typescript-eslint/no-var-requires': 'error',
|
||||
'react/no-array-index-key': 'error',
|
||||
'linebreak-style': [
|
||||
'error',
|
||||
process.env.platform === 'win32' ? 'windows' : 'unix',
|
||||
],
|
||||
'@typescript-eslint/default-param-last': 'off',
|
||||
'react/prop-types': 'off', // Disabled - using TypeScript instead
|
||||
'react/jsx-props-no-spreading': 'off', // Allows {...props} spreading (common in HOCs, forms, wrappers)
|
||||
'react/no-array-index-key': 'error', // Prevents using array index as key (causes bugs when list changes)
|
||||
|
||||
// simple sort error
|
||||
'simple-import-sort/imports': 'error',
|
||||
'simple-import-sort/exports': 'error',
|
||||
|
||||
// hooks
|
||||
'react-hooks/rules-of-hooks': 'error',
|
||||
'react-hooks/exhaustive-deps': 'error',
|
||||
|
||||
// airbnb
|
||||
'no-underscore-dangle': 'off',
|
||||
'no-console': 'off',
|
||||
'import/prefer-default-export': 'off',
|
||||
'import/extensions': [
|
||||
'error',
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'never',
|
||||
jsx: 'never',
|
||||
ts: 'never',
|
||||
tsx: 'never',
|
||||
},
|
||||
],
|
||||
'import/no-extraneous-dependencies': ['error', { devDependencies: true }],
|
||||
'no-plusplus': 'off',
|
||||
// Accessibility rules
|
||||
'jsx-a11y/label-has-associated-control': [
|
||||
'error',
|
||||
{
|
||||
required: {
|
||||
some: ['nesting', 'id'],
|
||||
some: ['nesting', 'id'], // Labels must either wrap inputs or use htmlFor/id
|
||||
},
|
||||
},
|
||||
],
|
||||
'jsx-a11y/label-has-for': [
|
||||
|
||||
// React Hooks rules
|
||||
'react-hooks/rules-of-hooks': 'error', // Enforces Rules of Hooks (only call at top level)
|
||||
'react-hooks/exhaustive-deps': 'warn', // Warns about missing dependencies in useEffect/useMemo/useCallback
|
||||
|
||||
// Import/export rules
|
||||
'import/extensions': [
|
||||
'error',
|
||||
'ignorePackages',
|
||||
{
|
||||
js: 'never', // Disallows .js extension in imports
|
||||
jsx: 'never', // Disallows .jsx extension in imports
|
||||
ts: 'never', // Disallows .ts extension in imports
|
||||
tsx: 'never', // Disallows .tsx extension in imports
|
||||
},
|
||||
],
|
||||
'import/no-extraneous-dependencies': ['error', { devDependencies: true }], // Prevents importing packages not in package.json
|
||||
// 'import/no-cycle': 'warn', // TODO: Enable later to detect circular dependencies
|
||||
|
||||
// Import sorting rules
|
||||
'simple-import-sort/imports': [
|
||||
'error',
|
||||
{
|
||||
required: {
|
||||
some: ['nesting', 'id'],
|
||||
},
|
||||
groups: [
|
||||
['^react', '^@?\\w'], // React first, then external packages
|
||||
['^@/'], // Absolute imports with @ alias
|
||||
['^\\u0000'], // Side effect imports (import './file')
|
||||
['^\\.'], // Relative imports
|
||||
['^.+\\.s?css$'], // Style imports
|
||||
],
|
||||
},
|
||||
],
|
||||
'@typescript-eslint/no-unused-vars': 'error',
|
||||
'func-style': ['error', 'declaration', { allowArrowFunctions: true }],
|
||||
'arrow-body-style': ['error', 'as-needed'],
|
||||
'simple-import-sort/exports': 'error', // Auto-sorts exports
|
||||
|
||||
// eslint rules need to remove
|
||||
'@typescript-eslint/no-shadow': 'off',
|
||||
'import/no-cycle': 'off',
|
||||
// https://typescript-eslint.io/rules/consistent-return/ check the warning for details
|
||||
'consistent-return': 'off',
|
||||
// Prettier - code formatting
|
||||
'prettier/prettier': [
|
||||
'error',
|
||||
{},
|
||||
{
|
||||
usePrettierrc: true,
|
||||
usePrettierrc: true, // Uses .prettierrc.json for formatting rules
|
||||
},
|
||||
],
|
||||
|
||||
// SonarJS - code quality and complexity
|
||||
'sonarjs/no-duplicate-string': 'off', // Disabled - can be noisy (enable periodically to check)
|
||||
},
|
||||
overrides: [
|
||||
{
|
||||
files: ['src/api/generated/**/*.ts'],
|
||||
rules: {
|
||||
'@typescript-eslint/explicit-function-return-type': 'off',
|
||||
'@typescript-eslint/explicit-module-boundary-types': 'off',
|
||||
'no-nested-ternary': 'off',
|
||||
'@typescript-eslint/no-unused-vars': 'warn',
|
||||
'sonarjs/no-duplicate-string': 'off',
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
@@ -4,5 +4,14 @@
"tabWidth": 1,
"singleQuote": true,
"jsxSingleQuote": false,
"semi": true
"semi": true,
"printWidth": 80,
"bracketSpacing": true,
"jsxBracketSameLine": false,
"arrowParens": "always",
"endOfLine": "lf",
"quoteProps": "as-needed",
"proseWrap": "preserve",
"htmlWhitespaceSensitivity": "css",
"embeddedLanguageFormatting": "auto"
}
|
||||
|
||||
frontend/.vscode/settings.json (new vendored file, 8 lines added)
@@ -0,0 +1,8 @@
|
||||
{
"editor.formatOnSave": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.codeActionsOnSave": {
"source.fixAll.eslint": "explicit"
},
"prettier.requireConfig": true
}
|
||||
@@ -34,12 +34,18 @@ Embrace the spirit of collaboration and contribute to the success of our open-so
|
||||
### Linting and Setup
|
||||
|
||||
- It is crucial to refrain from disabling ESLint and TypeScript errors within the project. If there is a specific rule that needs to be disabled, provide a clear and justified explanation for doing so. Maintaining the integrity of the linting and type-checking processes ensures code quality and consistency throughout the codebase. A small illustrative example of a justified, narrowly scoped disable appears at the end of this section.
|
||||
- In our project, we rely on several essential ESLint plugins, namely:
|
||||
- [plugin:@typescript-eslint](https://typescript-eslint.io/rules/)
|
||||
- [airbnb styleguide](https://github.com/airbnb/javascript)
|
||||
- [plugin:sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs)
|
||||
- In our project, we rely on several essential ESLint plugins and configurations:
|
||||
|
||||
To ensure compliance with our coding standards and best practices, we encourage you to refer to the documentation of these plugins. Familiarizing yourself with the ESLint rules they provide will help maintain code quality and consistency throughout the project.
|
||||
- [eslint:recommended](https://eslint.org/docs/latest/rules/) - Core ESLint rules for JavaScript best practices
|
||||
- [plugin:@typescript-eslint](https://typescript-eslint.io/rules/) - TypeScript-specific linting rules
|
||||
- [plugin:react](https://github.com/jsx-eslint/eslint-plugin-react) - React best practices and patterns
|
||||
- [plugin:react-hooks](https://www.npmjs.com/package/eslint-plugin-react-hooks) - Rules of Hooks enforcement
|
||||
- [plugin:sonarjs](https://github.com/SonarSource/eslint-plugin-sonarjs) - Code quality and complexity analysis
|
||||
- [plugin:prettier](https://github.com/prettier/eslint-plugin-prettier) - Code formatting via Prettier
|
||||
- [simple-import-sort](https://github.com/lydell/eslint-plugin-simple-import-sort) - Automatic import organization
|
||||
- [plugin:jsx-a11y](https://github.com/jsx-eslint/eslint-plugin-jsx-a11y) - Accessibility rules for JSX elements
|
||||
|
||||
To ensure compliance with our coding standards and best practices, we encourage you to refer to the documentation of these plugins. Familiarizing yourself with the ESLint rules they provide will help maintain code quality and consistency throughout the project.
|
||||
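To make the guideline about justified rule disables concrete, here is a minimal, hypothetical TypeScript sketch (an editorial illustration, not code from this repository); the function name and the chosen rule are assumptions for the example only:

```ts
// Illustrative only: shows a narrowly scoped, documented rule disable.
export function parseRetryConfig(raw: string): { retries: number } {
	// Justification: JSON.parse returns `any`; the value is validated immediately
	// below, so the explicit `any` stays confined to this single line.
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	const parsed: any = JSON.parse(raw);
	const retries = typeof parsed?.retries === 'number' ? parsed.retries : 0;
	return { retries };
}
```

The disable covers exactly one line and the comment records why the exception is safe, which keeps the lint signal meaningful for the rest of the file.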
|
||||
### Naming Conventions
|
||||
|
||||
|
||||
@@ -3,5 +3,6 @@ BUNDLE_ANALYSER="true"
|
||||
FRONTEND_API_ENDPOINT="http://localhost:8080/"
|
||||
PYLON_APP_ID="pylon-app-id"
|
||||
APPCUES_APP_ID="appcess-app-id"
|
||||
PYLON_IDENTITY_SECRET="pylon-identity-secret"
|
||||
|
||||
CI="1"
|
||||
@@ -3,6 +3,7 @@ import type { Config } from '@jest/types';
|
||||
const USE_SAFE_NAVIGATE_MOCK_PATH = '<rootDir>/__mocks__/useSafeNavigate.ts';
|
||||
|
||||
const config: Config.InitialOptions = {
|
||||
silent: true,
|
||||
clearMocks: true,
|
||||
coverageDirectory: 'coverage',
|
||||
coverageReporters: ['text', 'cobertura', 'html', 'json-summary'],
|
||||
|
||||
frontend/orval.config.ts (new file, 97 lines added)
@@ -0,0 +1,97 @@
|
||||
/**
|
||||
* When making changes to this file, remove the file name from .eslintignore and tsconfig.json.
|
||||
* This is required because moduleResolution is set to "node"; changing that is a more involved effort.
|
||||
* So, until then, we will keep this file ignored for eslint and typescript.
|
||||
*/
|
||||
|
||||
import { defineConfig } from 'orval';
|
||||
|
||||
export default defineConfig({
|
||||
signoz: {
|
||||
input: {
|
||||
target: '../docs/api/openapi.yml',
|
||||
},
|
||||
output: {
|
||||
target: './src/api/generated/services',
|
||||
client: 'react-query',
|
||||
httpClient: 'axios',
|
||||
mode: 'tags-split',
|
||||
prettier: true,
|
||||
headers: true,
|
||||
clean: true,
|
||||
override: {
|
||||
query: {
|
||||
useQuery: true,
|
||||
useMutation: true,
|
||||
useInvalidate: true,
|
||||
signal: true,
|
||||
useOperationIdAsQueryKey: true,
|
||||
},
|
||||
useDates: true,
|
||||
useNamedParameters: true,
|
||||
enumGenerationType: 'enum',
|
||||
mutator: {
|
||||
path: './src/api/index.ts',
|
||||
name: 'GeneratedAPIInstance',
|
||||
},
|
||||
|
||||
jsDoc: {
|
||||
filter: (schema) => {
|
||||
const allowlist = [
|
||||
'type',
|
||||
'format',
|
||||
'maxLength',
|
||||
'minLength',
|
||||
'description',
|
||||
'minimum',
|
||||
'maximum',
|
||||
'exclusiveMinimum',
|
||||
'exclusiveMaximum',
|
||||
'pattern',
|
||||
'nullable',
|
||||
'enum',
|
||||
];
|
||||
return Object.entries(schema || {})
|
||||
.filter(([key]) => allowlist.includes(key))
|
||||
.map(([key, value]: [string, any]) => ({
|
||||
key,
|
||||
value,
|
||||
}))
|
||||
.sort((a, b) => a.key.length - b.key.length);
|
||||
},
|
||||
},
|
||||
|
||||
components: {
|
||||
schemas: {
|
||||
suffix: 'DTO',
|
||||
},
|
||||
responses: {
|
||||
suffix: 'Response',
|
||||
},
|
||||
parameters: {
|
||||
suffix: 'Params',
|
||||
},
|
||||
requestBodies: {
|
||||
suffix: 'Body',
|
||||
},
|
||||
},
|
||||
|
||||
// info is of type InfoObject from openapi spec
|
||||
header: (info: { title: string; version: string }): string[] => [
|
||||
`! Do not edit manually`,
|
||||
`* The file has been auto-generated using Orval for SigNoz`,
|
||||
`* regenerate with 'yarn generate:api'`,
|
||||
...(info.title ? [info.title] : []),
|
||||
...(info.version ? [`OpenAPI spec version: ${info.version}`] : []),
|
||||
],
|
||||
|
||||
// @ts-expect-error
|
||||
// propertySortOrder, urlEncodeParameters, aliasCombinedTypes
|
||||
// are valid options in the document without types
|
||||
propertySortOrder: 'Alphabetical',
|
||||
urlEncodeParameters: true,
|
||||
aliasCombinedTypes: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -14,11 +14,12 @@
|
||||
"jest": "jest",
|
||||
"jest:coverage": "jest --coverage",
|
||||
"jest:watch": "jest --watch",
|
||||
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure)",
|
||||
"postinstall": "yarn i18n:generate-hash && (is-ci || yarn husky:configure) && node scripts/update-registry.js",
|
||||
"husky:configure": "cd .. && husky install frontend/.husky && cd frontend && chmod ug+x .husky/*",
|
||||
"commitlint": "commitlint --edit $1",
|
||||
"test": "jest",
|
||||
"test:changedsince": "jest --changedSince=main --coverage --silent"
|
||||
"test:changedsince": "jest --changedSince=main --coverage --silent",
|
||||
"generate:api": "orval --config ./orval.config.ts && sh scripts/post-types-generation.sh && prettier --write src/api/generated && (eslint --fix src/api/generated || true)"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16.15.0"
|
||||
@@ -38,7 +39,7 @@
|
||||
"@mdx-js/loader": "2.3.0",
|
||||
"@mdx-js/react": "2.3.0",
|
||||
"@monaco-editor/react": "^4.3.1",
|
||||
"@playwright/test": "1.54.1",
|
||||
"@playwright/test": "1.55.1",
|
||||
"@radix-ui/react-tabs": "1.0.4",
|
||||
"@radix-ui/react-tooltip": "1.0.7",
|
||||
"@sentry/react": "8.41.0",
|
||||
@@ -47,6 +48,9 @@
|
||||
"@signozhq/button": "0.0.2",
|
||||
"@signozhq/calendar": "0.0.0",
|
||||
"@signozhq/callout": "0.0.2",
|
||||
"@signozhq/checkbox": "0.0.2",
|
||||
"@signozhq/combobox": "0.0.2",
|
||||
"@signozhq/command": "0.0.0",
|
||||
"@signozhq/design-tokens": "1.1.4",
|
||||
"@signozhq/input": "0.0.2",
|
||||
"@signozhq/popover": "0.0.0",
|
||||
@@ -69,7 +73,7 @@
|
||||
"antd": "5.11.0",
|
||||
"antd-table-saveas-excel": "2.2.1",
|
||||
"antlr4": "4.13.2",
|
||||
"axios": "1.8.2",
|
||||
"axios": "1.12.0",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-jest": "^29.6.4",
|
||||
"babel-loader": "9.1.3",
|
||||
@@ -83,6 +87,7 @@
|
||||
"color": "^4.2.1",
|
||||
"color-alpha": "1.1.3",
|
||||
"cross-env": "^7.0.3",
|
||||
"crypto-js": "4.2.0",
|
||||
"css-loader": "5.0.0",
|
||||
"css-minimizer-webpack-plugin": "5.0.1",
|
||||
"d3-hierarchy": "3.1.2",
|
||||
@@ -100,9 +105,9 @@
|
||||
"i18next": "^21.6.12",
|
||||
"i18next-browser-languagedetector": "^6.1.3",
|
||||
"i18next-http-backend": "^1.3.2",
|
||||
"immer": "11.1.3",
|
||||
"jest": "^27.5.1",
|
||||
"js-base64": "^3.7.2",
|
||||
"kbar": "0.1.0-beta.48",
|
||||
"less": "^4.1.2",
|
||||
"less-loader": "^10.2.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
@@ -112,7 +117,7 @@
|
||||
"overlayscrollbars": "^2.8.1",
|
||||
"overlayscrollbars-react": "^0.5.6",
|
||||
"papaparse": "5.4.1",
|
||||
"posthog-js": "1.215.5",
|
||||
"posthog-js": "1.298.0",
|
||||
"rc-tween-one": "3.0.6",
|
||||
"react": "18.2.0",
|
||||
"react-addons-update": "15.6.3",
|
||||
@@ -149,7 +154,6 @@
|
||||
"tsconfig-paths-webpack-plugin": "^3.5.1",
|
||||
"typescript": "^4.0.5",
|
||||
"uplot": "1.6.31",
|
||||
"userpilot": "1.3.9",
|
||||
"uuid": "^8.3.2",
|
||||
"web-vitals": "^0.2.4",
|
||||
"webpack": "5.94.0",
|
||||
@@ -186,6 +190,7 @@
|
||||
"@types/color": "^3.0.3",
|
||||
"@types/compression-webpack-plugin": "^9.0.0",
|
||||
"@types/copy-webpack-plugin": "^8.0.1",
|
||||
"@types/crypto-js": "4.2.2",
|
||||
"@types/dompurify": "^2.4.0",
|
||||
"@types/event-source-polyfill": "^1.0.0",
|
||||
"@types/fontfaceobserver": "2.1.0",
|
||||
@@ -217,16 +222,11 @@
|
||||
"compression-webpack-plugin": "9.0.0",
|
||||
"copy-webpack-plugin": "^11.0.0",
|
||||
"eslint": "^7.32.0",
|
||||
"eslint-config-airbnb": "^19.0.4",
|
||||
"eslint-config-airbnb-typescript": "^16.1.4",
|
||||
"eslint-config-prettier": "^8.3.0",
|
||||
"eslint-config-standard": "^16.0.3",
|
||||
"eslint-plugin-import": "^2.28.1",
|
||||
"eslint-plugin-jest": "^26.9.0",
|
||||
"eslint-plugin-jsx-a11y": "^6.5.1",
|
||||
"eslint-plugin-node": "^11.1.0",
|
||||
"eslint-plugin-prettier": "^4.0.0",
|
||||
"eslint-plugin-promise": "^5.1.0",
|
||||
"eslint-plugin-react": "^7.24.0",
|
||||
"eslint-plugin-react-hooks": "^4.3.0",
|
||||
"eslint-plugin-simple-import-sort": "^7.0.0",
|
||||
@@ -240,6 +240,7 @@
|
||||
"lint-staged": "^12.5.0",
|
||||
"msw": "1.3.2",
|
||||
"npm-run-all": "latest",
|
||||
"orval": "7.18.0",
|
||||
"portfinder-sync": "^0.0.2",
|
||||
"postcss": "8.4.38",
|
||||
"prettier": "2.2.1",
|
||||
@@ -279,6 +280,8 @@
|
||||
"prismjs": "1.30.0",
|
||||
"got": "11.8.5",
|
||||
"form-data": "4.0.4",
|
||||
"brace-expansion": "^2.0.2"
|
||||
"brace-expansion": "^2.0.2",
|
||||
"on-headers": "^1.1.0",
|
||||
"tmp": "0.2.4"
|
||||
}
|
||||
}
|
||||
|
||||
frontend/public/Images/cloud.svg (new file, 81 lines added)
@@ -0,0 +1,81 @@
|
||||
|
||||
<svg
|
||||
width="32"
|
||||
height="32"
|
||||
viewBox="0 0 32 32"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<path
|
||||
d="M19.11 16.8483L14.0369 17.7304C14.0369 17.7304 12.3481 22.0324 12.2437 22.3235C12.1392 22.6146 12.1437 23.0746 12.6037 23.0746C13.1881 23.0746 16.1546 23.0591 16.1546 23.0591C16.1546 23.0591 15.4346 26.5322 15.3924 26.8433C15.3502 27.1544 15.6835 27.4277 15.9768 27.1144C16.2701 26.8011 20.3121 21.6058 20.4877 21.3525C20.801 20.8992 20.4988 20.4814 20.1433 20.4614C19.7877 20.4414 17.4056 20.4925 17.4056 20.4925L19.11 16.8483Z"
|
||||
fill="#FECA18"
|
||||
/>
|
||||
<path
|
||||
d="M17.7589 17.4527C17.7589 17.4527 16.6279 19.9548 16.5856 20.097C16.4612 20.5192 17.0078 20.6903 17.1634 20.3481C17.3189 20.0037 18.6655 17.2194 18.6655 17.2194L17.7589 17.4527Z"
|
||||
fill="#FDB900"
|
||||
/>
|
||||
<path
|
||||
d="M12.8859 22.2592C13.1836 22.2503 15.7968 22.2436 16.0146 22.2281C16.4213 22.1969 16.4835 22.7591 16.0146 22.7591C15.528 22.7591 12.9637 22.768 12.8081 22.7747C12.4481 22.7925 12.3992 22.2747 12.8859 22.2592Z"
|
||||
fill="#FDB900"
|
||||
/>
|
||||
<path
|
||||
d="M14.9813 17.1127C14.9813 17.1127 13.6481 20.4592 13.5725 20.7103C13.2592 21.7591 14.3858 21.728 14.6836 21.1325C14.8302 20.837 16.2635 17.8016 16.3257 17.2527C16.3879 16.7061 14.9813 17.1127 14.9813 17.1127Z"
|
||||
fill="#FFE36A"
|
||||
/>
|
||||
<path
|
||||
d="M15.3347 21.0148C15.1436 20.8837 14.9125 20.9992 14.7725 21.2192C14.6325 21.4392 14.428 21.797 14.7414 21.9858C15.0236 22.1547 15.2724 21.8147 15.3835 21.657C15.4924 21.4992 15.6324 21.217 15.3347 21.0148Z"
|
||||
fill="#FFE36A"
|
||||
/>
|
||||
<path
|
||||
d="M17.6301 21.7326C17.1212 21.6237 16.9568 22.0459 16.8479 22.5459C16.739 23.0459 16.3302 24.6636 16.2546 25.0635C16.1457 25.6257 16.6612 25.7057 16.8812 25.188C17.0457 24.8013 17.759 23.057 17.8501 22.7792C17.9901 22.3592 18.1456 21.8437 17.6301 21.7326Z"
|
||||
fill="#FFE36A"
|
||||
/>
|
||||
<path
|
||||
d="M25.7585 12.0573C25.7585 12.0573 26.3363 4.28441 19.69 3.2978C13.7147 2.41118 12.5415 8.08421 12.5415 8.08421C12.5415 8.08421 10.2838 7.55757 8.66166 9.00639C7.05064 10.4463 7.17508 12.1507 7.17508 12.1507C7.17508 12.1507 3.20195 11.524 2.79531 14.935C2.41533 18.1215 7.11286 17.3282 7.11286 17.3282L29.4183 15.686C29.4183 15.686 29.9472 14.0106 28.2606 12.7462C27.2585 11.9929 25.7585 12.0573 25.7585 12.0573Z"
|
||||
fill="#E4EAEE"
|
||||
/>
|
||||
<path
|
||||
d="M13.7347 13.8196C13.9213 13.7574 14.9857 14.6662 18.0522 14.6951C22.2653 14.7373 25.4563 12.2552 25.4563 12.2552C25.4563 12.2552 25.5629 13.0108 25.2274 13.5485C24.8096 14.2151 24.163 14.3418 24.163 14.3418C24.163 14.3418 25.3318 15.1551 26.9362 15.0307C28.3828 14.9173 29.4294 14.4262 29.4294 14.4262C29.4294 14.4262 29.5694 15.1395 29.4916 15.8351C29.3361 17.2217 28.5228 17.7861 27.6251 17.8883C26.9651 17.9638 21.3276 17.9905 19.0122 18.0127C16.9256 18.0327 6.29285 18.2994 5.20624 18.1794C4.07963 18.0549 3.22412 17.3772 2.91303 16.4061C2.67526 15.6706 2.78859 15.1973 2.78859 15.1973C2.78859 15.1973 4.85959 15.7462 6.02175 15.6151C7.48167 15.4484 8.46162 14.5307 8.46162 14.5307C8.46162 14.5307 9.33713 15.0307 11.277 14.864C12.9458 14.7173 13.7347 13.8196 13.7347 13.8196Z"
|
||||
fill="url(#paint0_radial_811_5475)"
|
||||
/>
|
||||
<path
|
||||
d="M24.8653 18.2661C24.6386 18.1394 23.832 18.8927 23.3787 19.346C23.1009 19.6238 22.2165 20.3504 22.0965 21.0504C21.7988 22.7703 23.7698 23.1614 24.552 22.3637C25.143 21.7615 25.0364 20.586 25.0364 20.1904C25.0386 19.6416 25.1475 18.4216 24.8653 18.2661Z"
|
||||
fill="#52C0EE"
|
||||
/>
|
||||
<path
|
||||
d="M8.67058 19.1904C8.45504 19.0659 7.68174 19.7948 7.24621 20.237C6.97956 20.5058 6.13294 21.2103 6.01294 21.8947C5.71962 23.5723 7.5973 23.9657 8.34837 23.1924C8.91501 22.608 8.82168 21.4591 8.82391 21.0725C8.82613 20.537 8.93723 19.3459 8.67058 19.1904Z"
|
||||
fill="#52C0EE"
|
||||
/>
|
||||
<path
|
||||
d="M12.2548 24.1634C12.0126 24.0723 11.1971 24.8145 10.7438 25.2678C10.466 25.5456 9.64386 26.2566 9.52386 26.9566C9.2261 28.6765 11.1349 29.0832 11.9171 28.2854C12.5081 27.6832 12.4104 26.5077 12.4015 26.1122C12.3793 25.0812 12.4215 24.2256 12.2548 24.1634Z"
|
||||
fill="#52C0EE"
|
||||
/>
|
||||
<path
|
||||
d="M23.5054 20.4926C23.1943 20.3304 22.8165 20.3993 22.5765 20.8992C22.3365 21.3992 22.5343 21.797 22.7854 21.9103C23.0743 22.0436 23.432 21.9214 23.672 21.5147C23.912 21.1081 23.7654 20.6281 23.5054 20.4926Z"
fill="#B2E6FE"
/>
<path
d="M10.6682 26.4279C10.3482 26.3168 9.99715 26.4345 9.83716 26.9478C9.67717 27.4611 9.92382 27.8122 10.1794 27.8878C10.4749 27.9744 10.7993 27.8078 10.9727 27.3834C11.1438 26.9589 10.9349 26.5212 10.6682 26.4279Z"
fill="#B2E6FE"
/>
<path
d="M7.12613 21.3258C6.78837 21.2325 6.43283 21.3769 6.30395 21.9169C6.17507 22.4569 6.45061 22.8035 6.71948 22.8613C7.03058 22.9302 7.35278 22.7369 7.50389 22.288C7.65277 21.8436 7.41056 21.4036 7.12613 21.3258Z"
fill="#B2E6FE"
/>
<defs>
<radialGradient
id="paint0_radial_811_5475"
cx="0"
cy="0"
r="1"
gradientTransform="matrix(0.18837 -6.53799 9.79456 0.282554 16.401 18.5051)"
gradientUnits="userSpaceOnUse"
>
<stop offset="0.1934" stopColor="#FFE366" />
<stop offset="0.3305" stopColor="#EDDD82" />
<stop offset="0.5709" stopColor="#D0D4AD" />
<stop offset="0.7589" stopColor="#BFCFC7" />
<stop offset="0.8699" stopColor="#B8CDD1" />
</radialGradient>
</defs>
</svg>
After Width: | Height: | Size: 5.4 KiB |
3
frontend/public/Logos/agno.svg
Normal file
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100">
<image width="100" height="100" href="data:image/webp;base64,UklGRqQEAABXRUJQVlA4IJgEAACQJQCdASrhAOEAPm02mUikIyKhJVZ4CIANiWVu4XPw+dx/zz8Y9kq6J+Kv5T8nN4H6MM2Hod6xffvuy+jnoX8wD9Zekj5gP2S/YDsAegB+ovWUeg9/GfPP/cn4dP2/9JnVbPMU2BWP/6ShYsB/INjyXNw88GLWEjqTk7r3B1Jyd0Q60mjXFTuiHUwvdoU9QiTOwXp8O6KdYSPcXtgV/YIjrUu+9aTZYQwhSBOiHuLpesMKhiEeAz91s6AFmAINNYJeBpyuf9KScX17NiZWEjlmJ+PsBWx4Be/O9Vng220j2g97IwDP4CHt7ScFK+ikuWNp4z1G60XIv5OXis+Sp/e/QkOjbE0qKut3VrL3hQfSVuuLp2XQBvxPw8eyiMbE05kQ6k5O6Ml23DSy92Ix+s9tw0svcgAA/Jba8JQ+0uNQuBewKWqirnyDRkjzUXXOeoNxj0dA3QpklBe8QAAMu8geavhwzfTJ2OUqOrwpg8PXz6sktEQLDnTlNclju/V9f4Ny3CgDYwBvljjNaFA8PR18BGAyzmVuHsa0hgskCkgbT6nZi/Vr9AXTc/UZtUS8Z3pKg4hMjWCXdMx0JCsu6v+8ghZmHf/y0VcGbDiugOy3WH36R3zSOIJh1NjhcNYlKfWilZD5ohEfCaTn/D4u3kgC/kLeLF06X5oe/zKySrWWEPQe965rTf4pcEGBZQGkauwtkLPJWVcHoNzJshb6gdVqs0hye7fI5nFTsbJaZmqz8kXeMSEV4zhApVaDyl3Q2P/ATfChzCP87kOBxeVrhSkNkMWS/e1j58dVzpyTubHOt04Z2NKpTlEjBganrkKIYvtuYWGoJWEmtMVtM9TuzNWa3jKX3kWc9zWmQHHawveaHnJn4HJXQwbTC4ZIlxa5el1b8GgqUXzOeF9LIWA3eb7i34/Q3GXZcbs5DLsukl03KclLo5hW5uf04JjGQR5NXHd8as68c3K9Kze3147QuIn1ytP7uE9us639xDuAiHDPr9JiVn62mv80Q2ErNLD+d6VYdhm6J2PCLY5/9mHTExTnIBzRtNoErP6qmMiCXmSM4sX7s14u3yhaE+dVjX1xtxUWtFjH0GWCN/1JNmjtElHq+FjqPWse7tIMvkKm7xFBgoM8yUJGxY+HviWNkOHipLPh7RJ3KehnEZTPIhR5GHlhlATLx+QUQ61+WwhffiT45KPRiK9v8Cd+1YShd6ZGqFSqsxt4FuMSvpMUB+Rd51pUUPuwXHzpXcyz6BXkWw0Mi3jbfH3kxet2Bsqzu0XWmJ2Oj097qP4ayirQzJewKHo8LS0cFG73Lvox2tmD7ZhuRRVmGjyz62sUMVDSzIUN2BHghed2IdUHVnAkof5DrdZIx+umeHp2XSityHdbTywo7r1+ve5+vPk8hhxhISBjbK+qikfPVnhJ8R/L7KUUqchh3AGKVmRGVgXh09MDaEiW/qqPYzP+MBiKfM5eTcHuGLymyFvT2em/o58zIcp7nKtDr90BsurnuOA0XaH3ZPDpSFAUN3HL9Zj7mpQtcFMfpCaC1VMv0l2UvY8xo/6qYAG6UEbsogAAAAA=" />
</svg>
After Width: | Height: | Size: 1.7 KiB |
1
frontend/public/Logos/amazon-bedrock.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" fill="currentColor" fill-rule="evenodd" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>AWS</title><path d="M6.763 11.212q.002.446.088.71c.064.176.144.368.256.576.04.063.056.127.056.183q.002.12-.152.24l-.503.335a.4.4 0 0 1-.208.072q-.12-.002-.239-.112a2.5 2.5 0 0 1-.287-.375 6 6 0 0 1-.248-.471q-.934 1.101-2.347 1.101c-.67 0-1.205-.191-1.596-.574-.39-.384-.59-.894-.59-1.533 0-.678.24-1.23.726-1.644.487-.415 1.133-.623 1.955-.623.272 0 .551.024.846.064.296.04.6.104.918.176v-.583q-.001-.908-.375-1.277c-.255-.248-.686-.367-1.3-.367-.28 0-.568.031-.863.103s-.583.16-.862.272a2 2 0 0 1-.28.104.5.5 0 0 1-.127.023q-.168.002-.168-.247v-.391c0-.128.016-.224.056-.28a.6.6 0 0 1 .224-.167 4.6 4.6 0 0 1 1.005-.36 4.8 4.8 0 0 1 1.246-.151c.95 0 1.644.216 2.091.647q.661.646.662 1.963v2.586zm-3.24 1.214c.263 0 .534-.048.822-.144a1.8 1.8 0 0 0 .758-.51 1.3 1.3 0 0 0 .272-.512c.047-.191.08-.423.08-.694v-.335a7 7 0 0 0-.735-.136 6 6 0 0 0-.75-.048c-.535 0-.926.104-1.19.32-.263.215-.39.518-.39.917 0 .375.095.655.295.846.191.2.47.296.838.296m6.41.862c-.144 0-.24-.024-.304-.08-.064-.048-.12-.16-.168-.311L7.586 6.726a1.4 1.4 0 0 1-.072-.32c0-.128.064-.2.191-.2h.783q.227-.001.31.08c.065.048.113.16.16.312l1.342 5.284 1.245-5.284q.058-.24.151-.312a.55.55 0 0 1 .32-.08h.638c.152 0 .256.025.32.08.063.048.12.16.151.312l1.261 5.348 1.381-5.348q.074-.24.16-.312a.52.52 0 0 1 .311-.08h.743c.127 0 .2.065.2.2 0 .04-.009.08-.017.128a1 1 0 0 1-.056.2l-1.923 6.17q-.072.24-.168.311a.5.5 0 0 1-.303.08h-.687c-.15 0-.255-.024-.32-.08-.063-.056-.119-.16-.15-.32L12.32 7.747l-1.23 5.14c-.04.16-.087.264-.15.32-.065.056-.177.08-.32.08zm10.256.215c-.415 0-.83-.048-1.229-.143-.399-.096-.71-.2-.918-.32-.128-.071-.215-.151-.247-.223a.6.6 0 0 1-.048-.224v-.407c0-.167.064-.247.183-.247q.072 0 .144.024c.048.016.12.048.2.08q.408.181.878.279c.32.064.63.096.95.096.502 0 .894-.088 1.165-.264a.86.86 0 0 0 .415-.758.78.78 0 0 0-.215-.559c-.144-.151-.416-.287-.807-.415l-1.157-.36c-.583-.183-1.014-.454-1.277-.813a1.9 1.9 0 0 1-.4-1.158q0-.502.216-.886c.144-.255.335-.479.575-.654.24-.184.51-.32.83-.415.32-.096.655-.136 1.006-.136.175 0 .36.008.535.032.183.024.35.056.518.088q.24.058.455.127.216.072.336.144a.7.7 0 0 1 .24.2.43.43 0 0 1 .071.263v.375q-.002.254-.184.256a.8.8 0 0 1-.303-.096 3.65 3.65 0 0 0-1.532-.311c-.455 0-.815.071-1.062.223s-.375.383-.375.71c0 .224.08.416.24.567.16.152.454.304.877.44l1.134.358c.574.184.99.44 1.237.767s.367.702.367 1.117c0 .343-.072.655-.207.926a2.2 2.2 0 0 1-.583.703c-.248.2-.543.343-.886.447-.36.111-.734.167-1.142.167"/><path fill="#f90" d="M.378 15.475c3.384 1.963 7.56 3.153 11.877 3.153 2.914 0 6.114-.607 9.06-1.852.44-.2.814.287.383.607-2.626 1.94-6.442 2.969-9.722 2.969-4.598 0-8.74-1.7-11.87-4.526-.247-.223-.024-.527.272-.351m23.531-.2c.287.36-.08 2.826-1.485 4.007-.215.184-.423.088-.327-.151l.175-.439c.343-.88.802-2.198.52-2.555-.336-.43-2.22-.207-3.074-.103-.255.032-.295-.192-.063-.36 1.5-1.053 3.967-.75 4.254-.399"/></svg>
After Width: | Height: | Size: 3.0 KiB |
16
frontend/public/Logos/anthropic-api-monitoring.svg
Normal file
@@ -0,0 +1,16 @@
<svg version="1.1" id="Layer_1" xmlns:x="ns_extend;" xmlns:i="ns_ai;" xmlns:graph="ns_graphs;" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 92.2 65" style="enable-background:new 0 0 92.2 65;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<metadata>
<sfw xmlns="ns_sfw;">
<slices>
</slices>
<sliceSourceBounds bottomLeftOrigin="true" height="65" width="92.2" x="-43.7" y="-98">
</sliceSourceBounds>
</sfw>
</metadata>
<path class="st0" d="M66.5,0H52.4l25.7,65h14.1L66.5,0z M25.7,0L0,65h14.4l5.3-13.6h26.9L51.8,65h14.4L40.5,0C40.5,0,25.7,0,25.7,0z
M24.3,39.3l8.8-22.8l8.8,22.8H24.3z">
</path>
</svg>
After Width: | Height: | Size: 714 B |
1
frontend/public/Logos/autogen.svg
Normal file
After Width: | Height: | Size: 20 KiB |
1
frontend/public/Logos/azure-openai.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>Azure</title><path fill="url(#a)" d="M7.242 1.613A1.11 1.11 0 0 1 8.295.857h6.977L8.03 22.316a1.11 1.11 0 0 1-1.052.755h-5.43a1.11 1.11 0 0 1-1.053-1.466z"/><path fill="#0078d4" d="M18.397 15.296H7.4a.51.51 0 0 0-.347.882l7.066 6.595c.206.192.477.298.758.298h6.226z"/><path fill="url(#b)" d="M15.272.857H7.497L0 23.071h7.775l1.596-4.73 5.068 4.73h6.665l-2.707-7.775h-7.998z"/><path fill="url(#c)" d="M17.193 1.613a1.11 1.11 0 0 0-1.052-.756h-7.81.035c.477 0 .9.304 1.052.756l6.748 19.992a1.11 1.11 0 0 1-1.052 1.466h-.12 7.895a1.11 1.11 0 0 0 1.052-1.466z"/><defs><linearGradient id="a" x1="8.247" x2="1.002" y1="1.626" y2="23.03" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0669bc"/></linearGradient><linearGradient id="b" x1="14.042" x2="12.324" y1="15.302" y2="15.888" gradientUnits="userSpaceOnUse"><stop stop-opacity=".3"/><stop offset=".071" stop-opacity=".2"/><stop offset=".321" stop-opacity=".1"/><stop offset=".623" stop-opacity=".05"/><stop offset="1" stop-opacity="0"/></linearGradient><linearGradient id="c" x1="12.841" x2="20.793" y1="1.626" y2="22.814" gradientUnits="userSpaceOnUse"><stop stop-color="#3ccbf4"/><stop offset="1" stop-color="#2892df"/></linearGradient></defs></svg>
After Width: | Height: | Size: 1.3 KiB |
1
frontend/public/Logos/claude-code.svg
Normal file
@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Claude</title><path d="M4.709 15.955l4.72-2.647.08-.23-.08-.128H9.2l-.79-.048-2.698-.073-2.339-.097-2.266-.122-.571-.121L0 11.784l.055-.352.48-.321.686.06 1.52.103 2.278.158 1.652.097 2.449.255h.389l.055-.157-.134-.098-.103-.097-2.358-1.596-2.552-1.688-1.336-.972-.724-.491-.364-.462-.158-1.008.656-.722.881.06.225.061.893.686 1.908 1.476 2.491 1.833.365.304.145-.103.019-.073-.164-.274-1.355-2.446-1.446-2.49-.644-1.032-.17-.619a2.97 2.97 0 01-.104-.729L6.283.134 6.696 0l.996.134.42.364.62 1.414 1.002 2.229 1.555 3.03.456.898.243.832.091.255h.158V9.01l.128-1.706.237-2.095.23-2.695.08-.76.376-.91.747-.492.584.28.48.685-.067.444-.286 1.851-.559 2.903-.364 1.942h.212l.243-.242.985-1.306 1.652-2.064.73-.82.85-.904.547-.431h1.033l.76 1.129-.34 1.166-1.064 1.347-.881 1.142-1.264 1.7-.79 1.36.073.11.188-.02 2.856-.606 1.543-.28 1.841-.315.833.388.091.395-.328.807-1.969.486-2.309.462-3.439.813-.042.03.049.061 1.549.146.662.036h1.622l3.02.225.79.522.474.638-.079.485-1.215.62-1.64-.389-3.829-.91-1.312-.329h-.182v.11l1.093 1.068 2.006 1.81 2.509 2.33.127.578-.322.455-.34-.049-2.205-1.657-.851-.747-1.926-1.62h-.128v.17l.444.649 2.345 3.521.122 1.08-.17.353-.608.213-.668-.122-1.374-1.925-1.415-2.167-1.143-1.943-.14.08-.674 7.254-.316.37-.729.28-.607-.461-.322-.747.322-1.476.389-1.924.315-1.53.286-1.9.17-.632-.012-.042-.14.018-1.434 1.967-2.18 2.945-1.726 1.845-.414.164-.717-.37.067-.662.401-.589 2.388-3.036 1.44-1.882.93-1.086-.006-.158h-.055L4.132 18.56l-1.13.146-.487-.456.061-.746.231-.243 1.908-1.312-.006.006z" fill="#D97757" fill-rule="nonzero"></path></svg>
After Width: | Height: | Size: 1.7 KiB |
1
frontend/public/Logos/crew-ai.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24"><title>CrewAI</title><path fill="#461816" d="M19.41 10.783a2.75 2.75 0 0 1 2.471 1.355c.483.806.622 1.772.385 2.68l-.136.522a10 10 0 0 1-3.156 5.058c-.605.517-1.283 1.062-2.083 1.524l-.028.017c-.402.232-.884.511-1.398.756-1.19.602-2.475.997-3.798 1.167-.854.111-1.716.155-2.577.132h-.018a8.6 8.6 0 0 1-5.046-1.87l-.012-.01-.012-.01A8.02 8.02 0 0 1 1.22 17.42a10.9 10.9 0 0 1-.102-3.779A15.6 15.6 0 0 1 2.88 8.4a21.8 21.8 0 0 1 2.432-3.678 15.4 15.4 0 0 1 3.56-3.182A10 10 0 0 1 12.44.104h.004l.003-.002c2.057-.384 3.743.374 5.024 1.26a8.3 8.3 0 0 1 2.395 2.513l.024.04.023.042a5.47 5.47 0 0 1 .508 4.012c-.239.97-.577 1.914-1.01 2.814z"/><path fill="#fff" d="M18.861 13.165a.748.748 0 0 1 1.256.031c.199.332.256.73.159 1.103l-.137.522a7.94 7.94 0 0 1-2.504 4.014c-.572.49-1.138.939-1.774 1.306-.427.247-.857.496-1.303.707a9.6 9.6 0 0 1-3.155.973 14.3 14.3 0 0 1-2.257.116 6.53 6.53 0 0 1-3.837-1.422 5.97 5.97 0 0 1-2.071-3.494 8.9 8.9 0 0 1-.085-3.08 13.6 13.6 0 0 1 1.54-4.568 19.7 19.7 0 0 1 2.212-3.348 13.4 13.4 0 0 1 3.088-2.76 7.9 7.9 0 0 1 2.832-1.14c1.307-.245 2.434.207 3.481.933a6.2 6.2 0 0 1 1.806 1.892c.423.767.536 1.668.314 2.515a12.4 12.4 0 0 1-.99 2.67l-.223.497q-.48 1.07-.97 2.137a.76.76 0 0 1-.97.467 3.39 3.39 0 0 1-2.283-2.49c-.095-.83.04-1.669.39-2.426.288-.746.61-1.477.933-2.208l.248-.563a.53.53 0 0 0-.204-.742 2.35 2.35 0 0 0-1.2.702 25 25 0 0 0-1.614 1.767 21.6 21.6 0 0 0-2.619 4.184 7.6 7.6 0 0 0-.816 2.753 7 7 0 0 0 .07 2.219 2.055 2.055 0 0 0 1.934 1.715c1.801.1 3.59-.363 5.116-1.328a19 19 0 0 0 1.675-1.294c.752-.71 1.376-1.519 1.958-2.36"/></svg>
After Width: | Height: | Size: 1.7 KiB |
1
frontend/public/Logos/dashboards.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="150" height="150" fill="none" viewBox="0 0 150 150"><circle cx="75" cy="75" r="70" fill="#f3f4f6"/><g transform="translate(25 25)"><rect width="60" height="55" fill="#fcd34d" rx="6"/><path stroke="#d97706" stroke-linecap="round" stroke-linejoin="round" stroke-width="4" d="m10 40 15-15 15 8 15-23"/><rect width="35" height="55" x="65" fill="#6ee7b7" rx="6"/><rect width="20" height="6" x="73" y="10" fill="#059669" rx="3"/><rect width="15" height="6" x="73" y="22" fill="#059669" opacity=".7" rx="3"/><rect width="18" height="6" x="73" y="34" fill="#059669" opacity=".7" rx="3"/><rect width="100" height="40" y="60" fill="#a5b4fc" rx="6"/><rect width="80" height="8" x="10" y="70" fill="#4f46e5" rx="4"/><rect width="50" height="8" x="20" y="83" fill="#6366f1" rx="4"/></g></svg>
After Width: | Height: | Size: 826 B |
1
frontend/public/Logos/deepseek.svg
Normal file
@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>DeepSeek</title><path d="M23.748 4.482c-.254-.124-.364.113-.512.234-.051.039-.094.09-.137.136-.372.397-.806.657-1.373.626-.829-.046-1.537.214-2.163.848-.133-.782-.575-1.248-1.247-1.548-.352-.156-.708-.311-.955-.65-.172-.241-.219-.51-.305-.774-.055-.16-.11-.323-.293-.35-.2-.031-.278.136-.356.276-.313.572-.434 1.202-.422 1.84.027 1.436.633 2.58 1.838 3.393.137.093.172.187.129.323-.082.28-.18.552-.266.833-.055.179-.137.217-.329.14a5.526 5.526 0 01-1.736-1.18c-.857-.828-1.631-1.742-2.597-2.458a11.365 11.365 0 00-.689-.471c-.985-.957.13-1.743.388-1.836.27-.098.093-.432-.779-.428-.872.004-1.67.295-2.687.684a3.055 3.055 0 01-.465.137 9.597 9.597 0 00-2.883-.102c-1.885.21-3.39 1.102-4.497 2.623C.082 8.606-.231 10.684.152 12.85c.403 2.284 1.569 4.175 3.36 5.653 1.858 1.533 3.997 2.284 6.438 2.14 1.482-.085 3.133-.284 4.994-1.86.47.234.962.327 1.78.397.63.059 1.236-.03 1.705-.128.735-.156.684-.837.419-.961-2.155-1.004-1.682-.595-2.113-.926 1.096-1.296 2.746-2.642 3.392-7.003.05-.347.007-.565 0-.845-.004-.17.035-.237.23-.256a4.173 4.173 0 001.545-.475c1.396-.763 1.96-2.015 2.093-3.517.02-.23-.004-.467-.247-.588zM11.581 18c-2.089-1.642-3.102-2.183-3.52-2.16-.392.024-.321.471-.235.763.09.288.207.486.371.739.114.167.192.416-.113.603-.673.416-1.842-.14-1.897-.167-1.361-.802-2.5-1.86-3.301-3.307-.774-1.393-1.224-2.887-1.298-4.482-.02-.386.093-.522.477-.592a4.696 4.696 0 011.529-.039c2.132.312 3.946 1.265 5.468 2.774.868.86 1.525 1.887 2.202 2.891.72 1.066 1.494 2.082 2.48 2.914.348.292.625.514.891.677-.802.09-2.14.11-3.054-.614zm1-6.44a.306.306 0 01.415-.287.302.302 0 01.2.288.306.306 0 01-.31.307.303.303 0 01-.304-.308zm3.11 1.596c-.2.081-.399.151-.59.16a1.245 1.245 0 01-.798-.254c-.274-.23-.47-.358-.552-.758a1.73 1.73 0 01.016-.588c.07-.327-.008-.537-.239-.727-.187-.156-.426-.199-.688-.199a.559.559 0 01-.254-.078c-.11-.054-.2-.19-.114-.358.028-.054.16-.186.192-.21.356-.202.767-.136 1.146.016.352.144.618.408 1.001.782.391.451.462.576.685.914.176.265.336.537.445.848.067.195-.019.354-.25.452z" fill="#4D6BFE"></path></svg>
After Width: | Height: | Size: 2.1 KiB |
1
frontend/public/Logos/envoy.svg
Normal file
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 128 128"><path fill="#b31aab" d="m33.172 61.48.176 7.325 7.797 4.785-.176-7.324Zm19.031 30.504-.176-7.18-6.84-4.206c-.085-.059-.203-.145-.289-.203l.176 7.207Zm-24.355 9.688L10.012 90.715l-.438-18.367 8.758-3.746-.172-7.352-13.969 5.969c-1.074.46-1.714 1.441-1.687 2.566l.523 22.055c.032 1.125.73 2.25 1.836 2.941l21.383 13.149c.992.605 2.215.78 3.203.46a.8.8 0 0 0 .29-.113l13.124-5.593-7.129-4.383Zm0 0"/><path fill="#d163ce" d="M85.488 61.047c-.031-1.328-.843-2.625-2.125-3.43L57.38 41.672l-.813.344.176 7.726 20.57 12.63.493 20.648 7.86 4.812.433-.172ZM54.383 97.289 30.262 82.47l-.582-24.883 11-4.7-.203-8.562-17.082 7.293c-1.25.547-2.008 1.672-1.977 3l.668 29.18c.031 1.324.844 2.625 2.125 3.402l28.281 17.387c1.164.723 2.563.894 3.754.547.117-.028.234-.086.348-.145l16.703-7.12-8.32-5.102Zm0 0"/><path fill="#e13eaf" d="M122.234 40.633 85.98 18.343c-1.335-.808-2.937-1.038-4.304-.605-.145.028-.262.086-.406.145l-35.383 15.11c-1.426.605-2.297 1.902-2.27 3.429l.903 37.371c.03 1.527.96 2.996 2.445 3.89l36.254 22.262c1.336.805 2.937 1.035 4.277.606.145-.031.262-.09.406-.145l35.383-15.11c1.426-.605 2.297-1.933 2.27-3.433l-.875-37.367c-.028-1.473-.961-2.969-2.446-3.863M85.398 91.64 53.891 72.293l-.79-32.496 30.727-13.121 31.512 19.347.785 32.497Zm0 0"/></svg>
After Width: | Height: | Size: 1.3 KiB |
1
frontend/public/Logos/fly-io.svg
Normal file
After Width: | Height: | Size: 6.0 KiB |
3
frontend/public/Logos/google-adk.svg
Normal file
After Width: | Height: | Size: 11 KiB |
1
frontend/public/Logos/google-gemini.svg
Normal file
@@ -0,0 +1 @@
<svg height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Gemini</title><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="#3186FF"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-0)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-1)"></path><path d="M20.616 10.835a14.147 14.147 0 01-4.45-3.001 14.111 14.111 0 01-3.678-6.452.503.503 0 00-.975 0 14.134 14.134 0 01-3.679 6.452 14.155 14.155 0 01-4.45 3.001c-.65.28-1.318.505-2.002.678a.502.502 0 000 .975c.684.172 1.35.397 2.002.677a14.147 14.147 0 014.45 3.001 14.112 14.112 0 013.679 6.453.502.502 0 00.975 0c.172-.685.397-1.351.677-2.003a14.145 14.145 0 013.001-4.45 14.113 14.113 0 016.453-3.678.503.503 0 000-.975 13.245 13.245 0 01-2.003-.678z" fill="url(#lobe-icons-gemini-fill-2)"></path><defs><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-0" x1="7" x2="11" y1="15.5" y2="12"><stop stop-color="#08B962"></stop><stop offset="1" stop-color="#08B962" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-1" x1="8" x2="11.5" y1="5.5" y2="11"><stop stop-color="#F94543"></stop><stop offset="1" stop-color="#F94543" stop-opacity="0"></stop></linearGradient><linearGradient gradientUnits="userSpaceOnUse" id="lobe-icons-gemini-fill-2" x1="3.5" x2="17.5" y1="13.5" y2="12"><stop stop-color="#FABC12"></stop><stop offset=".46" stop-color="#FABC12" stop-opacity="0"></stop></linearGradient></defs></svg>
After Width: | Height: | Size: 2.8 KiB |
7
frontend/public/Logos/grok.svg
Normal file
After Width: | Height: | Size: 14 KiB |
1
frontend/public/Logos/groq.svg
Normal file
@@ -0,0 +1 @@
<svg fill="currentColor" fill-rule="evenodd" height="1em" style="flex:none;line-height:1" viewBox="0 0 24 24" width="1em" xmlns="http://www.w3.org/2000/svg"><title>Groq</title><path d="M12.036 2c-3.853-.035-7 3-7.036 6.781-.035 3.782 3.055 6.872 6.908 6.907h2.42v-2.566h-2.292c-2.407.028-4.38-1.866-4.408-4.23-.029-2.362 1.901-4.298 4.308-4.326h.1c2.407 0 4.358 1.915 4.365 4.278v6.305c0 2.342-1.944 4.25-4.323 4.279a4.375 4.375 0 01-3.033-1.252l-1.851 1.818A7 7 0 0012.029 22h.092c3.803-.056 6.858-3.083 6.879-6.816v-6.5C18.907 4.963 15.817 2 12.036 2z"></path></svg>
After Width: | Height: | Size: 568 B |
1
frontend/public/Logos/honeycomb.svg
Normal file
After Width: | Height: | Size: 5.5 KiB |