Compare commits

..

135 Commits

Author SHA1 Message Date
nikhilmantri0902
a55e65ae43 chore: rename 2026-04-29 11:52:00 +05:30
nikhilmantri0902
7f14bc0e0a chore: namespaces code 2026-04-29 11:52:00 +05:30
Nikhil Mantri
1c9e31ad00 Merge branch 'main' into infraM/v2_nodes_list_api 2026-04-29 11:50:36 +05:30
swapnil-signoz
14a032119a chore: bumping cloud integration agent version to v0.0.10 (#11135)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
* chore: bumping agent version to v0.0.10

* chore: deployment
2026-04-29 05:23:22 +00:00
Yunus M
e78dfc1622 Azure service integration UI (#11117)
* feat: azure integration - ui refactor

* feat: implement AWS cloud account integration UI components and connection handling

* feat: add Azure cloud account integration UI components and connection handling

* feat: enhance Azure cloud account setup UI with prerequisites and accordion for how it works section

* feat: enhance styling for Azure and AWS account management

* refactor: clean up state initialization and destructuring in AWS and HeroSection components

* fix: update import path for ServiceDashboards in S3Sync test

* feat: add Denmark East region to Azure regions and enhance Azure account removal messaging

* chore: remove prefer-signoz-ui-icons ESLint rule and update telemetry event naming
2026-04-29 03:58:38 +00:00
nikhilmantri0902
8b0e8f666e chore: v2 nodes api 2026-04-28 17:02:19 +05:30
nikhilmantri0902
1d89b03f10 chore: merged main and resolved conflicts 2026-04-28 16:38:57 +05:30
nikhilmantri0902
3a61a78986 chore: merged base branch 2026-04-28 12:48:20 +05:30
Nikhil Mantri
46e833faba Merge branch 'main' into infraM/v2_pods_list_api 2026-04-28 12:15:56 +05:30
nikhilmantri0902
4bd7492629 chore: updated comment 2026-04-28 12:07:04 +05:30
nikhilmantri0902
401701e036 chore: metadata fix 2026-04-27 19:23:11 +05:30
nikhilmantri0902
3bec0df0ad chore: nodes list v2 full blown 2026-04-27 16:17:39 +05:30
Nikhil Mantri
24fe9a986d feat(infra-monitoring): v2 pods list apis - phase counts when custom grouping (#11088)
* chore: added phase counts feature

* chore: added queries for pod phase counts in custom group by

* chore: added unknown phase count

* fix: isPodUIDInGroupBy in buildPodRecords

* chore: 3 cte --> 2 cte

* chore: pod phase with local table of time series as counts

* chore: comment correction

* chore: corrected comment

* chore: value column for samples table added

* chore: removed query G for phase counts

* chore: rename variable

* chore: added PodPhaseNum constants to types
2026-04-27 16:04:55 +05:30
nikhilmantri0902
520e92049c Merge branch 'feat/v2_pods_list_api_phase_counts' of github.com:SigNoz/signoz into infraM/v2_nodes_list_api 2026-04-27 15:43:09 +05:30
Nikhil Mantri
92d297ac9d Merge branch 'main' into infraM/v2_pods_list_api 2026-04-27 15:05:04 +05:30
nikhilmantri0902
eff29aefba chore: added PodPhaseNum constants to types 2026-04-27 13:59:08 +05:30
nikhilmantri0902
ca73453c9e chore: rename variable 2026-04-27 13:53:17 +05:30
nikhilmantri0902
1d836d674f chore: removed query G for phase counts 2026-04-27 13:46:03 +05:30
nikhilmantri0902
83724b0cde chore: value column for samples table added 2026-04-27 13:09:46 +05:30
nikhilmantri0902
3050e37ec7 chore: corrected comment 2026-04-27 12:43:17 +05:30
Ashwin Bhatkal
bdbaa32485 Merge branch 'main' into infraM/v2_pods_list_api 2026-04-27 11:44:13 +05:30
nikhilmantri0902
55b2215025 chore: comment correction 2026-04-24 17:14:56 +05:30
nikhilmantri0902
e90378e618 Merge branch 'infraM/v2_pods_list_api' into feat/v2_pods_list_api_phase_counts 2026-04-24 16:47:15 +05:30
nikhilmantri0902
4592b78f48 chore: pod phase with local table of time series as counts 2026-04-24 15:27:38 +05:30
nikhilmantri0902
d81c99feae chore: 3 cte --> 2 cte 2026-04-24 14:58:26 +05:30
nikhilmantri0902
65a456ff9e fix: isPodUIDInGroupBy in buildPodRecords 2026-04-24 14:36:43 +05:30
nikhilmantri0902
264577b673 chore: added unknown phase count 2026-04-24 13:29:09 +05:30
Ashwin Bhatkal
b35c6676f9 fix: rebase fixes 2026-04-24 13:17:02 +05:30
nikhilmantri0902
c78c9a42db chore: merged base 2026-04-24 13:03:21 +05:30
nikhilmantri0902
1095caa123 chore: improved api description to document -1 as no data in numeric fields 2026-04-24 12:11:45 +05:30
nikhilmantri0902
9043b49762 chore: removed pods - order by phase 2026-04-24 12:04:51 +05:30
nikhilmantri0902
d4084a7494 chore: added support for pod phase unknown 2026-04-24 11:46:26 +05:30
nikhilmantri0902
27c564b3bf chore: added required tags 2026-04-24 11:21:20 +05:30
Nikhil Mantri
f02c491828 Merge branch 'main' into infraM/v2_pods_list_api 2026-04-24 10:47:13 +05:30
Nikhil Mantri
3d53b8f77f Merge branch 'main' into infraM/v2_pods_list_api 2026-04-23 18:44:33 +05:30
nikhilmantri0902
dffe94fec4 chore: conflicts resolved 2026-04-23 18:39:39 +05:30
nikhilmantri0902
b7d4f18aae chore: added queries for pod phase counts in custom group by 2026-04-23 18:01:26 +05:30
nikhilmantri0902
9ad2ec428a chore: added phase counts feature 2026-04-23 16:50:39 +05:30
nikhilmantri0902
c9360fcf13 Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-23 11:23:49 +05:30
nikhilmantri0902
b5ab45db20 chore: regen api client for inframonitoring
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-23 10:51:35 +05:30
Nikhil Mantri
08f76aca78 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-23 09:51:01 +05:30
nikhilmantri0902
983d4fe4f2 Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-22 15:37:21 +05:30
nikhilmantri0902
833af794c3 chore: make sort stable in case of tiebreaker by comparing composite group by keys 2026-04-22 15:26:28 +05:30
nikhilmantri0902
21b51d1fcc chore: cleanup and rename 2026-04-22 15:13:00 +05:30
nikhilmantri0902
56f22682c8 Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-22 14:29:17 +05:30
nikhilmantri0902
9c8359940c chore: remove a defensive nil map check, the function ensure non-nil map when err nil 2026-04-22 11:59:01 +05:30
Nikhil Mantri
4050880275 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-22 11:35:57 +05:30
nikhilmantri0902
5e775f64f2 chore: added status unauthorized 2026-04-21 21:30:44 +05:30
nikhilmantri0902
0189f23f46 chore: removed internal server error 2026-04-21 21:30:01 +05:30
nikhilmantri0902
49a36d4e3d chore: removed pod metric temporalities 2026-04-21 21:24:49 +05:30
nikhilmantri0902
9407d658ab chore: merge base hosts v2 branch 2026-04-21 21:17:28 +05:30
nikhilmantri0902
5035712485 chore: added json tag required: true 2026-04-21 18:50:25 +05:30
nikhilmantri0902
bab17c3615 chore: comments resolve 2026-04-21 18:33:56 +05:30
Nikhil Mantri
37b44f4db9 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-21 17:40:06 +05:30
nikhilmantri0902
99dd6e5f1e chore: pods code restructuring 2026-04-21 17:03:13 +05:30
nikhilmantri0902
9c7131fa6a chore: merge base branch 2026-04-21 16:22:55 +05:30
Nikhil Mantri
ad889a2e1d Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-21 13:48:53 +05:30
nikhilmantri0902
a4f6d0cbf5 chore: removed temporalities 2026-04-21 13:44:06 +05:30
nikhilmantri0902
589bed7c16 chore: comments correction 2026-04-21 12:50:51 +05:30
nikhilmantri0902
93843a1f48 chore: file structure further breakdown for clarity 2026-04-21 12:36:07 +05:30
nikhilmantri0902
88c43108fc chore: added types package 2026-04-20 18:52:43 +05:30
nikhilmantri0902
ed4cf540e8 chore: inframonitoring types renaming 2026-04-20 18:47:28 +05:30
nikhilmantri0902
9e2dfa9033 chore: rearrangement 2026-04-20 17:51:03 +05:30
nikhilmantri0902
d98d5d68ee chore: rename PodsList -> ListPods 2026-04-20 16:57:21 +05:30
nikhilmantri0902
2cb1c3b73b chore: rename HostsList -> ListHosts 2026-04-20 16:42:19 +05:30
nikhilmantri0902
ae7ca497ad chore: merged base hosts branch and reorganized code 2026-04-20 13:38:25 +05:30
Nikhil Mantri
a579916961 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-20 11:05:36 +05:30
Nikhil Mantri
4a16d56abf feat(infra-monitoring): v2 hosts list - return counts of active & inactive hosts for custom group by attributes (#10956)
* chore: add functionality for showing active and inactive counts in custom group by

* chore: bug fix

* chore: added subquery for active and total count

* chore: ignore empty string hosts in get active hosts

* fix: sinceUnixMilli for determining active hosts compute once per request

* chore: refactor code
2026-04-20 10:41:15 +05:30
Nikhil Mantri
642b5ac3f0 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-16 16:32:39 +05:30
Nikhil Mantri
a12112619c Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-16 15:41:35 +05:30
nikhilmantri0902
014785f1bc chore: ignore empty string hosts in get active hosts 2026-04-16 13:17:15 +05:30
Nikhil Mantri
58ee797b10 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-15 14:18:29 +05:30
Nikhil Mantri
82d236742f Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-15 11:21:33 +05:30
nikhilmantri0902
397e1ad5be chore: added TODOs and made filterByStatus a part of filter struct 2026-04-14 18:32:48 +05:30
nikhilmantri0902
8d6b25ca9b chore: resolved conflicts 2026-04-14 17:09:17 +05:30
nikhilmantri0902
5fa6bd8b8d Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-13 11:02:14 +05:30
nikhilmantri0902
bd9977483b chore: improved description 2026-04-11 11:31:35 +05:30
nikhilmantri0902
50fbdfeeef chore: validate order by to validate function 2026-04-10 19:01:45 +05:30
nikhilmantri0902
e2b1b73e87 chore: improvements 2026-04-10 13:23:33 +05:30
nikhilmantri0902
cb9f3fd3e5 chore: rearrage 2026-04-10 00:39:23 +05:30
nikhilmantri0902
232acc343d chore: escape backtick to prevent sql injection 2026-04-10 00:01:01 +05:30
nikhilmantri0902
2025afdccc chore: endpoint modification openapi 2026-04-09 23:25:59 +05:30
nikhilmantri0902
d2f4d4af93 chore: endpoint correction 2026-04-09 23:21:57 +05:30
Nikhil Mantri
47ff7bbb8e Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-09 23:20:39 +05:30
Nikhil Mantri
724071c5dc Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-09 18:30:15 +05:30
nikhilmantri0902
4d24979358 chore: frontend fix 2026-04-09 18:26:42 +05:30
nikhilmantri0902
042943b10a chore: distributed samples table to local table change for get metadata 2026-04-09 18:24:45 +05:30
nikhilmantri0902
48a9be7ec8 chore: added required metrics check 2026-04-09 17:38:48 +05:30
nikhilmantri0902
a9504b2120 chore: added a TODO remark 2026-04-09 16:08:34 +05:30
nikhilmantri0902
8755887c4a chore: added better metrics existence check 2026-04-09 16:01:35 +05:30
Nikhil Mantri
4cb4662b3a Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-09 15:14:25 +05:30
nikhilmantri0902
e6900dabc8 chore: warnings added passing from queryResponse warning to host lists response struct 2026-04-09 00:09:38 +05:30
nikhilmantri0902
c1ba389b63 chore: add type for response and files rearrange 2026-04-08 23:35:53 +05:30
nikhilmantri0902
3a1f40234f Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-08 23:03:50 +05:30
Nikhil Mantri
2e4891fa63 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-08 16:07:57 +05:30
Nikhil Mantri
04ebc0bec7 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-08 11:08:10 +05:30
nikhilmantri0902
271f9b81ed Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-07 21:55:47 +05:30
nikhilmantri0902
6fa815c294 chore: modified getMetadata query 2026-04-07 18:55:57 +05:30
nikhilmantri0902
63ec518efb chore: added hostName logic 2026-04-07 17:36:15 +05:30
nikhilmantri0902
c4ca20dd90 chore: return errors from getMetadata and lint fix 2026-04-07 17:01:13 +05:30
nikhilmantri0902
e56cc4222b chore: return errors from getMetadata and lint fix 2026-04-07 16:57:35 +05:30
nikhilmantri0902
07d2944d7c chore: yarn generate api 2026-04-07 16:44:06 +05:30
nikhilmantri0902
dea01ae36a chore: hostStatusNone added for clarity that this field can be left empty as well in payload 2026-04-07 16:32:25 +05:30
nikhilmantri0902
62ea5b54e2 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-07 14:09:48 +05:30
nikhilmantri0902
e549a7e42f chore: added pods list api updates 2026-04-07 13:58:10 +05:30
nikhilmantri0902
90e2ebb11f Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-07 13:51:35 +05:30
nikhilmantri0902
61baa1be7a chore: code improvements 2026-04-07 13:49:00 +05:30
nikhilmantri0902
b946fa665f Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-07 11:15:35 +05:30
nikhilmantri0902
2e049556e4 chore: unified composite key function 2026-04-07 11:15:03 +05:30
nikhilmantri0902
492a5e70d7 chore: added pods metrics temporality 2026-04-06 17:33:44 +05:30
nikhilmantri0902
ba1f2771e8 Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-06 17:18:44 +05:30
nikhilmantri0902
7458fb4855 Merge branch 'main' into infraM/v2_hosts_list_api 2026-04-06 17:18:01 +05:30
nikhilmantri0902
5f55f3938b chore: added temporalities of metrics 2026-04-06 17:17:15 +05:30
nikhilmantri0902
3e8102485c Merge branch 'infraM/v2_hosts_list_api' into infraM/v2_pods_list_api 2026-04-04 20:52:50 +05:30
nikhilmantri0902
861c682ea5 chore: nil pointer dereference fix in req.Filter 2026-04-04 20:52:08 +05:30
nikhilmantri0902
c8e5895dff chore: nil pointer check 2026-04-04 20:45:04 +05:30
nikhilmantri0902
82d72e7edb chore: pods api meta start time 2026-04-04 17:18:04 +05:30
nikhilmantri0902
a3f8ecaaf1 chore: merged base branch 2026-04-04 16:47:10 +05:30
nikhilmantri0902
19aada656c chore: updated spec 2026-04-04 16:44:15 +05:30
nikhilmantri0902
b21bb4280f chore: updated openapi yml 2026-04-04 16:38:22 +05:30
nikhilmantri0902
bc0a4fdb5c chore: added pods list logic 2026-04-04 13:24:46 +05:30
nikhilmantri0902
37fb0e9254 Merge branch 'infraM/base_dependencies' into infraM/v2_hosts_list_api 2026-04-03 17:49:00 +05:30
nikhilmantri0902
aecfa1a174 chore: added validation on order by 2026-04-02 20:13:30 +05:30
nikhilmantri0902
b869d23d94 chore: moved funcs 2026-04-02 20:02:22 +05:30
nikhilmantri0902
6ee3d44f76 chore: removed isSendingK8sAgentsMetricsCode 2026-04-02 19:58:30 +05:30
nikhilmantri0902
462e554107 chore: yarn generate api 2026-04-02 14:49:15 +05:30
nikhilmantri0902
66afa73e6f chore: return status as a string 2026-04-02 14:39:02 +05:30
nikhilmantri0902
54c604bcf4 chore: added some unit tests 2026-04-02 14:20:27 +05:30
nikhilmantri0902
c1be02ba54 chore: added validate function 2026-04-02 14:14:34 +05:30
nikhilmantri0902
d3c7ba8f45 chore: disk usage 2026-04-02 14:01:18 +05:30
nikhilmantri0902
039c4a0496 fix: bug fix 2026-04-02 11:32:49 +05:30
nikhilmantri0902
51a94b6bbc chore: added logic for hosts v3 api 2026-04-02 02:52:28 +05:30
nikhilmantri0902
bbfbb94f52 chore: merged main 2026-04-01 00:45:40 +05:30
nikhilmantri0902
d1eb9ef16f chore: endpoint detail update 2026-03-31 16:16:31 +05:30
nikhilmantri0902
3db00f8bc3 chore: baseline setup 2026-03-31 15:27:18 +05:30
87 changed files with 4264 additions and 1689 deletions

View File

@@ -2103,8 +2103,6 @@ components:
type: boolean
org_id:
type: string
source:
type: string
updatedAt:
format: date-time
type: string
@@ -2136,7 +2134,6 @@ components:
type: object
DashboardtypesStorableDashboardData:
additionalProperties: {}
nullable: true
type: object
DashboardtypesUpdatablePublicDashboard:
properties:
@@ -2477,6 +2474,132 @@ components:
- requiredMetricsCheck
- endTimeBeforeRetention
type: object
InframonitoringtypesNamespaceRecord:
properties:
failedPodCount:
type: integer
meta:
additionalProperties: {}
nullable: true
type: object
namespaceCPU:
format: double
type: number
namespaceMemory:
format: double
type: number
namespaceName:
type: string
pendingPodCount:
type: integer
runningPodCount:
type: integer
succeededPodCount:
type: integer
unknownPodCount:
type: integer
required:
- namespaceName
- namespaceCPU
- namespaceMemory
- pendingPodCount
- runningPodCount
- succeededPodCount
- failedPodCount
- unknownPodCount
- meta
type: object
InframonitoringtypesNamespaces:
properties:
endTimeBeforeRetention:
type: boolean
records:
items:
$ref: '#/components/schemas/InframonitoringtypesNamespaceRecord'
nullable: true
type: array
requiredMetricsCheck:
$ref: '#/components/schemas/InframonitoringtypesRequiredMetricsCheck'
total:
type: integer
type:
$ref: '#/components/schemas/InframonitoringtypesResponseType'
warning:
$ref: '#/components/schemas/Querybuildertypesv5QueryWarnData'
required:
- type
- records
- total
- requiredMetricsCheck
- endTimeBeforeRetention
type: object
InframonitoringtypesNodeCondition:
enum:
- ready
- not_ready
- ""
type: string
InframonitoringtypesNodeRecord:
properties:
condition:
$ref: '#/components/schemas/InframonitoringtypesNodeCondition'
meta:
additionalProperties: {}
nullable: true
type: object
nodeCPU:
format: double
type: number
nodeCPUAllocatable:
format: double
type: number
nodeMemory:
format: double
type: number
nodeMemoryAllocatable:
format: double
type: number
nodeName:
type: string
notReadyNodesCount:
type: integer
readyNodesCount:
type: integer
required:
- nodeName
- condition
- readyNodesCount
- notReadyNodesCount
- nodeCPU
- nodeCPUAllocatable
- nodeMemory
- nodeMemoryAllocatable
- meta
type: object
InframonitoringtypesNodes:
properties:
endTimeBeforeRetention:
type: boolean
records:
items:
$ref: '#/components/schemas/InframonitoringtypesNodeRecord'
nullable: true
type: array
requiredMetricsCheck:
$ref: '#/components/schemas/InframonitoringtypesRequiredMetricsCheck'
total:
type: integer
type:
$ref: '#/components/schemas/InframonitoringtypesResponseType'
warning:
$ref: '#/components/schemas/Querybuildertypesv5QueryWarnData'
required:
- type
- records
- total
- requiredMetricsCheck
- endTimeBeforeRetention
type: object
InframonitoringtypesPodPhase:
enum:
- pending
@@ -2594,6 +2717,58 @@ components:
- end
- limit
type: object
InframonitoringtypesPostableNamespaces:
properties:
end:
format: int64
type: integer
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
nullable: true
type: array
limit:
type: integer
offset:
type: integer
orderBy:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
start:
format: int64
type: integer
required:
- start
- end
- limit
type: object
InframonitoringtypesPostableNodes:
properties:
end:
format: int64
type: integer
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
nullable: true
type: array
limit:
type: integer
offset:
type: integer
orderBy:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
start:
format: int64
type: integer
required:
- start
- end
- limit
type: object
InframonitoringtypesPostablePods:
properties:
end:
@@ -10058,186 +10233,6 @@ paths:
summary: Update a span mapper
tags:
- spanmapper
/api/v1/system/{source}:
get:
deprecated: false
description: This endpoint returns the system dashboard for the callers org
keyed by source (e.g. ai-o11y-overview).
operationId: GetSystemDashboard
parameters:
- in: path
name: source
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/DashboardtypesDashboard'
status:
type: string
required:
- status
- data
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Get system dashboard
tags:
- systemdashboard
put:
deprecated: false
description: This endpoint replaces the system dashboard for the callers org
with the provided payload.
operationId: UpdateSystemDashboard
parameters:
- in: path
name: source
required: true
schema:
type: string
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/DashboardtypesStorableDashboardData'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/DashboardtypesDashboard'
status:
type: string
required:
- status
- data
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- EDITOR
- tokenizer:
- EDITOR
summary: Update system dashboard
tags:
- systemdashboard
/api/v1/system/{source}/reset:
post:
deprecated: false
description: This resets edited/updated system dashboard to default system dashboard.
operationId: ResetSystemDashboard
parameters:
- in: path
name: source
required: true
schema:
type: string
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/DashboardtypesDashboard'
status:
type: string
required:
- status
- data
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- EDITOR
- tokenizer:
- EDITOR
summary: Reset system dashboard to defaults
tags:
- systemdashboard
/api/v1/testChannel:
post:
deprecated: true
@@ -11283,6 +11278,145 @@ paths:
summary: List Hosts for Infra Monitoring
tags:
- inframonitoring
/api/v2/infra_monitoring/namespaces:
post:
deprecated: false
description: 'Returns a paginated list of Kubernetes namespaces with key aggregated
pod metrics: CPU usage and memory working set (summed across pods in the group),
plus per-group pod counts bucketed by each pod''s latest k8s.pod.phase value
in the window (pendingPodCount, runningPodCount, succeededPodCount, failedPodCount,
unknownPodCount). Each namespace includes metadata attributes (k8s.namespace.name,
k8s.cluster.name). The response type is ''list'' for the default k8s.namespace.name
grouping or ''grouped_list'' for custom groupBy keys; in both modes every
row aggregates pods in the group. Supports filtering via a filter expression,
custom groupBy, ordering by cpu / memory, and pagination via offset/limit.
Also reports missing required metrics and whether the requested time range
falls before the data retention boundary. Numeric metric fields (namespaceCPU,
namespaceMemory) return -1 as a sentinel when no data is available for that
field.'
operationId: ListNamespaces
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/InframonitoringtypesPostableNamespaces'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/InframonitoringtypesNamespaces'
status:
type: string
required:
- status
- data
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: List Namespaces for Infra Monitoring
tags:
- inframonitoring
/api/v2/infra_monitoring/nodes:
post:
deprecated: false
description: 'Returns a paginated list of Kubernetes nodes with key metrics:
CPU usage, CPU allocatable, memory working set, memory allocatable, and per-group
readyNodesCount / notReadyNodesCount derived from each node''s latest k8s.node.condition_ready
value in the window. Each node includes metadata attributes (k8s.node.uid,
k8s.cluster.name). The response type is ''list'' for the default k8s.node.name
grouping (each row is one node with its current condition string: ready /
not_ready / '''') or ''grouped_list'' for custom groupBy keys (each row aggregates
nodes in the group with readyNodesCount and notReadyNodesCount; condition
stays empty). Supports filtering via a filter expression, custom groupBy,
ordering by cpu / cpu_allocatable / memory / memory_allocatable, and pagination
via offset/limit. Also reports missing required metrics and whether the requested
time range falls before the data retention boundary. Numeric metric fields
(nodeCPU, nodeCPUAllocatable, nodeMemory, nodeMemoryAllocatable) return -1
as a sentinel when no data is available for that field.'
operationId: ListNodes
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/InframonitoringtypesPostableNodes'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/InframonitoringtypesNodes'
status:
type: string
required:
- status
- data
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: List Nodes for Infra Monitoring
tags:
- inframonitoring
/api/v2/infra_monitoring/pods:
post:
deprecated: false

View File

@@ -13,8 +13,12 @@ import type {
import type {
InframonitoringtypesPostableHostsDTO,
InframonitoringtypesPostableNamespacesDTO,
InframonitoringtypesPostableNodesDTO,
InframonitoringtypesPostablePodsDTO,
ListHosts200,
ListNamespaces200,
ListNodes200,
ListPods200,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';
@@ -106,6 +110,174 @@ export const useListHosts = <
return useMutation(mutationOptions);
};
/**
 * POSTs the given payload to `/api/v2/infra_monitoring/namespaces` and
 * resolves with the paginated namespaces listing (`ListNamespaces200`).
 * An optional AbortSignal cancels the in-flight request.
 * @summary List Namespaces for Infra Monitoring
 */
export const listNamespaces = (
  inframonitoringtypesPostableNamespacesDTO: BodyType<InframonitoringtypesPostableNamespacesDTO>,
  signal?: AbortSignal,
) =>
  GeneratedAPIInstance<ListNamespaces200>({
    method: 'POST',
    url: `/api/v2/infra_monitoring/namespaces`,
    data: inframonitoringtypesPostableNamespacesDTO,
    headers: { 'Content-Type': 'application/json' },
    signal,
  });
/**
 * Builds the React-Query mutation options for `listNamespaces`.
 * Caller-supplied options are kept verbatim when they already carry a
 * mutationKey; otherwise the default `['listNamespaces']` key is injected.
 */
export const getListNamespacesMutationOptions = <
  TError = ErrorType<RenderErrorResponseDTO>,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof listNamespaces>>,
    TError,
    { data: BodyType<InframonitoringtypesPostableNamespacesDTO> },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof listNamespaces>>,
  TError,
  { data: BodyType<InframonitoringtypesPostableNamespacesDTO> },
  TContext
> => {
  const mutationKey = ['listNamespaces'];

  // Decide which options object to destructure from: untouched caller
  // options (key already present), caller options with the default key
  // merged in, or a fresh object holding only the default key.
  let resolved;
  if (!options) {
    resolved = { mutation: { mutationKey } };
  } else if (
    options.mutation &&
    'mutationKey' in options.mutation &&
    options.mutation.mutationKey
  ) {
    resolved = options;
  } else {
    resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
  }
  const { mutation: mutationOptions } = resolved;

  // The mutation function unwraps the `{ data }` variables object and
  // forwards the payload to the raw request helper.
  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof listNamespaces>>,
    { data: BodyType<InframonitoringtypesPostableNamespacesDTO> }
  > = (props) => {
    const { data } = props ?? {};
    return listNamespaces(data);
  };

  return { mutationFn, ...mutationOptions };
};
// Resolved (non-null) response type of a successful listNamespaces call.
export type ListNamespacesMutationResult = NonNullable<
  Awaited<ReturnType<typeof listNamespaces>>
>;
// Request-body type accepted by the listNamespaces mutation.
export type ListNamespacesMutationBody =
  BodyType<InframonitoringtypesPostableNamespacesDTO>;
// Error shape surfaced when the listNamespaces mutation fails.
export type ListNamespacesMutationError = ErrorType<RenderErrorResponseDTO>;
/**
 * React hook wrapping `listNamespaces` in a React-Query mutation.
 * @summary List Namespaces for Infra Monitoring
 */
export const useListNamespaces = <
  TError = ErrorType<RenderErrorResponseDTO>,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof listNamespaces>>,
    TError,
    { data: BodyType<InframonitoringtypesPostableNamespacesDTO> },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof listNamespaces>>,
  TError,
  { data: BodyType<InframonitoringtypesPostableNamespacesDTO> },
  TContext
> =>
  // Delegate option normalization (default mutationKey, mutationFn) to the
  // shared builder and hand the result straight to useMutation.
  useMutation(getListNamespacesMutationOptions(options));
/**
 * POSTs the given payload to `/api/v2/infra_monitoring/nodes` and resolves
 * with the paginated nodes listing (`ListNodes200`). An optional
 * AbortSignal cancels the in-flight request.
 * @summary List Nodes for Infra Monitoring
 */
export const listNodes = (
  inframonitoringtypesPostableNodesDTO: BodyType<InframonitoringtypesPostableNodesDTO>,
  signal?: AbortSignal,
) =>
  GeneratedAPIInstance<ListNodes200>({
    method: 'POST',
    url: `/api/v2/infra_monitoring/nodes`,
    data: inframonitoringtypesPostableNodesDTO,
    headers: { 'Content-Type': 'application/json' },
    signal,
  });
/**
 * Builds the React-Query mutation options for `listNodes`.
 * Caller-supplied options are kept verbatim when they already carry a
 * mutationKey; otherwise the default `['listNodes']` key is injected.
 */
export const getListNodesMutationOptions = <
  TError = ErrorType<RenderErrorResponseDTO>,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof listNodes>>,
    TError,
    { data: BodyType<InframonitoringtypesPostableNodesDTO> },
    TContext
  >;
}): UseMutationOptions<
  Awaited<ReturnType<typeof listNodes>>,
  TError,
  { data: BodyType<InframonitoringtypesPostableNodesDTO> },
  TContext
> => {
  const mutationKey = ['listNodes'];

  // Decide which options object to destructure from: untouched caller
  // options (key already present), caller options with the default key
  // merged in, or a fresh object holding only the default key.
  let resolved;
  if (!options) {
    resolved = { mutation: { mutationKey } };
  } else if (
    options.mutation &&
    'mutationKey' in options.mutation &&
    options.mutation.mutationKey
  ) {
    resolved = options;
  } else {
    resolved = { ...options, mutation: { ...options.mutation, mutationKey } };
  }
  const { mutation: mutationOptions } = resolved;

  // The mutation function unwraps the `{ data }` variables object and
  // forwards the payload to the raw request helper.
  const mutationFn: MutationFunction<
    Awaited<ReturnType<typeof listNodes>>,
    { data: BodyType<InframonitoringtypesPostableNodesDTO> }
  > = (props) => {
    const { data } = props ?? {};
    return listNodes(data);
  };

  return { mutationFn, ...mutationOptions };
};
// Resolved (non-null) response of a successful listNodes call.
export type ListNodesMutationResult = NonNullable<
  Awaited<ReturnType<typeof listNodes>>
>;
// Request body accepted by listNodes.
export type ListNodesMutationBody =
  BodyType<InframonitoringtypesPostableNodesDTO>;
// Error shape surfaced when a listNodes request fails.
export type ListNodesMutationError = ErrorType<RenderErrorResponseDTO>;
/**
 * React-query mutation hook wrapping {@link listNodes}.
 * @summary List Nodes for Infra Monitoring
 */
export const useListNodes = <
  TError = ErrorType<RenderErrorResponseDTO>,
  TContext = unknown,
>(options?: {
  mutation?: UseMutationOptions<
    Awaited<ReturnType<typeof listNodes>>,
    TError,
    { data: BodyType<InframonitoringtypesPostableNodesDTO> },
    TContext
  >;
}): UseMutationResult<
  Awaited<ReturnType<typeof listNodes>>,
  TError,
  { data: BodyType<InframonitoringtypesPostableNodesDTO> },
  TContext
> => useMutation(getListNodesMutationOptions(options));
/**
* Returns a paginated list of Kubernetes pods with key metrics: CPU usage, CPU request/limit utilization, memory working set, memory request/limit utilization, current pod phase (pending/running/succeeded/failed/unknown), and pod age (ms since start time). Each pod includes metadata attributes (namespace, node, workload owner such as deployment/statefulset/daemonset/job/cronjob, cluster). Supports filtering via a filter expression, custom groupBy to aggregate pods by any attribute, ordering by any of the six metrics (cpu, cpu_request, cpu_limit, memory, memory_request, memory_limit), and pagination via offset/limit. The response type is 'list' for the default k8s.pod.uid grouping (each row is one pod with its current phase) or 'grouped_list' for custom groupBy keys (each row aggregates pods in the group with per-phase counts: pendingPodCount, runningPodCount, succeededPodCount, failedPodCount, unknownPodCount derived from each pod's latest phase in the window). Also reports missing required metrics and whether the requested time range falls before the data retention boundary. Numeric metric fields (podCPU, podCPURequest, podCPULimit, podMemory, podMemoryRequest, podMemoryLimit, podAge) return -1 as a sentinel when no data is available for that field.
* @summary List Pods for Infra Monitoring

View File

@@ -2781,10 +2781,6 @@ export interface DashboardtypesDashboardDTO {
* @type string
*/
org_id?: string;
/**
* @type string
*/
source?: string;
/**
* @type string
* @format date-time
@@ -2827,12 +2823,9 @@ export interface DashboardtypesPostablePublicDashboardDTO {
timeRangeEnabled?: boolean;
}
/**
* @nullable
*/
export type DashboardtypesStorableDashboardDataDTO = {
export interface DashboardtypesStorableDashboardDataDTO {
[key: string]: unknown;
} | null;
}
export interface DashboardtypesUpdatablePublicDashboardDTO {
/**
@@ -3250,6 +3243,146 @@ export interface InframonitoringtypesHostsDTO {
warning?: Querybuildertypesv5QueryWarnDataDTO;
}
/**
* @nullable
*/
export type InframonitoringtypesNamespaceRecordDTOMeta = {
[key: string]: unknown;
} | null;
export interface InframonitoringtypesNamespaceRecordDTO {
/**
* @type integer
*/
failedPodCount: number;
/**
* @type object
* @nullable true
*/
meta: InframonitoringtypesNamespaceRecordDTOMeta;
/**
* @type number
* @format double
*/
namespaceCPU: number;
/**
* @type number
* @format double
*/
namespaceMemory: number;
/**
* @type string
*/
namespaceName: string;
/**
* @type integer
*/
pendingPodCount: number;
/**
* @type integer
*/
runningPodCount: number;
/**
* @type integer
*/
succeededPodCount: number;
/**
* @type integer
*/
unknownPodCount: number;
}
export interface InframonitoringtypesNamespacesDTO {
/**
* @type boolean
*/
endTimeBeforeRetention: boolean;
/**
* @type array
* @nullable true
*/
records: InframonitoringtypesNamespaceRecordDTO[] | null;
requiredMetricsCheck: InframonitoringtypesRequiredMetricsCheckDTO;
/**
* @type integer
*/
total: number;
type: InframonitoringtypesResponseTypeDTO;
warning?: Querybuildertypesv5QueryWarnDataDTO;
}
// Node readiness derived from the latest k8s.node.condition_ready value in the
// queried window; the empty string is returned for grouped rows (and when the
// condition is unavailable).
export enum InframonitoringtypesNodeConditionDTO {
  ready = 'ready',
  not_ready = 'not_ready',
  '' = '',
}
/**
* @nullable
*/
export type InframonitoringtypesNodeRecordDTOMeta = {
[key: string]: unknown;
} | null;
export interface InframonitoringtypesNodeRecordDTO {
condition: InframonitoringtypesNodeConditionDTO;
/**
* @type object
* @nullable true
*/
meta: InframonitoringtypesNodeRecordDTOMeta;
/**
* @type number
* @format double
*/
nodeCPU: number;
/**
* @type number
* @format double
*/
nodeCPUAllocatable: number;
/**
* @type number
* @format double
*/
nodeMemory: number;
/**
* @type number
* @format double
*/
nodeMemoryAllocatable: number;
/**
* @type string
*/
nodeName: string;
/**
* @type integer
*/
notReadyNodesCount: number;
/**
* @type integer
*/
readyNodesCount: number;
}
export interface InframonitoringtypesNodesDTO {
/**
* @type boolean
*/
endTimeBeforeRetention: boolean;
/**
* @type array
* @nullable true
*/
records: InframonitoringtypesNodeRecordDTO[] | null;
requiredMetricsCheck: InframonitoringtypesRequiredMetricsCheckDTO;
/**
* @type integer
*/
total: number;
type: InframonitoringtypesResponseTypeDTO;
warning?: Querybuildertypesv5QueryWarnDataDTO;
}
export enum InframonitoringtypesPodPhaseDTO {
pending = 'pending',
running = 'running',
@@ -3380,6 +3513,62 @@ export interface InframonitoringtypesPostableHostsDTO {
start: number;
}
export interface InframonitoringtypesPostableNamespacesDTO {
/**
* @type integer
* @format int64
*/
end: number;
filter?: Querybuildertypesv5FilterDTO;
/**
* @type array
* @nullable true
*/
groupBy?: Querybuildertypesv5GroupByKeyDTO[] | null;
/**
* @type integer
*/
limit: number;
/**
* @type integer
*/
offset?: number;
orderBy?: Querybuildertypesv5OrderByDTO;
/**
* @type integer
* @format int64
*/
start: number;
}
export interface InframonitoringtypesPostableNodesDTO {
/**
* @type integer
* @format int64
*/
end: number;
filter?: Querybuildertypesv5FilterDTO;
/**
* @type array
* @nullable true
*/
groupBy?: Querybuildertypesv5GroupByKeyDTO[] | null;
/**
* @type integer
*/
limit: number;
/**
* @type integer
*/
offset?: number;
orderBy?: Querybuildertypesv5OrderByDTO;
/**
* @type integer
* @format int64
*/
start: number;
}
export interface InframonitoringtypesPostablePodsDTO {
/**
* @type integer
@@ -7342,39 +7531,6 @@ export type UpdateSpanMapperPathParameters = {
groupId: string;
mapperId: string;
};
export type GetSystemDashboardPathParameters = {
source: string;
};
export type GetSystemDashboard200 = {
data: DashboardtypesDashboardDTO;
/**
* @type string
*/
status: string;
};
export type UpdateSystemDashboardPathParameters = {
source: string;
};
export type UpdateSystemDashboard200 = {
data: DashboardtypesDashboardDTO;
/**
* @type string
*/
status: string;
};
export type ResetSystemDashboardPathParameters = {
source: string;
};
export type ResetSystemDashboard200 = {
data: DashboardtypesDashboardDTO;
/**
* @type string
*/
status: string;
};
export type ListUsersDeprecated200 = {
/**
* @type array
@@ -7557,6 +7713,22 @@ export type ListHosts200 = {
status: string;
};
export type ListNamespaces200 = {
data: InframonitoringtypesNamespacesDTO;
/**
* @type string
*/
status: string;
};
export type ListNodes200 = {
data: InframonitoringtypesNodesDTO;
/**
* @type string
*/
status: string;
};
export type ListPods200 = {
data: InframonitoringtypesPodsDTO;
/**

View File

@@ -1,318 +0,0 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import { useMutation, useQuery } from 'react-query';
import type {
InvalidateOptions,
MutationFunction,
QueryClient,
QueryFunction,
QueryKey,
UseMutationOptions,
UseMutationResult,
UseQueryOptions,
UseQueryResult,
} from 'react-query';
import type {
DashboardtypesStorableDashboardDataDTO,
GetSystemDashboard200,
GetSystemDashboardPathParameters,
RenderErrorResponseDTO,
ResetSystemDashboard200,
ResetSystemDashboardPathParameters,
UpdateSystemDashboard200,
UpdateSystemDashboardPathParameters,
} from '../sigNoz.schemas';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type { ErrorType, BodyType } from '../../../generatedAPIInstance';
/**
* This endpoint returns the system dashboard for the callers org keyed by source (e.g. ai-o11y-overview).
* @summary Get system dashboard
*/
export const getSystemDashboard = (
{ source }: GetSystemDashboardPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetSystemDashboard200>({
url: `/api/v1/system/${source}`,
method: 'GET',
signal,
});
};
export const getGetSystemDashboardQueryKey = ({
source,
}: GetSystemDashboardPathParameters) => {
return [`/api/v1/system/${source}`] as const;
};
export const getGetSystemDashboardQueryOptions = <
TData = Awaited<ReturnType<typeof getSystemDashboard>>,
TError = ErrorType<RenderErrorResponseDTO>,
>(
{ source }: GetSystemDashboardPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getSystemDashboard>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};
const queryKey =
queryOptions?.queryKey ?? getGetSystemDashboardQueryKey({ source });
const queryFn: QueryFunction<
Awaited<ReturnType<typeof getSystemDashboard>>
> = ({ signal }) => getSystemDashboard({ source }, signal);
return {
queryKey,
queryFn,
enabled: !!source,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getSystemDashboard>>,
TError,
TData
> & { queryKey: QueryKey };
};
export type GetSystemDashboardQueryResult = NonNullable<
Awaited<ReturnType<typeof getSystemDashboard>>
>;
export type GetSystemDashboardQueryError = ErrorType<RenderErrorResponseDTO>;
/**
* @summary Get system dashboard
*/
export function useGetSystemDashboard<
TData = Awaited<ReturnType<typeof getSystemDashboard>>,
TError = ErrorType<RenderErrorResponseDTO>,
>(
{ source }: GetSystemDashboardPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getSystemDashboard>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetSystemDashboardQueryOptions({ source }, options);
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};
query.queryKey = queryOptions.queryKey;
return query;
}
/**
* @summary Get system dashboard
*/
export const invalidateGetSystemDashboard = async (
queryClient: QueryClient,
{ source }: GetSystemDashboardPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetSystemDashboardQueryKey({ source }) },
options,
);
return queryClient;
};
/**
* This endpoint replaces the system dashboard for the callers org with the provided payload.
* @summary Update system dashboard
*/
export const updateSystemDashboard = (
{ source }: UpdateSystemDashboardPathParameters,
dashboardtypesStorableDashboardDataDTO: BodyType<DashboardtypesStorableDashboardDataDTO>,
) => {
return GeneratedAPIInstance<UpdateSystemDashboard200>({
url: `/api/v1/system/${source}`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: dashboardtypesStorableDashboardDataDTO,
});
};
export const getUpdateSystemDashboardMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateSystemDashboard>>,
TError,
{
pathParams: UpdateSystemDashboardPathParameters;
data: BodyType<DashboardtypesStorableDashboardDataDTO>;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof updateSystemDashboard>>,
TError,
{
pathParams: UpdateSystemDashboardPathParameters;
data: BodyType<DashboardtypesStorableDashboardDataDTO>;
},
TContext
> => {
const mutationKey = ['updateSystemDashboard'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof updateSystemDashboard>>,
{
pathParams: UpdateSystemDashboardPathParameters;
data: BodyType<DashboardtypesStorableDashboardDataDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
return updateSystemDashboard(pathParams, data);
};
return { mutationFn, ...mutationOptions };
};
export type UpdateSystemDashboardMutationResult = NonNullable<
Awaited<ReturnType<typeof updateSystemDashboard>>
>;
export type UpdateSystemDashboardMutationBody =
BodyType<DashboardtypesStorableDashboardDataDTO>;
export type UpdateSystemDashboardMutationError =
ErrorType<RenderErrorResponseDTO>;
/**
* @summary Update system dashboard
*/
export const useUpdateSystemDashboard = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateSystemDashboard>>,
TError,
{
pathParams: UpdateSystemDashboardPathParameters;
data: BodyType<DashboardtypesStorableDashboardDataDTO>;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof updateSystemDashboard>>,
TError,
{
pathParams: UpdateSystemDashboardPathParameters;
data: BodyType<DashboardtypesStorableDashboardDataDTO>;
},
TContext
> => {
const mutationOptions = getUpdateSystemDashboardMutationOptions(options);
return useMutation(mutationOptions);
};
/**
* This resets edited/updated system dashboard to default system dashboard.
* @summary Reset system dashboard to defaults
*/
export const resetSystemDashboard = (
{ source }: ResetSystemDashboardPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<ResetSystemDashboard200>({
url: `/api/v1/system/${source}/reset`,
method: 'POST',
signal,
});
};
export const getResetSystemDashboardMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof resetSystemDashboard>>,
TError,
{ pathParams: ResetSystemDashboardPathParameters },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof resetSystemDashboard>>,
TError,
{ pathParams: ResetSystemDashboardPathParameters },
TContext
> => {
const mutationKey = ['resetSystemDashboard'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };
const mutationFn: MutationFunction<
Awaited<ReturnType<typeof resetSystemDashboard>>,
{ pathParams: ResetSystemDashboardPathParameters }
> = (props) => {
const { pathParams } = props ?? {};
return resetSystemDashboard(pathParams);
};
return { mutationFn, ...mutationOptions };
};
export type ResetSystemDashboardMutationResult = NonNullable<
Awaited<ReturnType<typeof resetSystemDashboard>>
>;
export type ResetSystemDashboardMutationError =
ErrorType<RenderErrorResponseDTO>;
/**
* @summary Reset system dashboard to defaults
*/
export const useResetSystemDashboard = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown,
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof resetSystemDashboard>>,
TError,
{ pathParams: ResetSystemDashboardPathParameters },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof resetSystemDashboard>>,
TError,
{ pathParams: ResetSystemDashboardPathParameters },
TContext
> => {
const mutationOptions = getResetSystemDashboardMutationOptions(options);
return useMutation(mutationOptions);
};

View File

@@ -0,0 +1,22 @@
// Wrapper establishes the positioning context for the absolutely-placed
// copy button rendered inside the code block.
.codeBlock {
  position: relative;
}
// Overrides react-syntax-highlighter's inline theme with the app's design
// tokens; !important is required to beat the library's inline styles.
.codeBlockSyntaxHighlighter {
  background-color: var(--l2-background) !important;
  border-radius: 4px !important;
  border: 1px solid var(--l2-border) !important;
  color: var(--l2-foreground) !important;
  pre {
    color: var(--l2-foreground) !important;
    font-family: 'Geist Mono' !important;
    font-size: 12px !important;
  }
  code {
    color: var(--l1-foreground) !important;
    font-family: 'Geist Mono' !important;
    font-size: 12px !important;
  }
}

View File

@@ -0,0 +1,46 @@
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
import CodeBlock from './CodeBlock';
// NOTE: the `mock` prefix is required — jest hoists the jest.mock() factory
// above this declaration and only permits references to out-of-scope
// variables whose names begin with `mock`. Do not rename or reorder.
const mockCopyToClipboard = jest.fn();
jest.mock('react-use', () => ({
  useCopyToClipboard: (): [unknown, (text: string) => void] => [
    undefined,
    mockCopyToClipboard,
  ],
}));
describe('CodeBlock', () => {
  beforeEach(() => {
    // Reset call history so copy assertions are isolated per test.
    mockCopyToClipboard.mockReset();
  });
  it('renders code block mode by default', () => {
    render(<CodeBlock code={'const x = 1;\n'} language="javascript" />);
    const container = screen.getByTestId('code-block-container');
    expect(container).toBeInTheDocument();
    // The component strips the trailing newline before rendering.
    expect(container).toHaveTextContent('const x = 1;');
  });
  it('renders inline code when inline is true', () => {
    render(<CodeBlock code="inline value" inline />);
    const inlineCode = screen.getByText('inline value');
    expect(inlineCode.tagName.toLowerCase()).toBe('code');
    // Inline mode renders a bare <code>, never the block container.
    expect(screen.queryByTestId('code-block-container')).not.toBeInTheDocument();
  });
  it('copies code and triggers callback', async () => {
    const onCopy = jest.fn();
    render(<CodeBlock code="SELECT * FROM logs;" onCopy={onCopy} />);
    fireEvent.click(screen.getByRole('button', { name: /copy code/i }));
    await waitFor(() => {
      expect(mockCopyToClipboard).toHaveBeenCalledWith('SELECT * FROM logs;');
    });
    expect(onCopy).toHaveBeenCalledWith('SELECT * FROM logs;');
  });
});

View File

@@ -0,0 +1,89 @@
import { useMemo, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Check, Copy } from '@signozhq/icons';
import { Button } from '@signozhq/ui';
import SyntaxHighlighter, {
a11yDark,
} from 'components/MarkdownRenderer/syntaxHighlighter';
import styles from './CodeBlock.module.scss';
export interface CodeBlockProps {
  /** Raw code text to render; a single trailing newline is stripped. */
  code: string;
  /** Syntax-highlighter language id (e.g. 'javascript'); defaults to plain text. */
  language?: string;
  /** Extra class appended to the container (block mode) or <code> (inline mode). */
  className?: string;
  /** Render as an inline <code> element instead of a highlighted block. */
  inline?: boolean;
  showLineNumbers?: boolean;
  showCopyButton?: boolean;
  /** Called with the normalized code after the copy button is clicked. */
  onCopy?: (copiedCode: string) => void;
}
/**
 * Renders code either inline or as a syntax-highlighted block with an
 * optional copy-to-clipboard button.
 */
function CodeBlock({
  code,
  language = 'text',
  className,
  inline = false,
  showLineNumbers = false,
  showCopyButton = true,
  onCopy,
}: CodeBlockProps): JSX.Element {
  const [isCopied, setIsCopied] = useState(false);
  const [, copyToClipboard] = useCopyToClipboard();
  // Drop a single trailing newline so the highlighter does not render an
  // empty final line; also guards against a nullish `code` value.
  const normalizedCode = useMemo(() => code?.replace(/\n$/, '') ?? '', [code]);
  const handleCopy = (): void => {
    copyToClipboard(normalizedCode);
    setIsCopied(true);
    onCopy?.(normalizedCode);
    // Flip the check icon back to the copy icon after a short delay.
    setTimeout(() => {
      setIsCopied(false);
    }, 1000);
  };
  if (inline) {
    return <code className={className}>{normalizedCode}</code>;
  }
  // Fix: the previous template literal appended the literal string
  // "undefined" to the class list when no className prop was supplied.
  const containerClassName = className
    ? `${styles.codeBlock} ${className}`
    : styles.codeBlock;
  return (
    <div
      className={containerClassName}
      style={{ position: 'relative' }}
      data-testid="code-block-container"
    >
      {showCopyButton ? (
        <Button
          variant="ghost"
          color="secondary"
          size="sm"
          onClick={handleCopy}
          prefix={isCopied ? <Check size={14} /> : <Copy size={14} />}
          aria-label="Copy code"
          title={isCopied ? 'Copied' : 'Copy'}
          style={{ position: 'absolute', right: 8, top: 8, zIndex: 1 }}
        />
      ) : null}
      <SyntaxHighlighter
        style={a11yDark}
        language={language}
        PreTag="div"
        showLineNumbers={showLineNumbers}
        wrapLongLines
        className={styles.codeBlockSyntaxHighlighter}
      >
        {normalizedCode}
      </SyntaxHighlighter>
    </div>
  );
}
CodeBlock.defaultProps = {
  language: 'text',
  className: undefined,
  inline: false,
  showLineNumbers: false,
  showCopyButton: true,
  onCopy: undefined,
};
export default CodeBlock;

View File

@@ -1,8 +1,8 @@
import { Color } from '@signozhq/design-tokens';
import { Alert, Spin } from 'antd';
import { LoaderCircle, TriangleAlert } from 'lucide-react';
import { Callout } from '@signozhq/ui';
import { Spin } from 'antd';
import { LoaderCircle } from 'lucide-react';
import { ModalStateEnum } from '../types';
import { ModalStateEnum } from '../HeroSection/types';
function AlertMessage({
modalState,
@@ -12,14 +12,13 @@ function AlertMessage({
switch (modalState) {
case ModalStateEnum.WAITING:
return (
<Alert
message={
<Callout
title={
<div className="cloud-account-setup-form__alert-message">
<Spin
indicator={
<LoaderCircle
size={14}
color={Color.BG_AMBER_400}
className="anticon anticon-loading anticon-spin ant-spin-dot"
/>
}
@@ -28,21 +27,19 @@ function AlertMessage({
<span className="retry-time">10</span> secs...
</div>
}
className="cloud-account-setup-form__alert"
type="warning"
type="info"
showIcon={false}
/>
);
case ModalStateEnum.ERROR:
return (
<Alert
message={
<Callout
title={
<div className="cloud-account-setup-form__alert-message">
<TriangleAlert type="solid" size={15} color={Color.BG_SAKURA_400} />
{`We couldn't establish a connection to your AWS account. Please try again`}
</div>
}
type="error"
className="cloud-account-setup-form__alert"
/>
);
default:

View File

@@ -117,6 +117,12 @@
min-width: 140px !important;
}
&.azure {
.ant-select-selector {
min-width: 282px !important;
}
}
.ant-select-item-option-active {
background: var(--l3-background) !important;
}

View File

@@ -1,4 +1,4 @@
import { useEffect, useMemo, useState } from 'react';
import { Dispatch, SetStateAction, useEffect, useMemo, useState } from 'react';
import { useNavigate } from 'react-router-dom-v5-compat';
import { Color } from '@signozhq/design-tokens';
import { Button } from '@signozhq/ui';
@@ -6,19 +6,29 @@ import { Select, Skeleton } from 'antd';
import { SelectProps } from 'antd/lib';
import logEvent from 'api/common/logEvent';
import { useListAccounts } from 'api/generated/services/cloudintegration';
import cx from 'classnames';
import { getAccountById } from 'container/Integrations/CloudIntegration/utils';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import {
CloudAccount as IntegrationCloudAccount,
IntegrationType,
} from 'container/Integrations/types';
import useUrlQuery from 'hooks/useUrlQuery';
import { ChevronDown, Dot, PencilLine, Plug, Plus } from 'lucide-react';
import { mapAccountDtoToAwsCloudAccount } from '../../mapAwsCloudAccountFromDto';
import { CloudAccount } from '../../types';
import AccountSettingsModal from './AccountSettingsModal';
import CloudAccountSetupModal from './CloudAccountSetupModal';
import AzureCloudAccountSetupModal from '../../AzureCloudServices/AddNewAccount/CloudAccountSetupModal';
import AzureAccountSettingsModal from '../../AzureCloudServices/EditAccount/AccountSettingsModal';
import {
mapAccountDtoToAwsCloudAccount,
mapAccountDtoToAzureCloudAccount,
} from '../../mapCloudAccountFromDto';
import AwsCloudAccountSetupModal from '../AddNewAccount/CloudAccountSetupModal';
import AwsAccountSettingsModal from '../EditAccount/AccountSettingsModal';
import { CloudAccount as AwsCloudAccount } from '../types';
import './AccountActions.style.scss';
function AccountActionsRenderer({
type,
accounts,
isLoading,
activeAccount,
@@ -27,9 +37,10 @@ function AccountActionsRenderer({
onIntegrationModalOpen,
onAccountSettingsModalOpen,
}: {
accounts: CloudAccount[] | undefined;
type: IntegrationType;
accounts: IntegrationCloudAccount[] | undefined;
isLoading: boolean;
activeAccount: CloudAccount | null;
activeAccount: IntegrationCloudAccount | null;
selectOptions: SelectProps['options'];
onAccountChange: (value: string) => void;
onIntegrationModalOpen: () => void;
@@ -57,9 +68,11 @@ function AccountActionsRenderer({
<Select
value={activeAccount?.providerAccountId}
options={selectOptions}
rootClassName="cloud-account-selector"
rootClassName={cx('cloud-account-selector', {
[type.toLowerCase()]: type,
})}
popupMatchSelectWidth={false}
placeholder="Select AWS Account"
placeholder={`Select ${type} Account`}
suffixIcon={<ChevronDown size={16} color={Color.BG_VANILLA_400} />}
onChange={onAccountChange}
/>
@@ -102,21 +115,49 @@ function AccountActionsRenderer({
);
}
function AccountActions(): JSX.Element {
function AccountActions({ type }: { type: IntegrationType }): JSX.Element {
const urlQuery = useUrlQuery();
const navigate = useNavigate();
const { data: listAccountsResponse, isLoading } = useListAccounts({
cloudProvider: INTEGRATION_TYPES.AWS,
cloudProvider: type,
});
const accounts = useMemo((): CloudAccount[] | undefined => {
const accounts = useMemo((): IntegrationCloudAccount[] | undefined => {
const raw = listAccountsResponse?.data?.accounts;
if (!raw) {
return undefined;
}
return raw
.map(mapAccountDtoToAwsCloudAccount)
.filter((account): account is CloudAccount => account !== null);
}, [listAccountsResponse]);
const mappedAccounts: IntegrationCloudAccount[] = [];
if (type === IntegrationType.AWS_SERVICES) {
raw.forEach((account) => {
if (!account) {
return;
}
const mapped = mapAccountDtoToAwsCloudAccount(account);
if (mapped) {
mappedAccounts.push(mapped);
}
});
}
if (type === IntegrationType.AZURE_SERVICES) {
raw.forEach((account) => {
if (!account) {
return;
}
const mapped = mapAccountDtoToAzureCloudAccount(account);
if (mapped) {
mappedAccounts.push(mapped);
}
});
}
return mappedAccounts;
}, [listAccountsResponse, type]);
const initialAccount = useMemo(
() =>
@@ -127,9 +168,8 @@ function AccountActions(): JSX.Element {
[accounts, urlQuery],
);
const [activeAccount, setActiveAccount] = useState<CloudAccount | null>(
initialAccount,
);
const [activeAccount, setActiveAccount] =
useState<IntegrationCloudAccount | null>(initialAccount);
// Update state when initial value changes
useEffect(() => {
@@ -149,16 +189,17 @@ function AccountActions(): JSX.Element {
}, [initialAccount]);
const [isIntegrationModalOpen, setIsIntegrationModalOpen] = useState(false);
const startAccountConnectionAttempt = (): void => {
setIsIntegrationModalOpen(true);
logEvent('AWS Integration: Account connection attempt started', {});
logEvent(`${type} Integration: Account connection attempt started`, {});
};
const [isAccountSettingsModalOpen, setIsAccountSettingsModalOpen] =
useState(false);
const openAccountSettings = (): void => {
setIsAccountSettingsModalOpen(true);
logEvent('AWS Integration: Account settings viewed', {
logEvent(`${type} Integration: Account settings viewed`, {
cloudAccountId: activeAccount?.cloud_account_id,
});
};
@@ -166,13 +207,16 @@ function AccountActions(): JSX.Element {
// log telemetry event when an account is viewed.
useEffect(() => {
if (activeAccount) {
logEvent('AWS Integration: Account viewed', {
logEvent(`${type} Integration: Account viewed`, {
cloudAccountId: activeAccount?.cloud_account_id,
status: activeAccount?.status,
enabledRegions: activeAccount?.config?.regions,
enabledRegions:
'regions' in activeAccount.config
? activeAccount.config.regions
: activeAccount.config.resource_groups,
});
}
}, [activeAccount]);
}, [activeAccount, type]);
const selectOptions: SelectProps['options'] = useMemo(
() =>
@@ -188,6 +232,7 @@ function AccountActions(): JSX.Element {
return (
<div className="hero-section__actions">
<AccountActionsRenderer
type={type}
accounts={accounts}
isLoading={isLoading}
activeAccount={activeAccount}
@@ -204,17 +249,39 @@ function AccountActions(): JSX.Element {
/>
{isIntegrationModalOpen && (
<CloudAccountSetupModal
onClose={(): void => setIsIntegrationModalOpen(false)}
/>
<>
{type === IntegrationType.AWS_SERVICES && (
<AwsCloudAccountSetupModal
onClose={(): void => setIsIntegrationModalOpen(false)}
/>
)}
{type === IntegrationType.AZURE_SERVICES && (
<AzureCloudAccountSetupModal
onClose={(): void => setIsIntegrationModalOpen(false)}
/>
)}
</>
)}
{isAccountSettingsModalOpen && activeAccount && (
<AccountSettingsModal
onClose={(): void => setIsAccountSettingsModalOpen(false)}
account={activeAccount}
setActiveAccount={setActiveAccount}
/>
<>
{type === IntegrationType.AWS_SERVICES && (
<AwsAccountSettingsModal
onClose={(): void => setIsAccountSettingsModalOpen(false)}
account={activeAccount as AwsCloudAccount}
setActiveAccount={
setActiveAccount as Dispatch<SetStateAction<AwsCloudAccount | null>>
}
/>
)}
{type === IntegrationType.AZURE_SERVICES && (
<AzureAccountSettingsModal
onClose={(): void => setIsAccountSettingsModalOpen(false)}
account={activeAccount}
setActiveAccount={setActiveAccount}
/>
)}
</>
)}
</div>
);

View File

@@ -0,0 +1,346 @@
.cloud-account-setup-modal {
background: var(--l1-background);
color: var(--l1-foreground);
[data-slot='drawer-title'] {
color: var(--l1-foreground);
}
> div {
display: flex;
flex-direction: column;
overflow: hidden;
}
&__content {
  flex: 1;
  overflow-y: auto;
  min-height: 0;
  padding-right: 16px;
  &::-webkit-scrollbar-thumb {
    background: var(--l1-border);
  }
  &::-webkit-scrollbar-track {
    background: var(--l1-background);
  }
  // Firefox scrollbar styling (WebKit browsers use the pseudo-elements
  // above). Fix: `scrollbar-width: thin` was previously declared twice;
  // the duplicate has been removed.
  scrollbar-width: thin;
  scrollbar-color: var(--l3-background) var(--l1-background);
}
.cloud-account-setup-prerequisites {
display: flex;
flex-direction: column;
gap: 12px;
&__title {
color: var(--l1-foreground);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
}
&__list {
display: flex;
flex-direction: column;
gap: 8px;
list-style: none;
padding: 0;
margin: 0;
}
&__list-item {
color: var(--l2-foreground);
font-size: 13px;
line-height: 18px;
letter-spacing: -0.06px;
display: flex;
align-items: center;
gap: 16px;
&-bullet {
color: var(--primary);
font-weight: 500;
}
&-text {
display: flex;
align-items: center;
gap: 4px;
}
}
&__list-item-highlight {
color: var(--l1-foreground);
font-weight: 500;
}
}
.cloud-account-setup-how-it-works-accordion {
display: flex;
flex-direction: column;
margin: 24px 0;
&__title {
display: flex;
align-items: center;
color: var(--l1-foreground);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
border-radius: 4px;
border: 1px solid var(--l2-border);
background: var(--l2-background);
padding: 4px 16px 4px 0px;
&.open {
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
}
}
&__description {
display: flex;
flex-direction: column;
gap: 16px;
padding: 16px;
opacity: 0;
transform: translateY(-8px);
animation: cloud-account-setup-accordion-reveal 220ms ease-out forwards;
border-radius: 4px;
border-top: none;
border-top-left-radius: 0;
border-top-right-radius: 0;
border: 1px solid var(--l2-border);
background: var(--l2-background);
&-item {
display: flex;
flex-direction: column;
gap: 8px;
color: var(--l1-foreground);
font-size: 13px;
line-height: 18px;
letter-spacing: -0.06px;
}
@media (prefers-reduced-motion: reduce) {
animation: none;
opacity: 1;
transform: none;
}
}
}
.cloud-account-setup-form__code-block-tabs {
padding: 8px;
border-radius: 4px;
border: 1px solid var(--l2-border);
background: var(--l2-background);
&-header {
display: flex;
flex-direction: column;
gap: 12px;
margin-bottom: 12px;
&-title {
color: var(--l1-foreground);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
}
&-description {
color: var(--l2-foreground);
font-size: 12px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.06px;
}
}
[role='tablist'] {
gap: 8px !important;
}
[role='tabpanel'] {
padding: 0 !important;
}
[data-slot='tabs-trigger'] {
padding: 4px 24px !important;
border: none !important;
background-color: transparent !important;
font-size: 12px !important;
}
}
// Slide-down fade-in used by the "how it works" accordion body.
@keyframes cloud-account-setup-accordion-reveal {
  0% {
    opacity: 0;
    transform: translateY(-8px);
  }
  100% {
    opacity: 1;
    transform: translateY(0);
  }
}
// Footer action bar of the account-setup modal.
.account-setup-modal-footer {
  // Primary confirm action.
  &__confirm-button {
    background: var(--primary-background);
    color: var(--primary-foreground);
    font-size: 12px;
    font-weight: 500;
  }

  // Numeric badge (e.g. selection count) — monospace so digits align.
  &__confirm-selection-count {
    font-family: 'Geist Mono';
  }

  // Secondary close action; hover keeps the same colors intentionally
  // (overrides the library's default hover treatment).
  &__close-button {
    background: var(--l1-background);
    border: 1px solid var(--l1-border);
    border-radius: 2px;
    color: var(--l1-foreground);
    font-family: 'Inter';
    font-size: 12px;
    font-weight: 500;

    &:hover {
      border-color: var(--l1-border);
      color: var(--l1-foreground);
    }
  }
}
// Main account-setup form: headings, selects, note callout and submit button.
.cloud-account-setup-form {
  // Dims any subtree while the form is disabled (e.g. waiting for connection).
  .disabled {
    opacity: 0.4;
  }

  // The form root and its __content wrapper share the same column layout.
  &,
  &__content {
    display: flex;
    flex-direction: column;
    gap: 24px;
  }

  // Status callout (waiting / error); stretches the inner design-system
  // callout to the full form width.
  &__alert {
    width: 100%;
    [data-slot='callout'] {
      width: 100%;
      box-sizing: border-box;
    }

    &-message {
      display: flex;
      align-items: center;
      gap: 8px;
      color: var(--l1-foreground);

      // Countdown number inside the "retrying in N secs" message.
      .retry-time {
        font-family: 'Geist Mono';
        font-size: 14px;
        font-weight: 600;
        line-height: 22px;
        letter-spacing: -0.07px;
      }
    }
  }

  // One labelled question (title + description + input).
  &__form-group {
    display: flex;
    flex-direction: column;
    gap: 12px;
  }

  &__title {
    color: var(--l1-foreground);
    font-size: 14px;
    font-weight: 500;
    line-height: 20px;
    letter-spacing: -0.07px;
  }

  &__description {
    color: var(--l2-foreground);
    font-size: 12px;
    font-weight: 400;
    line-height: 18px;
    letter-spacing: -0.06px;
  }

  // Restyle antd Select's chosen-value text to match the form typography.
  &__select {
    .ant-select-selection-item {
      color: var(--l1-foreground);
      font-size: 14px;
      line-height: 20px;
      letter-spacing: -0.07px;
    }
  }

  // Strip antd Form.Item's default margin; spacing comes from the group gap.
  &__form-item {
    margin: 0;
  }

  // Switch + clickable label for "include all regions".
  &__include-all-regions-switch {
    display: flex;
    align-items: center;
    gap: 10px;
    color: var(--l2-foreground);
    font-size: 12px;
    line-height: 18px;
    letter-spacing: -0.06px;
    margin-bottom: 12px;

    // The label is a button for accessibility; styled to look like plain text.
    &-label {
      background-color: transparent;
      border: none;
      color: var(--l2-foreground);
      font-size: 12px;
      line-height: 18px;
      letter-spacing: -0.06px;
      cursor: pointer;
    }
  }

  // Informational note; tinted with 10% of the primary color for both the
  // border and the background.
  &__note {
    padding: 12px;
    color: var(--callout-primary-description);
    font-size: 12px;
    line-height: 22px;
    letter-spacing: -0.06px;
    border-radius: 4px;
    border: 1px solid
      color-mix(in srgb, var(--primary-background) 10%, transparent);
    background: color-mix(in srgb, var(--primary-background) 10%, transparent);
  }

  &__submit-button {
    border-radius: 2px;
    background: var(--primary-background);
    color: var(--primary-foreground);
    font-size: 14px;
    font-weight: 500;
    line-height: 20px;

    // Icon + label row inside the button.
    &-content {
      display: flex;
      align-items: center;
      justify-content: center;
      gap: 8px;
    }

    &:disabled {
      opacity: 0.4;
    }
  }
}
}

View File

@@ -8,8 +8,8 @@ import {
ActiveViewEnum,
IntegrationModalProps,
ModalStateEnum,
} from '../types';
import { RegionForm } from './RegionForm';
} from '../../../HeroSection/types';
import { RegionForm } from '../RegionForm/RegionForm';
import './CloudAccountSetupModal.style.scss';
@@ -74,8 +74,6 @@ function CloudAccountSetupModal({
isConnectionParamsLoading,
setSelectedRegions,
setIncludeAllRegions,
isLoading,
isGeneratingUrl,
handleConnectionSuccess,
handleConnectionTimeout,
handleConnectionError,

View File

@@ -9,10 +9,10 @@ import useUrlQuery from 'hooks/useUrlQuery';
import history from 'lib/history';
import { Save } from 'lucide-react';
import logEvent from '../../../../../../api/common/logEvent';
import { CloudAccount } from '../../types';
import { RegionSelector } from './RegionSelector';
import RemoveIntegrationAccount from './RemoveIntegrationAccount';
import logEvent from '../../../../../api/common/logEvent';
import RemoveIntegrationAccount from '../../RemoveAccount/RemoveIntegrationAccount';
import { RegionSelector } from '../RegionForm/RegionSelector';
import { CloudAccount } from '../types';
import './AccountSettingsModal.style.scss';
@@ -110,11 +110,7 @@ function AccountSettingsModal({
form,
selectedRegions,
includeAllRegions,
account?.id,
handleRemoveIntegrationAccountSuccess,
isSaveDisabled,
handleSubmit,
isLoading,
account?.providerAccountId,
setSelectedRegions,
setIncludeAllRegions,
]);
@@ -133,6 +129,7 @@ function AccountSettingsModal({
<RemoveIntegrationAccount
accountId={account?.id}
onRemoveIntegrationAccountSuccess={handleRemoveIntegrationAccountSuccess}
cloudProvider={INTEGRATION_TYPES.AWS}
/>
<Button

View File

@@ -1,28 +0,0 @@
import awsDarkLogoUrl from '@/assets/Logos/aws-dark.svg';
import AccountActions from './components/AccountActions';
import './HeroSection.style.scss';
function HeroSection(): JSX.Element {
return (
<div className="hero-section">
<div className="hero-section__details">
<div className="hero-section__details-header">
<div className="hero-section__icon">
<img src={awsDarkLogoUrl} alt="AWS" />
</div>
<div className="hero-section__details-title">AWS</div>
</div>
<div className="hero-section__details-description">
AWS is a cloud computing platform that provides a range of services for
building and running applications.
</div>
</div>
<AccountActions />
</div>
);
}
export default HeroSection;

View File

@@ -1,180 +0,0 @@
.cloud-account-setup-modal {
background: var(--l1-background);
color: var(--l1-foreground);
[data-slot='drawer-title'] {
color: var(--l1-foreground);
}
> div {
display: flex;
flex-direction: column;
overflow: hidden;
}
&__content {
flex: 1;
overflow-y: auto;
min-height: 0;
scrollbar-width: thin;
&::-webkit-scrollbar {
width: 2px;
}
}
.account-setup-modal-footer {
&__confirm-button {
background: var(--primary-background);
color: var(--primary-foreground);
font-size: 12px;
font-weight: 500;
}
&__confirm-selection-count {
font-family: 'Geist Mono';
}
&__close-button {
background: var(--l1-background);
border: 1px solid var(--l1-border);
border-radius: 2px;
color: var(--l1-foreground);
font-family: 'Inter';
font-size: 12px;
font-weight: 500;
&:hover {
border-color: var(--l1-border);
color: var(--l1-foreground);
}
}
}
.cloud-account-setup-form {
.disabled {
opacity: 0.4;
}
&,
&__content {
display: flex;
flex-direction: column;
gap: 24px;
}
&__alert {
&.ant-alert {
padding: 12px;
border-radius: 6px;
font-size: 14px;
line-height: 22px; /* 157.143% */
letter-spacing: -0.07px;
}
&.ant-alert-error {
color: var(--danger-foreground);
border: 1px solid
color-mix(in srgb, var(--danger-background) 10%, transparent);
background: color-mix(in srgb, var(--danger-background) 10%, transparent);
}
&.ant-alert-warning {
color: var(--warning-foreground);
border: 1px solid
color-mix(in srgb, var(--warning-background) 10%, transparent);
background: color-mix(in srgb, var(--warning-background) 10%, transparent);
}
&-message {
display: flex;
align-items: center;
gap: 8px;
color: var(--l1-foreground);
.retry-time {
font-family: 'Geist Mono';
font-size: 14px;
font-weight: 600;
line-height: 22px;
letter-spacing: -0.07px;
}
}
}
&__form-group {
display: flex;
flex-direction: column;
gap: 12px;
}
&__title {
color: var(--l1-foreground);
font-size: 14px;
font-weight: 500;
line-height: 20px;
letter-spacing: -0.07px;
}
&__description {
color: var(--l2-foreground);
font-size: 12px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.06px;
}
&__select {
.ant-select-selection-item {
color: var(--l1-foreground);
font-size: 14px;
line-height: 20px;
letter-spacing: -0.07px;
}
}
&__form-item {
margin: 0;
}
&__include-all-regions-switch {
display: flex;
align-items: center;
gap: 10px;
color: var(--l2-foreground);
font-size: 12px;
line-height: 18px;
letter-spacing: -0.06px;
margin-bottom: 12px;
&-label {
background-color: transparent;
border: none;
color: var(--l2-foreground);
font-size: 12px;
line-height: 18px;
letter-spacing: -0.06px;
cursor: pointer;
}
}
&__note {
padding: 12px;
color: var(--callout-primary-description);
font-size: 12px;
line-height: 22px;
letter-spacing: -0.06px;
border-radius: 4px;
border: 1px solid
color-mix(in srgb, var(--primary-background) 10%, transparent);
background: color-mix(in srgb, var(--primary-background) 10%, transparent);
}
&__submit-button {
border-radius: 2px;
background: var(--primary-background);
color: var(--primary-foreground);
font-size: 14px;
font-weight: 500;
line-height: 20px;
&-content {
display: flex;
align-items: center;
justify-content: center;
gap: 8px;
}
&:disabled {
opacity: 0.4;
}
}
}
}

View File

@@ -5,7 +5,7 @@ import { ChevronDown } from 'lucide-react';
import { Region } from 'utils/regions';
import { popupContainer } from 'utils/selectPopupContainer';
import { RegionSelector } from './RegionSelector';
import { RegionSelector } from './RegionForm/RegionSelector';
// Form section components
function RegionDeploymentSection({

View File

@@ -3,15 +3,18 @@ import { Form } from 'antd';
import { useGetAccount } from 'api/generated/services/cloudintegration';
import cx from 'classnames';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import {
ModalStateEnum,
RegionFormProps,
} from 'container/Integrations/HeroSection/types';
import { regions } from 'utils/regions';
import { ModalStateEnum, RegionFormProps } from '../types';
import AlertMessage from './AlertMessage';
import AlertMessage from '../../AlertMessage';
import {
ComplianceNote,
MonitoringRegionsSection,
RegionDeploymentSection,
} from './IntegrateNowFormSections';
} from '../IntegrateNowFormSections';
import RenderConnectionFields from './RenderConnectionParams';
export function RegionForm({
@@ -76,8 +79,6 @@ export function RegionForm({
layout="vertical"
onFinish={onSubmit}
>
<AlertMessage modalState={modalState} />
<div
className={cx(`cloud-account-setup-form__content`, {
disabled: isFormDisabled,
@@ -100,6 +101,10 @@ export function RegionForm({
isFormDisabled={isFormDisabled}
/>
</div>
<div className="cloud-account-setup-form__alert">
<AlertMessage modalState={modalState} />
</div>
</Form>
);
}

View File

@@ -4,7 +4,7 @@ import { useListAccounts } from 'api/generated/services/cloudintegration';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import useUrlQuery from 'hooks/useUrlQuery';
import { mapAccountDtoToAwsCloudAccount } from '../mapAwsCloudAccountFromDto';
import { mapAccountDtoToAwsCloudAccount } from '../../mapCloudAccountFromDto';
import { CloudAccount } from '../types';
import './S3BucketsSelector.styles.scss';

View File

@@ -12,14 +12,14 @@ import {
useUpdateService,
} from 'api/generated/services/cloudintegration';
import {
CloudintegrationtypesServiceConfigDTO,
CloudintegrationtypesServiceDTO,
ListServicesMetadata200,
} from 'api/generated/services/sigNoz.schemas';
import CloudServiceDataCollected from 'components/CloudIntegrations/CloudServiceDataCollected/CloudServiceDataCollected';
import { MarkdownRenderer } from 'components/MarkdownRenderer/MarkdownRenderer';
import ServiceDashboards from 'container/Integrations/CloudIntegration/AmazonWebServices/ServiceDashboards/ServiceDashboards';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import { IServiceStatus } from 'container/Integrations/types';
import ServiceDashboards from 'container/Integrations/CloudIntegration/ServiceDashboards/ServiceDashboards';
import { IntegrationType, IServiceStatus } from 'container/Integrations/types';
import useUrlQuery from 'hooks/useUrlQuery';
import { Save, X } from 'lucide-react';
@@ -36,7 +36,81 @@ type ServiceDetailsData = CloudintegrationtypesServiceDTO & {
status?: IServiceStatus;
};
function ServiceDetails(): JSX.Element | null {
const EMPTY_FORM_VALUES: ServiceConfigFormValues = {
logsEnabled: false,
metricsEnabled: false,
s3BucketsByRegion: {},
};
function getInitialFormValues(
type: IntegrationType,
serviceDetailsData?: ServiceDetailsData,
): ServiceConfigFormValues {
const integrationConfig =
type === IntegrationType.AWS_SERVICES
? serviceDetailsData?.cloudIntegrationService?.config?.aws
: serviceDetailsData?.cloudIntegrationService?.config?.azure;
return {
logsEnabled: integrationConfig?.logs?.enabled || false,
metricsEnabled: integrationConfig?.metrics?.enabled || false,
s3BucketsByRegion:
type === IntegrationType.AWS_SERVICES
? serviceDetailsData?.cloudIntegrationService?.config?.aws?.logs
?.s3Buckets || {}
: {},
};
}
function getServiceConfigPayload({
type,
serviceId,
logsEnabled,
metricsEnabled,
isLogsSupported,
isMetricsSupported,
s3BucketsByRegion,
}: {
type: IntegrationType;
serviceId: string;
logsEnabled: boolean;
metricsEnabled: boolean;
isLogsSupported: boolean;
isMetricsSupported: boolean;
s3BucketsByRegion: Record<string, string[]>;
}): CloudintegrationtypesServiceConfigDTO {
if (type === IntegrationType.AWS_SERVICES) {
return {
aws: {
logs: {
enabled: isLogsSupported ? logsEnabled : false,
s3Buckets:
serviceId === 's3sync' && isLogsSupported ? s3BucketsByRegion : {},
},
metrics: {
enabled: isMetricsSupported ? metricsEnabled : false,
},
},
};
}
return {
azure: {
logs: {
enabled: isLogsSupported ? logsEnabled : false,
},
metrics: {
enabled: isMetricsSupported ? metricsEnabled : false,
},
},
};
}
function ServiceDetails({
type,
}: {
type: IntegrationType;
}): JSX.Element | null {
const urlQuery = useUrlQuery();
const cloudAccountId = urlQuery.get('cloudAccountId');
const serviceId = urlQuery.get('service');
@@ -51,7 +125,7 @@ function ServiceDetails(): JSX.Element | null {
isLoading: isServiceDetailsLoading,
} = useGetService(
{
cloudProvider: INTEGRATION_TYPES.AWS,
cloudProvider: type,
serviceId: serviceId || '',
},
{
@@ -65,10 +139,17 @@ function ServiceDetails(): JSX.Element | null {
},
);
const awsConfig = serviceDetailsData?.cloudIntegrationService?.config?.aws;
const integrationConfig =
type === IntegrationType.AWS_SERVICES
? serviceDetailsData?.cloudIntegrationService?.config?.aws
: serviceDetailsData?.cloudIntegrationService?.config?.azure;
const isServiceEnabledInPersistedConfig =
Boolean(awsConfig?.logs?.enabled) || Boolean(awsConfig?.metrics?.enabled);
Boolean(integrationConfig?.logs?.enabled) ||
Boolean(integrationConfig?.metrics?.enabled);
const serviceDetailsId = serviceDetailsData?.id;
const isLogsSupported = serviceDetailsData?.supportedSignals?.logs || false;
const isMetricsSupported =
serviceDetailsData?.supportedSignals?.metrics || false;
const {
control,
@@ -77,43 +158,31 @@ function ServiceDetails(): JSX.Element | null {
watch,
formState: { isDirty },
} = useForm<ServiceConfigFormValues>({
defaultValues: {
logsEnabled: awsConfig?.logs?.enabled || false,
metricsEnabled: awsConfig?.metrics?.enabled || false,
s3BucketsByRegion: awsConfig?.logs?.s3Buckets || {},
},
defaultValues: getInitialFormValues(type, serviceDetailsData),
});
const resetToAwsConfig = useCallback((): void => {
reset({
logsEnabled: awsConfig?.logs?.enabled || false,
metricsEnabled: awsConfig?.metrics?.enabled || false,
s3BucketsByRegion: awsConfig?.logs?.s3Buckets || {},
});
}, [awsConfig, reset]);
const resetToConfig = useCallback((): void => {
reset(getInitialFormValues(type, serviceDetailsData));
}, [reset, serviceDetailsData, type]);
// Ensure form state does not leak across service switches while new details load.
useEffect(() => {
reset({
logsEnabled: false,
metricsEnabled: false,
s3BucketsByRegion: {},
});
reset(EMPTY_FORM_VALUES);
}, [reset, serviceId]);
useEffect(() => {
resetToAwsConfig();
}, [resetToAwsConfig, serviceDetailsId]);
resetToConfig();
}, [resetToConfig, serviceDetailsId]);
// log telemetry event on visiting details of a service.
useEffect(() => {
if (serviceId) {
logEvent('AWS Integration: Service viewed', {
logEvent(`${type} Integration: Service viewed`, {
cloudAccountId,
serviceId,
});
}
}, [cloudAccountId, serviceId]);
}, [cloudAccountId, serviceId, type]);
const { mutate: updateService, isLoading: isUpdatingServiceConfig } =
useUpdateService();
@@ -121,8 +190,8 @@ function ServiceDetails(): JSX.Element | null {
const queryClient = useQueryClient();
const handleDiscard = useCallback((): void => {
resetToAwsConfig();
}, [resetToAwsConfig]);
resetToConfig();
}, [resetToConfig]);
const onSubmit = useCallback(
async (values: ServiceConfigFormValues): Promise<void> => {
@@ -141,25 +210,25 @@ function ServiceDetails(): JSX.Element | null {
return;
}
const serviceConfigPayload = getServiceConfigPayload({
type,
serviceId,
logsEnabled,
metricsEnabled,
isLogsSupported,
isMetricsSupported,
s3BucketsByRegion: normalizedS3BucketsByRegion,
});
updateService(
{
pathParams: {
cloudProvider: INTEGRATION_TYPES.AWS,
cloudProvider: type,
id: cloudAccountId,
serviceId,
},
data: {
config: {
aws: {
logs: {
enabled: logsEnabled,
s3Buckets: normalizedS3BucketsByRegion,
},
metrics: {
enabled: metricsEnabled,
},
},
},
config: serviceConfigPayload,
},
},
{
@@ -170,7 +239,7 @@ function ServiceDetails(): JSX.Element | null {
const servicesListQueryKey = getListServicesMetadataQueryKey(
{
cloudProvider: INTEGRATION_TYPES.AWS,
cloudProvider: type,
},
{
cloud_integration_id: cloudAccountId,
@@ -203,7 +272,7 @@ function ServiceDetails(): JSX.Element | null {
invalidateGetService(
queryClient,
{
cloudProvider: INTEGRATION_TYPES.AWS,
cloudProvider: type,
serviceId,
},
{
@@ -214,14 +283,14 @@ function ServiceDetails(): JSX.Element | null {
invalidateListServicesMetadata(
queryClient,
{
cloudProvider: INTEGRATION_TYPES.AWS,
cloudProvider: type,
},
{
cloud_integration_id: cloudAccountId,
},
);
logEvent('AWS Integration: Service settings saved', {
logEvent(`${type} Integration: Service settings saved`, {
cloudAccountId,
serviceId,
logsEnabled,
@@ -241,7 +310,16 @@ function ServiceDetails(): JSX.Element | null {
console.error('Form submission failed:', error);
}
},
[serviceId, cloudAccountId, updateService, queryClient, reset],
[
serviceId,
cloudAccountId,
updateService,
queryClient,
reset,
type,
isLogsSupported,
isMetricsSupported,
],
);
if (isServiceDetailsLoading) {
@@ -262,10 +340,6 @@ function ServiceDetails(): JSX.Element | null {
const logsEnabled = watch('logsEnabled');
const s3BucketsByRegion = watch('s3BucketsByRegion');
const isLogsSupported = serviceDetailsData?.supportedSignals?.logs || false;
const isMetricsSupported =
serviceDetailsData?.supportedSignals?.metrics || false;
const hasUnsavedChanges = isDirty;
const isS3SyncBucketsMissing =

View File

@@ -1,29 +0,0 @@
import useUrlQuery from 'hooks/useUrlQuery';
import HeroSection from './HeroSection/HeroSection';
import ServiceDetails from './ServiceDetails/ServiceDetails';
import ServicesList from './ServicesList';
import './ServicesTabs.style.scss';
function ServicesTabs(): JSX.Element {
const urlQuery = useUrlQuery();
const cloudAccountId = urlQuery.get('cloudAccountId') || '';
return (
<div className="services-tabs">
<HeroSection />
<div className="services-section">
<div className="services-section__sidebar">
<ServicesList cloudAccountId={cloudAccountId} />
</div>
<div className="services-section__content">
<ServiceDetails />
</div>
</div>
</div>
);
}
export default ServicesTabs;

View File

@@ -29,7 +29,7 @@ jest.mock('components/MarkdownRenderer/MarkdownRenderer', () => ({
MarkdownRenderer: (): JSX.Element => <div data-testid="markdown-renderer" />,
}));
jest.mock(
'container/Integrations/CloudIntegration/AmazonWebServices/ServiceDashboards/ServiceDashboards',
'container/Integrations/CloudIntegration/ServiceDashboards/ServiceDashboards',
() => ({
__esModule: true,
default: (): JSX.Element => <div data-testid="service-dashboards" />,

View File

@@ -1,4 +1,5 @@
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
import { IntegrationType } from 'container/Integrations/types';
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
import ServiceDetails from '../ServiceDetails/ServiceDetails';
@@ -11,10 +12,11 @@ import { accountsResponse } from './mockData';
const renderServiceDetails = (
_initialConfigLogsS3Buckets: Record<string, string[]> = {},
_serviceId = 's3sync',
type: IntegrationType = IntegrationType.AWS_SERVICES,
): RenderResult =>
render(
<MockQueryClientProvider>
<ServiceDetails />
<ServiceDetails type={type} />
</MockQueryClientProvider>,
);

View File

@@ -1,25 +0,0 @@
import { CloudintegrationtypesAccountDTO } from 'api/generated/services/sigNoz.schemas';
import { CloudAccount } from './types';
export function mapAccountDtoToAwsCloudAccount(
account: CloudintegrationtypesAccountDTO,
): CloudAccount | null {
if (!account.providerAccountId) {
return null;
}
return {
id: account.id,
cloud_account_id: account.id,
config: {
regions: account.config?.aws?.regions ?? [],
},
status: {
integration: {
last_heartbeat_ts_ms: account.agentReport?.timestampMillis ?? 0,
},
},
providerAccountId: account.providerAccountId,
};
}

View File

@@ -0,0 +1,356 @@
import { useCallback, useRef, useState } from 'react';
import { Color } from '@signozhq/design-tokens';
import { ChevronDown, ChevronRight } from '@signozhq/icons';
import { Button, Callout, DrawerWrapper, Tabs } from '@signozhq/ui';
import { Form, Select, Spin } from 'antd';
import { useGetAccount } from 'api/generated/services/cloudintegration';
import { CloudintegrationtypesAccountDTO } from 'api/generated/services/sigNoz.schemas';
import CodeBlock from 'components/CodeBlock/CodeBlock';
import {
AZURE_REGIONS,
INTEGRATION_TYPES,
} from 'container/Integrations/constants';
import {
IntegrationModalProps,
ModalStateEnum,
} from 'container/Integrations/HeroSection/types';
import { LoaderCircle, SquareArrowOutUpRight } from 'lucide-react';
import { popupContainer } from 'utils/selectPopupContainer';
import { useIntegrationModal } from '../../../../../hooks/integration/azure/useIntegrationModal';
import RenderConnectionFields from '../../AmazonWebServices/RegionForm/RenderConnectionParams';
import '../../AmazonWebServices/AddNewAccount/CloudAccountSetupModal.style.scss';
// Per-tab helper copy shown above the generated deployment command
// (CLI tab vs. PowerShell tab).
const AZURE_CLI_DESC =
	'Paste the following command if you have Azure CLI setup locally on your machine or use BASH CloudShell on Azure portal with above mentioned permissions.';
const AZURE_POWERSHELL_DESC =
	'Paste the following command in PowerShell CloudShell on Azure portal, you can switch to PowerShell on Azure portal.';
function CloudAccountSetupModal({
onClose,
}: IntegrationModalProps): JSX.Element {
const {
form,
modalState,
isLoading,
accountId,
connectionCommands,
handleSubmit,
handleClose,
connectionParams,
isConnectionParamsLoading,
handleConnectionSuccess,
handleConnectionTimeout,
handleConnectionError,
} = useIntegrationModal({ onClose });
const startTimeRef = useRef(Date.now());
const refetchInterval = 10 * 1000;
const errorTimeout = 10 * 60 * 1000;
const [isHowItWorksOpen, setIsHowItWorksOpen] = useState(true);
const [activeTab, setActiveTab] = useState('cli');
useGetAccount(
{
cloudProvider: INTEGRATION_TYPES.AZURE,
id: accountId ?? '',
},
{
query: {
enabled: Boolean(accountId) && modalState === ModalStateEnum.WAITING,
refetchInterval,
select: (response): CloudintegrationtypesAccountDTO => response.data,
onSuccess: (account) => {
const isConnected =
Boolean(account.providerAccountId) && account.removedAt === null;
if (isConnected) {
handleConnectionSuccess({
cloudAccountId: account.providerAccountId ?? account.id,
status: account.agentReport,
});
} else if (Date.now() - startTimeRef.current >= errorTimeout) {
handleConnectionTimeout({ id: accountId });
}
},
onError: () => {
handleConnectionError();
},
},
},
);
const renderAlert = useCallback((): JSX.Element | null => {
if (modalState === ModalStateEnum.WAITING) {
return (
<div className="cloud-account-setup-form__alert">
<Callout
title={
<div className="cloud-account-setup-form__alert-message">
<Spin
indicator={
<LoaderCircle
size={14}
className="anticon anticon-loading anticon-spin ant-spin-dot"
/>
}
/>
Waiting for Azure account connection, retrying in{' '}
<span className="retry-time">10</span> secs...
</div>
}
type="info"
showIcon={false}
/>
</div>
);
}
if (modalState === ModalStateEnum.ERROR) {
return (
<div className="cloud-account-setup-form__alert">
<Callout
title={
<div className="cloud-account-setup-form__alert-message">
We couldn&apos;t establish a connection to your Azure account. Please
try again
</div>
}
type="error"
/>
</div>
);
}
return null;
}, [modalState]);
const footer = (
<div className="cloud-account-setup-modal__footer">
{modalState === ModalStateEnum.FORM && (
<Button
variant="solid"
color="primary"
prefix={<SquareArrowOutUpRight size={17} color={Color.BG_VANILLA_100} />}
onClick={handleSubmit}
loading={isLoading}
>
Generate Azure Setup Commands
</Button>
)}
</div>
);
return (
<DrawerWrapper
open={true}
className="cloud-account-setup-modal"
onOpenChange={(open): void => {
if (!open) {
handleClose();
}
}}
direction="right"
showCloseButton
title="Add Azure Account"
width="wide"
footer={footer}
>
<div className="cloud-account-setup-modal__content">
<div className="cloud-account-setup-prerequisites">
<div className="cloud-account-setup-prerequisites__title">
Prerequisites
</div>
<ul className="cloud-account-setup-prerequisites__list">
<li className="cloud-account-setup-prerequisites__list-item">
<span className="cloud-account-setup-prerequisites__list-item-bullet">
</span>{' '}
<span className="cloud-account-setup-prerequisites__list-item-text">
Ensure that you&apos;re logged in to the Azure workspace which you want
to monitor.
</span>
</li>
<li className="cloud-account-setup-prerequisites__list-item">
<span className="cloud-account-setup-prerequisites__list-item-bullet">
</span>{' '}
<span className="cloud-account-setup-prerequisites__list-item-text">
Ensure that you either have the{' '}
<span className="cloud-account-setup-prerequisites__list-item-highlight">
Owner
</span>{' '}
role OR
</span>
</li>
<li className="cloud-account-setup-prerequisites__list-item">
<span className="cloud-account-setup-prerequisites__list-item-bullet">
</span>{' '}
<span className="cloud-account-setup-prerequisites__list-item-text">
Both the{' '}
<span className="cloud-account-setup-prerequisites__list-item-highlight">
Contributor
</span>{' '}
and{' '}
<span className="cloud-account-setup-prerequisites__list-item-highlight">
user access admin
</span>{' '}
roles
</span>
</li>
</ul>
</div>
<div className="cloud-account-setup-how-it-works-accordion">
<div
className={`cloud-account-setup-how-it-works-accordion__title ${
isHowItWorksOpen ? 'open' : ''
}`}
>
<Button
variant="link"
color="secondary"
onClick={(): void => setIsHowItWorksOpen(!isHowItWorksOpen)}
prefix={isHowItWorksOpen ? <ChevronDown /> : <ChevronRight />}
/>
<span className="cloud-account-setup-how-it-works-accordion__title-text">
How it works?
</span>
</div>
{isHowItWorksOpen && (
<div className="cloud-account-setup-how-it-works-accordion__description">
<div className="cloud-account-setup-how-it-works-accordion__description-item">
SigNoz will create new resource-group to manage the resources required
for this integration. The following steps will create a User-Assigned
Managed Identity with the necessary permissions and follows the
Principle of Least Privilege.
</div>
<div className="cloud-account-setup-how-it-works__description-item">
Once the Integration template is deployed, you can enable the services
you want to monitor right here in Signoz dashboard.
</div>
</div>
)}
</div>
<Form
form={form}
className="cloud-account-setup-form"
layout="vertical"
initialValues={{ resourceGroups: [] }}
>
<div className="cloud-account-setup-form__content">
<div className="cloud-account-setup-form__form-group">
<div className="cloud-account-setup-form__title">
Where should we deploy the SigNoz collector resources?
</div>
<div className="cloud-account-setup-form__description">
Choose the Azure region for deployment.
</div>
<Form.Item
name="region"
rules={[{ required: true, message: 'Please select a region' }]}
className="cloud-account-setup-form__form-item"
>
<Select
placeholder="e.g. East US"
options={AZURE_REGIONS.map((region) => ({
label: `${region.label} (${region.value})`,
value: region.value,
}))}
getPopupContainer={popupContainer}
disabled={modalState === ModalStateEnum.WAITING}
/>
</Form.Item>
</div>
<div className="cloud-account-setup-form__form-group">
<div className="cloud-account-setup-form__title">
Which resource groups do you want to monitor?
</div>
<div className="cloud-account-setup-form__description">
Add one or more Azure resource group names.
</div>
<Form.Item
name="resourceGroups"
rules={[
{
required: true,
type: 'array',
min: 1,
message: 'Please add at least one resource group',
},
]}
className="cloud-account-setup-form__form-item"
>
<Select
mode="tags"
placeholder="e.g. prod-platform-rg"
tokenSeparators={[',']}
disabled={modalState === ModalStateEnum.WAITING}
/>
</Form.Item>
</div>
<RenderConnectionFields
isConnectionParamsLoading={isConnectionParamsLoading}
connectionParams={connectionParams}
isFormDisabled={modalState === ModalStateEnum.WAITING}
/>
{connectionCommands && (
<div className="cloud-account-setup-form__code-block-tabs-container">
<div className="cloud-account-setup-form__code-block-tabs-header">
<div className="cloud-account-setup-form__code-block-tabs-header-title">
Deploy Agent
</div>
<div className="cloud-account-setup-form__code-block-tabs-header-description">
{activeTab === 'cli' ? AZURE_CLI_DESC : AZURE_POWERSHELL_DESC}
</div>
</div>
<Tabs
className="cloud-account-setup-form__code-block-tabs"
items={[
{
key: 'cli',
label: 'CLI',
children: <CodeBlock code={connectionCommands?.cliCommand || ''} />,
},
{
key: 'powershell',
label: 'PowerShell',
children: (
<CodeBlock
code={connectionCommands?.cloudPowerShellCommand || ''}
/>
),
},
]}
value={activeTab}
onChange={(key): void => setActiveTab(key)}
variant="primary"
/>
</div>
)}
{renderAlert()}
{modalState === ModalStateEnum.WAITING && (
<div className="cloud-account-setup-status-message">
After running the command, return here and wait for automatic connection
detection.
</div>
)}
</div>
</Form>
</div>
</DrawerWrapper>
);
}
export default CloudAccountSetupModal;

View File

@@ -0,0 +1,150 @@
import { Dispatch, SetStateAction, useMemo } from 'react';
import { useQueryClient } from 'react-query';
import { Button, DrawerWrapper } from '@signozhq/ui';
import { Form, Select } from 'antd';
import { invalidateListAccounts } from 'api/generated/services/cloudintegration';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import { CloudAccount } from 'container/Integrations/types';
import { Save } from 'lucide-react';
import { useAccountSettingsModal } from '../../../../../hooks/integration/azure/useAccountSettingsModal';
import RemoveIntegrationAccount from '../../RemoveAccount/RemoveIntegrationAccount';
import '../../AmazonWebServices/EditAccount/AccountSettingsModal.style.scss';
// Props for the Azure account-settings drawer.
interface AccountSettingsModalProps {
	// Invoked when the drawer should close (after save, remove, or dismiss).
	onClose: () => void;
	// The cloud account being edited.
	account: CloudAccount;
	// Setter used to sync the edited/removed account back to the parent view.
	setActiveAccount: Dispatch<SetStateAction<CloudAccount | null>>;
}

/**
 * Drawer for editing an Azure cloud-integration account. Shows the connected
 * subscription id, the read-only deployment region, and an editable list of
 * monitored resource groups. The footer hosts both the "remove account"
 * action and the save button; form state and submit logic live in
 * useAccountSettingsModal.
 */
function AccountSettingsModal({
	onClose,
	account,
	setActiveAccount,
}: AccountSettingsModalProps): JSX.Element {
	const {
		form,
		isLoading,
		resourceGroups,
		isSaveDisabled,
		setResourceGroups,
		handleSubmit,
		handleClose,
	} = useAccountSettingsModal({ onClose, account, setActiveAccount });
	const queryClient = useQueryClient();
	// account.config is a union of AWS and Azure shapes; the presence of
	// 'deployment_region' identifies the Azure variant (null otherwise).
	const azureConfig = useMemo(
		() => ('deployment_region' in account.config ? account.config : null),
		[account.config],
	);
	return (
		<DrawerWrapper
			open={true}
			className="account-settings-modal"
			title="Account Settings"
			direction="right"
			showCloseButton
			onOpenChange={(open): void => {
				if (!open) {
					handleClose();
				}
			}}
			width="wide"
			footer={
				<div className="account-settings-modal__footer">
					<RemoveIntegrationAccount
						accountId={account?.id}
						onRemoveIntegrationAccountSuccess={(): void => {
							// Refresh the accounts list and clear the selection once the
							// account has been disconnected, then close the drawer.
							void invalidateListAccounts(queryClient, {
								cloudProvider: INTEGRATION_TYPES.AZURE,
							});
							setActiveAccount(null);
							handleClose();
						}}
						cloudProvider={INTEGRATION_TYPES.AZURE}
					/>
					<Button
						variant="solid"
						color="secondary"
						disabled={isSaveDisabled}
						onClick={handleSubmit}
						loading={isLoading}
						prefix={<Save size={14} />}
					>
						Update Changes
					</Button>
				</div>
			}
		>
			<Form
				form={form}
				layout="vertical"
				initialValues={{
					resourceGroups: azureConfig?.resource_groups || [],
				}}
			>
				<div className="account-settings-modal__body">
					{/* Read-only summary of the connected subscription */}
					<div className="account-settings-modal__body-account-info">
						<div className="account-settings-modal__body-account-info-connected-account-details">
							<div className="account-settings-modal__body-account-info-connected-account-details-title">
								Connected Account details
							</div>
							<div className="account-settings-modal__body-account-info-connected-account-details-account-id">
								Azure Subscription:{' '}
								<span className="account-settings-modal__body-account-info-connected-account-details-account-id-account-id">
									{account?.providerAccountId}
								</span>
							</div>
						</div>
					</div>
					{/* Deployment region is display-only; it cannot be edited here */}
					{azureConfig?.deployment_region && (
						<div className="account-settings-modal__body-region-selector">
							<div className="account-settings-modal__body-region-selector-title">
								Deployment region
							</div>
							<div className="account-settings-modal__body-region-selector-description">
								{azureConfig.deployment_region}
							</div>
						</div>
					)}
					<div className="account-settings-modal__body-region-selector">
						<div className="account-settings-modal__body-region-selector-title">
							Resource groups
						</div>
						<div className="account-settings-modal__body-region-selector-description">
							Update the resource groups that should be monitored.
						</div>
						<Form.Item
							name="resourceGroups"
							rules={[
								{
									required: true,
									type: 'array',
									min: 1,
									message: 'Please add at least one resource group',
								},
							]}
						>
							<Select
								mode="tags"
								value={resourceGroups}
								onChange={(values): void => {
									// Keep local state and the antd form field in sync so both
									// the save-disabled check and validation see the same value.
									setResourceGroups(values);
									form.setFieldValue('resourceGroups', values);
								}}
							/>
						</Form.Item>
					</div>
				</div>
			</Form>
		</DrawerWrapper>
	);
}
export default AccountSettingsModal;

View File

@@ -1,16 +1,15 @@
import { IntegrationType } from 'container/Integrations/types';
import AWSTabs from './AmazonWebServices/ServicesTabs';
import Header from './Header/Header';
import ServicesTabs from './ServiceTabs/ServicesTabs';
import './CloudIntegration.styles.scss';
const CloudIntegration = ({ type }: { type: IntegrationType }): JSX.Element => {
return (
<div className="cloud-integration-container">
<Header title={type} />
{type === IntegrationType.AWS_SERVICES && <AWSTabs />}
<Header type={type} />
<ServicesTabs type={type} />
</div>
);
};

View File

@@ -7,7 +7,7 @@ import { Blocks, LifeBuoy } from 'lucide-react';
import './Header.styles.scss';
function Header({ title }: { title: IntegrationType }): JSX.Element {
function Header({ type }: { type: IntegrationType }): JSX.Element {
return (
<div className="cloud-header">
<div className="cloud-header__navigation">
@@ -25,27 +25,30 @@ function Header({ title }: { title: IntegrationType }): JSX.Element {
),
},
{
title: <div className="cloud-header__breadcrumb-title">{title}</div>,
title: <div className="cloud-header__breadcrumb-title">{type}</div>,
},
]}
/>
</div>
<div className="cloud-header__actions">
<Button
variant="solid"
size="sm"
color="secondary"
onClick={(): void => {
window.open(
'https://signoz.io/blog/native-aws-integrations-with-autodiscovery/',
'_blank',
);
}}
prefix={<LifeBuoy size={12} />}
>
Get Help
</Button>
</div>
{type === IntegrationType.AWS_SERVICES && (
<div className="cloud-header__actions">
<Button
variant="solid"
size="sm"
color="secondary"
onClick={(): void => {
window.open(
'https://signoz.io/blog/native-aws-integrations-with-autodiscovery/',
'_blank',
);
}}
prefix={<LifeBuoy size={12} />}
>
Get Help
</Button>
</div>
)}
</div>
);
}

View File

@@ -1,9 +1,16 @@
.remove-integration-account-modal {
&__cloud-provider {
color: var(--l1-foreground);
font-weight: 500;
font-size: 14px;
line-height: 20px;
letter-spacing: -0.07px;
}
.ant-modal-content {
background-color: var(--l1-background);
border: 1px solid var(--l3-background);
border-radius: 4px;
padding: 12px;
}
.ant-modal-close {

View File

@@ -4,16 +4,21 @@ import { Modal } from 'antd/lib';
import logEvent from 'api/common/logEvent';
import { useDisconnectAccount } from 'api/generated/services/cloudintegration';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { INTEGRATION_TELEMETRY_EVENTS } from 'container/Integrations/constants';
import {
INTEGRATION_TELEMETRY_EVENTS,
INTEGRATION_TYPES,
} from 'container/Integrations/constants';
import { useNotifications } from 'hooks/useNotifications';
import { Unlink } from 'lucide-react';
import './RemoveIntegrationAccount.scss';
function RemoveIntegrationAccount({
cloudProvider,
accountId,
onRemoveIntegrationAccountSuccess,
}: {
cloudProvider: string;
accountId: string;
onRemoveIntegrationAccountSuccess: () => void;
}): JSX.Element {
@@ -39,12 +44,13 @@ function RemoveIntegrationAccount({
},
});
const handleOk = (): void => {
logEvent(INTEGRATION_TELEMETRY_EVENTS.AWS_INTEGRATION_ACCOUNT_REMOVED, {
logEvent(INTEGRATION_TELEMETRY_EVENTS.INTEGRATION_ACCOUNT_REMOVED, {
accountId,
integration: cloudProvider,
});
disconnectAccount({
pathParams: {
cloudProvider: 'aws',
cloudProvider,
id: accountId,
},
});
@@ -78,13 +84,28 @@ function RemoveIntegrationAccount({
loading: isRemoveIntegrationLoading,
}}
>
Removing this account will remove all components created for sending
telemetry to SigNoz in your AWS account within the next ~15 minutes
(cloudformation stacks named signoz-integration-telemetry-collection in
enabled regions). <br />
<br />
After that, you can delete the cloudformation stack that was created
manually when connecting this account.
{cloudProvider === INTEGRATION_TYPES.AWS ? (
<>
Removing this account will remove all components created for sending
telemetry to SigNoz in your AWS account within the next ~15 minutes
(cloudformation stacks named signoz-integration-telemetry-collection in
enabled regions). <br />
<br />
After that, you can delete the cloudformation stack that was created
manually when connecting this account.
</>
) : (
<>
Removing this account will remove all components created for sending
telemetry to SigNoz in your Azure subscription within the next ~15 minutes
(deployment stack named signoz-integration-telemetry will be deleted
automatically). <br />
<br />
After that, you have to manually delete &apos;signoz-integration&apos;
deployment stack that was created while connecting this account (Takes ~20
minutes to delete).
</>
)}
</Modal>
</div>
);

View File

@@ -1,7 +1,7 @@
import cx from 'classnames';
import LineClampedText from 'periscope/components/LineClampedText/LineClampedText';
import { Service } from './types';
import { Service } from './AmazonWebServices/types';
function ServiceItem({
service,

View File

@@ -0,0 +1,30 @@
import { IntegrationType } from 'container/Integrations/types';
import useUrlQuery from 'hooks/useUrlQuery';
import HeroSection from '../../HeroSection/HeroSection';
import ServiceDetails from '../AmazonWebServices/ServiceDetails/ServiceDetails';
import ServicesList from '../ServicesList';
import './ServicesTabs.style.scss';
/**
 * Layout for a cloud provider's services view: a hero banner on top and a
 * two-pane split below (service list sidebar, service details content).
 */
function ServicesTabs({ type }: { type: IntegrationType }): JSX.Element {
	const query = useUrlQuery();
	// The selected account lives in the URL so the view survives reloads.
	const cloudAccountId = query.get('cloudAccountId') ?? '';

	return (
		<div className="services-tabs">
			<HeroSection type={type} />
			<div className="services-section">
				<div className="services-section__sidebar">
					<ServicesList cloudAccountId={cloudAccountId} type={type} />
				</div>
				<div className="services-section__content">
					<ServiceDetails type={type} />
				</div>
			</div>
		</div>
	);
}

export default ServicesTabs;

View File

@@ -4,15 +4,20 @@ import { Skeleton } from 'antd';
import { useListServicesMetadata } from 'api/generated/services/cloudintegration';
import type { CloudintegrationtypesServiceMetadataDTO } from 'api/generated/services/sigNoz.schemas';
import cx from 'classnames';
import { IntegrationType } from 'container/Integrations/types';
import useUrlQuery from 'hooks/useUrlQuery';
import emptyStateIconUrl from '@/assets/Icons/emptyState.svg';
interface ServicesListProps {
cloudAccountId: string;
type: IntegrationType;
}
function ServicesList({ cloudAccountId }: ServicesListProps): JSX.Element {
function ServicesList({
cloudAccountId,
type,
}: ServicesListProps): JSX.Element {
const urlQuery = useUrlQuery();
const navigate = useNavigate();
const hasValidCloudAccountId = Boolean(cloudAccountId);
@@ -22,7 +27,7 @@ function ServicesList({ cloudAccountId }: ServicesListProps): JSX.Element {
const { data: servicesMetadata, isLoading } = useListServicesMetadata(
{
cloudProvider: 'aws',
cloudProvider: type,
},
serviceQueryParams,
);

View File

@@ -0,0 +1,49 @@
import { CloudintegrationtypesAccountDTO } from 'api/generated/services/sigNoz.schemas';
import { CloudAccount as IntegrationCloudAccount } from 'container/Integrations/types';
import { CloudAccount as AwsCloudAccount } from './AmazonWebServices/types';
/**
 * Converts a generated account DTO into the AWS-specific CloudAccount shape
 * consumed by the AWS integration UI. Returns null when the DTO carries no
 * provider account id (i.e. the account never finished connecting).
 */
export function mapAccountDtoToAwsCloudAccount(
	account: CloudintegrationtypesAccountDTO,
): AwsCloudAccount | null {
	const { providerAccountId } = account;
	if (!providerAccountId) {
		return null;
	}

	const regions = account.config?.aws?.regions ?? [];
	// 0 acts as the "never reported" sentinel for the heartbeat timestamp.
	const lastHeartbeatMs = account.agentReport?.timestampMillis ?? 0;

	return {
		id: account.id,
		cloud_account_id: account.id,
		config: { regions },
		status: {
			integration: { last_heartbeat_ts_ms: lastHeartbeatMs },
		},
		providerAccountId,
	};
}
/**
 * Converts a generated account DTO into the generic integration CloudAccount
 * shape used by the Azure integration UI. Returns null when the DTO carries
 * no provider account id (i.e. the account never finished connecting).
 */
export function mapAccountDtoToAzureCloudAccount(
	account: CloudintegrationtypesAccountDTO,
): IntegrationCloudAccount | null {
	const { providerAccountId } = account;
	if (!providerAccountId) {
		return null;
	}

	const azure = account.config?.azure;
	// 0 acts as the "never reported" sentinel for the heartbeat timestamp.
	const lastHeartbeatMs = account.agentReport?.timestampMillis ?? 0;

	return {
		id: account.id,
		cloud_account_id: account.id,
		config: {
			deployment_region: azure?.deploymentRegion ?? '',
			resource_groups: azure?.resourceGroups ?? [],
		},
		status: {
			integration: { last_heartbeat_ts_ms: lastHeartbeatMs },
		},
		providerAccountId,
	};
}

View File

@@ -1,5 +1,32 @@
import { ONE_CLICK_INTEGRATIONS } from '../constants';
import { IntegrationType } from '../types';
/**
 * Looks up an account by its cloud_account_id. Returns null (rather than
 * undefined) when no account matches, so callers get a uniform "absent" value.
 */
export const getAccountById = <T extends { cloud_account_id: string }>(
	accounts: T[],
	accountId: string,
): T | null => {
	const match = accounts.find((item) => item.cloud_account_id === accountId);
	return match ?? null;
};
// Display metadata for a one-click integration card/header.
interface IntegrationMetadata {
	title: string;
	description: string;
	logo: string;
}

/**
 * Resolves the display title, description, and logo for a one-click
 * integration type. Falls back to empty strings for unknown types so the UI
 * renders blanks instead of crashing.
 */
export const getIntegrationMetadata = (
	type: IntegrationType,
): IntegrationMetadata => {
	const match = ONE_CLICK_INTEGRATIONS.find((entry) => entry.id === type);
	if (!match) {
		return { title: '', description: '', logo: '' };
	}
	return {
		title: match.title,
		description: match.description,
		logo: match.icon,
	};
};

View File

@@ -0,0 +1,33 @@
import { IntegrationType } from 'container/Integrations/types';
import AccountActions from '../CloudIntegration/AmazonWebServices/AccountActions/AccountActions';
import { getIntegrationMetadata } from '../CloudIntegration/utils';
import './HeroSection.style.scss';
/**
 * Hero banner for a cloud-integration page: provider logo, title and
 * description, plus the account actions control on the right.
 */
function HeroSection({ type }: { type: IntegrationType }): JSX.Element {
	const metadata = getIntegrationMetadata(type);

	return (
		<div className="hero-section">
			<div className="hero-section__details">
				<div className="hero-section__details-header">
					<div className="hero-section__icon">
						<img src={metadata.logo} alt={type} />
					</div>
					<div className="hero-section__details-title">{metadata.title}</div>
				</div>
				<div className="hero-section__details-description">
					{metadata.description}
				</div>
			</div>
			<AccountActions type={type} />
		</div>
	);
}

export default HeroSection;

View File

@@ -9,53 +9,6 @@
flex-direction: column;
gap: 16px;
.error-container {
display: flex;
border-radius: 6px;
border: 1px solid var(--l1-border);
background: var(--l1-background);
align-items: center;
justify-content: center;
flex-direction: column;
.error-content {
display: flex;
flex-direction: column;
justify-content: center;
height: 300px;
gap: 15px;
.error-btns {
display: flex;
flex-direction: row;
gap: 16px;
align-items: center;
.retry-btn {
display: flex;
align-items: center;
}
.contact-support {
display: flex;
align-items: center;
gap: 4px;
cursor: pointer;
.text {
color: var(--callout-primary-description);
font-weight: 500;
}
}
}
.error-state-svg {
height: 40px;
width: 40px;
}
}
}
.loading-integration-details {
display: flex;
flex-direction: column;
@@ -327,6 +280,36 @@
}
}
}
.error-container {
display: flex;
border-radius: 6px;
border: 1px solid var(--l1-border);
background: var(--l1-background);
align-items: center;
justify-content: center;
flex-direction: column;
.error-content {
display: flex;
flex-direction: column;
justify-content: center;
height: 300px;
gap: 15px;
.error-btns {
display: flex;
flex-direction: row;
gap: 12px;
align-items: center;
}
.error-state-svg {
height: 40px;
width: 40px;
}
}
}
}
.remove-integration-modal {

View File

@@ -1,6 +1,5 @@
import { useState } from 'react';
import { useHistory, useParams } from 'react-router-dom';
import { Color } from '@signozhq/design-tokens';
import { Button } from '@signozhq/ui';
import { Flex, Skeleton, Typography } from 'antd';
import ROUTES from 'constants/routes';
@@ -55,8 +54,19 @@ function IntegrationDetailPage(): JSX.Element {
),
);
if (integrationId === INTEGRATION_TYPES.AWS) {
return <CloudIntegration type={IntegrationType.AWS_SERVICES} />;
if (
integrationId === INTEGRATION_TYPES.AWS ||
integrationId === INTEGRATION_TYPES.AZURE
) {
return (
<CloudIntegration
type={
integrationId === INTEGRATION_TYPES.AWS
? IntegrationType.AWS_SERVICES
: IntegrationType.AZURE_SERVICES
}
/>
);
}
return (
@@ -85,20 +95,20 @@ function IntegrationDetailPage(): JSX.Element {
<div className="error-btns">
<Button
variant="solid"
color="primary"
color="secondary"
onClick={(): Promise<any> => refetch()}
prefix={<RotateCw size={14} />}
>
Retry
</Button>
<div
className="contact-support"
<Button
variant="solid"
color="secondary"
onClick={(): void => handleContactSupport(isCloudUserVal)}
suffix={<MoveUpRight size={12} />}
>
<Typography.Link className="text">Contact Support </Typography.Link>
<MoveUpRight size={14} color={Color.BG_ROBIN_400} />
</div>
Contact Support
</Button>
</div>
</div>
</div>

View File

@@ -22,6 +22,7 @@ function OneClickIntegrations(props: OneClickIntegrationsProps): JSX.Element {
if (!query) {
return ONE_CLICK_INTEGRATIONS;
}
return ONE_CLICK_INTEGRATIONS.filter(
(integration) =>
integration.title.toLowerCase().includes(query) ||

View File

@@ -14,8 +14,8 @@ export const INTEGRATION_TELEMETRY_EVENTS = {
'Integrations Detail Page: Clicked remove Integration button for integration',
INTEGRATIONS_DETAIL_CONFIGURE_INSTRUCTION:
'Integrations Detail Page: Navigated to configure an integration',
AWS_INTEGRATION_ACCOUNT_REMOVED:
'AWS Integration Detail page: Clicked remove Integration button for integration',
INTEGRATION_ACCOUNT_REMOVED:
'Integration Detail page: Clicked remove Integration button for integration',
};
export const INTEGRATION_TYPES = {
@@ -53,7 +53,7 @@ export const AZURE_INTEGRATION = {
is_new: true,
};
export const ONE_CLICK_INTEGRATIONS = [AWS_INTEGRATION];
export const ONE_CLICK_INTEGRATIONS = [AWS_INTEGRATION, AZURE_INTEGRATION];
export const AZURE_REGIONS: AzureRegion[] = [
{
@@ -81,6 +81,7 @@ export const AZURE_REGIONS: AzureRegion[] = [
{ label: 'Central India', value: 'centralindia', geography: 'India' },
{ label: 'Central US', value: 'centralus', geography: 'United States' },
{ label: 'Chile Central', value: 'chilecentral', geography: 'Chile' },
{ label: 'Denmark East', value: 'denmarkeast', geography: 'Denmark' },
{ label: 'East Asia', value: 'eastasia', geography: 'Asia Pacific' },
{ label: 'East US', value: 'eastus', geography: 'United States' },
{ label: 'East US 2', value: 'eastus2', geography: 'United States' },

View File

@@ -4,8 +4,8 @@ import {
} from './CloudIntegration/AmazonWebServices/types';
export enum IntegrationType {
AWS_SERVICES = 'aws-services',
AZURE_SERVICES = 'azure-services',
AWS_SERVICES = 'aws',
AZURE_SERVICES = 'azure',
}
interface LogField {
@@ -89,6 +89,7 @@ export interface CloudAccount {
cloud_account_id: string;
config: AzureCloudAccountConfig | AWSCloudAccountConfig;
status: AccountStatus | IServiceStatus;
providerAccountId: string;
}
export interface AzureCloudAccountConfig {

View File

@@ -7,6 +7,13 @@ import {
GetIntegrationStatusProps,
} from 'types/api/integrations/types';
/**
 * True for integrations that use the one-click cloud flow (AWS, Azure)
 * rather than the generic instructions-based integration flow.
 */
export function isOneClickIntegration(integrationId: string): boolean {
	const oneClickIds: string[] = [
		INTEGRATION_TYPES.AWS,
		INTEGRATION_TYPES.AZURE,
	];
	return oneClickIds.includes(integrationId);
}
export const useGetIntegrationStatus = ({
integrationId,
}: GetIntegrationPayloadProps): UseQueryResult<
@@ -20,5 +27,5 @@ export const useGetIntegrationStatus = ({
enabled:
!!integrationId &&
integrationId !== '' &&
integrationId !== INTEGRATION_TYPES.AWS,
!isOneClickIntegration(integrationId),
});

View File

@@ -20,11 +20,11 @@ import {
CloudintegrationtypesCredentialsDTO,
CloudintegrationtypesPostableAccountDTO,
} from 'api/generated/services/sigNoz.schemas';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import {
ActiveViewEnum,
ModalStateEnum,
} from 'container/Integrations/CloudIntegration/AmazonWebServices/HeroSection/types';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
} from 'container/Integrations/HeroSection/types';
import useAxiosError from 'hooks/useAxiosError';
import { regions } from 'utils/regions';

View File

@@ -0,0 +1,142 @@
import {
Dispatch,
SetStateAction,
useCallback,
useEffect,
useMemo,
useState,
} from 'react';
import { toast } from '@signozhq/ui';
import { Form } from 'antd';
import { FormInstance } from 'antd/lib';
import { useUpdateAccount } from 'api/generated/services/cloudintegration';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import { CloudAccount } from 'container/Integrations/types';
import { isEqual } from 'lodash-es';
import logEvent from '../../../api/common/logEvent';
// Inputs to the Azure account-settings drawer hook.
interface UseAccountSettingsModalProps {
	onClose: () => void;
	account: CloudAccount;
	setActiveAccount: Dispatch<SetStateAction<CloudAccount | null>>;
}

// Form state and handlers consumed by the account-settings drawer UI.
interface UseAccountSettingsModal {
	form: FormInstance;
	isLoading: boolean;
	resourceGroups: string[];
	isSaveDisabled: boolean;
	setResourceGroups: Dispatch<SetStateAction<string[]>>;
	handleSubmit: () => Promise<void>;
	handleClose: () => void;
}

/**
 * Manages the Azure account-settings form: seeds it from the account's
 * current config, tracks the edited resource-group list, disables save while
 * nothing has changed, and persists edits via the update-account mutation.
 *
 * Fixes over the original: `accountConfig` was referenced inside
 * handleSubmit but missing from its dependency array, and `form` was listed
 * as a dependency of isSaveDisabled without being used there (both
 * react-hooks/exhaustive-deps violations).
 */
export function useAccountSettingsModal({
	onClose,
	account,
	setActiveAccount,
}: UseAccountSettingsModalProps): UseAccountSettingsModal {
	const [form] = Form.useForm();
	const { mutate: updateAccount, isLoading } = useUpdateAccount();

	// account.config is a union of AWS and Azure shapes; the presence of
	// 'deployment_region' identifies the Azure variant (null otherwise).
	const accountConfig = useMemo(
		() => ('deployment_region' in account.config ? account.config : null),
		[account.config],
	);
	const [resourceGroups, setResourceGroups] = useState<string[]>(
		accountConfig?.resource_groups || [],
	);

	// Re-seed the form and local state whenever the account's config changes
	// (e.g. the user switches to a different account while the drawer is open).
	useEffect(() => {
		if (!accountConfig) {
			return;
		}
		form.setFieldsValue({
			region: accountConfig.deployment_region,
			resourceGroups: accountConfig.resource_groups,
		});
		setResourceGroups(accountConfig.resource_groups);
	}, [accountConfig, form]);

	// Validates the form, persists the resource groups, then syncs the parent
	// view, closes the drawer, and emits a success toast + telemetry event.
	const handleSubmit = useCallback(async (): Promise<void> => {
		try {
			const values = await form.validateFields();
			updateAccount(
				{
					pathParams: {
						cloudProvider: INTEGRATION_TYPES.AZURE,
						id: account?.id || '',
					},
					data: {
						config: {
							azure: {
								resourceGroups: values.resourceGroups || [],
							},
						},
					},
				},
				{
					onSuccess: () => {
						// Mirror the saved values into the parent's active account so the
						// UI reflects the change without a refetch.
						const nextConfig = {
							deployment_region: accountConfig?.deployment_region || '',
							resource_groups: values.resourceGroups || [],
						};
						setActiveAccount({
							...account,
							config: nextConfig,
						});
						onClose();
						toast.success('Account settings updated successfully', {
							position: 'bottom-right',
						});
						logEvent('Azure Integration: Account settings updated', {
							cloudAccountId: account.cloud_account_id,
							deploymentRegion: nextConfig.deployment_region,
							resourceGroups: nextConfig.resource_groups,
						});
					},
					onError: (error) => {
						toast.error('Failed to update account settings', {
							description: error?.message,
							position: 'bottom-right',
						});
					},
				},
			);
		} catch (error) {
			// validateFields rejects on validation errors; nothing to persist.
			console.error('Form submission failed:', error);
		}
	}, [form, updateAccount, account, accountConfig, setActiveAccount, onClose]);

	// Save is disabled until the edited resource groups differ from the saved
	// ones (order-insensitive comparison on sorted copies).
	const isSaveDisabled = useMemo(() => {
		if (!accountConfig) {
			return true;
		}
		const formResourceGroups = resourceGroups || [];
		return isEqual(
			[...formResourceGroups].sort(),
			[...accountConfig.resource_groups].sort(),
		);
	}, [accountConfig, resourceGroups]);

	const handleClose = useCallback(() => {
		onClose();
	}, [onClose]);

	return {
		form,
		isLoading,
		resourceGroups,
		isSaveDisabled,
		setResourceGroups,
		handleSubmit,
		handleClose,
	};
}

View File

@@ -0,0 +1,188 @@
import { Dispatch, SetStateAction, useCallback, useState } from 'react';
import { useQueryClient } from 'react-query';
import { toast } from '@signozhq/ui';
import { Form, FormInstance } from 'antd';
import {
CreateAccountMutationResult,
GetConnectionCredentialsQueryResult,
invalidateListAccounts,
useCreateAccount,
useGetConnectionCredentials,
} from 'api/generated/services/cloudintegration';
import {
CloudintegrationtypesCredentialsDTO,
CloudintegrationtypesPostableAccountDTO,
} from 'api/generated/services/sigNoz.schemas';
import { INTEGRATION_TYPES } from 'container/Integrations/constants';
import { ModalStateEnum } from 'container/Integrations/HeroSection/types';
import useAxiosError from 'hooks/useAxiosError';
import logEvent from '../../../api/common/logEvent';
// Inputs to the Azure account-setup modal hook.
interface UseIntegrationModalProps {
	onClose: () => void;
}

// State and handlers consumed by the Azure account-setup modal UI.
interface UseAzureIntegrationModal {
	form: FormInstance;
	modalState: ModalStateEnum;
	isLoading: boolean;
	accountId?: string;
	connectionCommands: {
		cliCommand: string;
		cloudPowerShellCommand: string;
	} | null;
	setModalState: Dispatch<SetStateAction<ModalStateEnum>>;
	handleSubmit: () => Promise<void>;
	handleClose: () => void;
	connectionParams?: CloudintegrationtypesCredentialsDTO;
	isConnectionParamsLoading: boolean;
	handleConnectionSuccess: (payload: {
		cloudAccountId: string;
		status?: unknown;
	}) => void;
	handleConnectionTimeout: (payload: { id?: string }) => void;
	handleConnectionError: () => void;
}

/**
 * Drives the Azure account-setup modal: fetches connection credentials,
 * creates the account (yielding CLI / Cloud PowerShell connection commands),
 * and transitions the modal through FORM -> WAITING -> success/ERROR as the
 * connection is detected, times out, or fails.
 *
 * Fix over the original: setIsLoading(false) used to sit in a `finally`
 * block, which ran as soon as the fire-and-forget `createAccount` mutate call
 * was *invoked* — not when the mutation settled — so the loading flag was
 * cleared while the request was still in flight. It is now cleared in the
 * mutation's onSuccess/onError callbacks and in the validation-failure path.
 */
export function useIntegrationModal({
	onClose,
}: UseIntegrationModalProps): UseAzureIntegrationModal {
	const queryClient = useQueryClient();
	const [form] = Form.useForm();
	const [modalState, setModalState] = useState<ModalStateEnum>(
		ModalStateEnum.FORM,
	);
	const [isLoading, setIsLoading] = useState(false);
	const [accountId, setAccountId] = useState<string | undefined>(undefined);
	const [connectionCommands, setConnectionCommands] = useState<{
		cliCommand: string;
		cloudPowerShellCommand: string;
	} | null>(null);

	// Resets transient modal state before delegating to the caller's onClose.
	const handleClose = useCallback((): void => {
		setModalState(ModalStateEnum.FORM);
		setConnectionCommands(null);
		onClose();
	}, [onClose]);

	// Called when the polling layer detects the account has connected:
	// log telemetry, toast, refresh the accounts list, and close the modal.
	const handleConnectionSuccess = useCallback(
		(payload: { cloudAccountId: string; status?: unknown }): void => {
			logEvent('Azure Integration: Account connected', {
				cloudAccountId: payload.cloudAccountId,
				status: payload.status,
			});
			toast.success('Azure account connected successfully', {
				position: 'bottom-right',
			});
			void invalidateListAccounts(queryClient, {
				cloudProvider: INTEGRATION_TYPES.AZURE,
			});
			handleClose();
		},
		[handleClose, queryClient],
	);

	// Called when connection detection gives up waiting.
	const handleConnectionTimeout = useCallback(
		(payload: { id?: string }): void => {
			setModalState(ModalStateEnum.ERROR);
			logEvent('Azure Integration: Account connection attempt timed out', {
				id: payload.id,
			});
		},
		[],
	);

	const handleConnectionError = useCallback((): void => {
		setModalState(ModalStateEnum.ERROR);
	}, []);

	const { mutate: createAccount } = useCreateAccount();
	const handleError = useAxiosError();

	// Ingestion/API credentials used to pre-fill the create-account payload.
	const { data: connectionParams, isLoading: isConnectionParamsLoading } =
		useGetConnectionCredentials<GetConnectionCredentialsQueryResult>(
			{
				cloudProvider: INTEGRATION_TYPES.AZURE,
			},
			{
				query: {
					onError: handleError,
				},
			},
		);

	// Validates the form, creates the account, and on success stores the
	// generated connection commands and moves the modal to WAITING.
	const handleSubmit = useCallback(async (): Promise<void> => {
		try {
			setIsLoading(true);
			const values = await form.validateFields();
			const payload: CloudintegrationtypesPostableAccountDTO = {
				config: {
					azure: {
						deploymentRegion: values.region,
						resourceGroups: values.resourceGroups || [],
					},
				},
				// Prefer server-provided credentials; fall back to form input.
				credentials: {
					ingestionUrl: connectionParams?.data?.ingestionUrl || values.ingestionUrl,
					ingestionKey: connectionParams?.data?.ingestionKey || values.ingestionKey,
					sigNozApiUrl: connectionParams?.data?.sigNozApiUrl || values.sigNozApiUrl,
					sigNozApiKey: connectionParams?.data?.sigNozApiKey || values.sigNozApiKey,
				},
			};
			createAccount(
				{
					pathParams: { cloudProvider: INTEGRATION_TYPES.AZURE },
					data: payload,
				},
				{
					onSuccess: (response: CreateAccountMutationResult) => {
						const nextAccountId = response.data.id;
						const artifact = response.data.connectionArtifact.azure;
						logEvent('Azure Integration: Account connection commands generated', {
							id: nextAccountId,
						});
						setConnectionCommands({
							cliCommand: artifact?.cliCommand || '',
							cloudPowerShellCommand: artifact?.cloudPowerShellCommand || '',
						});
						setModalState(ModalStateEnum.WAITING);
						setAccountId(nextAccountId);
						// Mutation settled: safe to clear the loading flag.
						setIsLoading(false);
					},
					onError: () => {
						setModalState(ModalStateEnum.ERROR);
						toast.error('Failed to create account connection', {
							position: 'bottom-right',
						});
						setIsLoading(false);
					},
				},
			);
		} catch (error) {
			// validateFields rejects on validation errors; no request was sent.
			console.error('Form submission failed:', error);
			setIsLoading(false);
		}
	}, [form, connectionParams, createAccount]);

	return {
		form,
		modalState,
		isLoading,
		accountId,
		connectionCommands,
		setModalState,
		handleSubmit,
		handleClose,
		connectionParams: connectionParams?.data as
			| CloudintegrationtypesCredentialsDTO
			| undefined,
		isConnectionParamsLoading,
		handleConnectionSuccess,
		handleConnectionTimeout,
		handleConnectionError,
	};
}

View File

@@ -48,5 +48,43 @@ func (provider *provider) addInfraMonitoringRoutes(router *mux.Router) error {
return err
}
if err := router.Handle("/api/v2/infra_monitoring/nodes", handler.New(
provider.authZ.ViewAccess(provider.infraMonitoringHandler.ListNodes),
handler.OpenAPIDef{
ID: "ListNodes",
Tags: []string{"inframonitoring"},
Summary: "List Nodes for Infra Monitoring",
Description: "Returns a paginated list of Kubernetes nodes with key metrics: CPU usage, CPU allocatable, memory working set, memory allocatable, and per-group readyNodesCount / notReadyNodesCount derived from each node's latest k8s.node.condition_ready value in the window. Each node includes metadata attributes (k8s.node.uid, k8s.cluster.name). The response type is 'list' for the default k8s.node.name grouping (each row is one node with its current condition string: ready / not_ready / '') or 'grouped_list' for custom groupBy keys (each row aggregates nodes in the group with readyNodesCount and notReadyNodesCount; condition stays empty). Supports filtering via a filter expression, custom groupBy, ordering by cpu / cpu_allocatable / memory / memory_allocatable, and pagination via offset/limit. Also reports missing required metrics and whether the requested time range falls before the data retention boundary. Numeric metric fields (nodeCPU, nodeCPUAllocatable, nodeMemory, nodeMemoryAllocatable) return -1 as a sentinel when no data is available for that field.",
Request: new(inframonitoringtypes.PostableNodes),
RequestContentType: "application/json",
Response: new(inframonitoringtypes.Nodes),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
if err := router.Handle("/api/v2/infra_monitoring/namespaces", handler.New(
provider.authZ.ViewAccess(provider.infraMonitoringHandler.ListNamespaces),
handler.OpenAPIDef{
ID: "ListNamespaces",
Tags: []string{"inframonitoring"},
Summary: "List Namespaces for Infra Monitoring",
Description: "Returns a paginated list of Kubernetes namespaces with key aggregated pod metrics: CPU usage and memory working set (summed across pods in the group), plus per-group pod counts bucketed by each pod's latest k8s.pod.phase value in the window (pendingPodCount, runningPodCount, succeededPodCount, failedPodCount, unknownPodCount). Each namespace includes metadata attributes (k8s.namespace.name, k8s.cluster.name). The response type is 'list' for the default k8s.namespace.name grouping or 'grouped_list' for custom groupBy keys; in both modes every row aggregates pods in the group. Supports filtering via a filter expression, custom groupBy, ordering by cpu / memory, and pagination via offset/limit. Also reports missing required metrics and whether the requested time range falls before the data retention boundary. Numeric metric fields (namespaceCPU, namespaceMemory) return -1 as a sentinel when no data is available for that field.",
Request: new(inframonitoringtypes.PostableNamespaces),
RequestContentType: "application/json",
Response: new(inframonitoringtypes.Namespaces),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized},
Deprecated: false,
SecuritySchemes: newSecuritySchemes(types.RoleViewer),
})).Methods(http.MethodPost).GetError(); err != nil {
return err
}
return nil
}

View File

@@ -25,7 +25,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/modules/spanmapper"
"github.com/SigNoz/signoz/pkg/modules/tracedetail"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -52,7 +51,6 @@ type provider struct {
flaggerHandler flagger.Handler
dashboardModule dashboard.Module
dashboardHandler dashboard.Handler
systemDashboardHandler systemdashboard.Handler
metricsExplorerHandler metricsexplorer.Handler
infraMonitoringHandler inframonitoring.Handler
gatewayHandler gateway.Handler
@@ -84,7 +82,6 @@ func NewFactory(
flaggerHandler flagger.Handler,
dashboardModule dashboard.Module,
dashboardHandler dashboard.Handler,
systemDashboardHandler systemdashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
infraMonitoringHandler inframonitoring.Handler,
gatewayHandler gateway.Handler,
@@ -119,7 +116,6 @@ func NewFactory(
flaggerHandler,
dashboardModule,
dashboardHandler,
systemDashboardHandler,
metricsExplorerHandler,
infraMonitoringHandler,
gatewayHandler,
@@ -156,7 +152,6 @@ func newProvider(
flaggerHandler flagger.Handler,
dashboardModule dashboard.Module,
dashboardHandler dashboard.Handler,
systemDashboardHandler systemdashboard.Handler,
metricsExplorerHandler metricsexplorer.Handler,
infraMonitoringHandler inframonitoring.Handler,
gatewayHandler gateway.Handler,
@@ -191,7 +186,6 @@ func newProvider(
flaggerHandler: flaggerHandler,
dashboardModule: dashboardModule,
dashboardHandler: dashboardHandler,
systemDashboardHandler: systemDashboardHandler,
metricsExplorerHandler: metricsExplorerHandler,
infraMonitoringHandler: infraMonitoringHandler,
gatewayHandler: gatewayHandler,
@@ -260,10 +254,6 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
return err
}
if err := provider.addSystemDashboardRoutes(router); err != nil {
return err
}
if err := provider.addMetricsExplorerRoutes(router); err != nil {
return err
}

View File

@@ -1,66 +0,0 @@
package signozapiserver
import (
"net/http"
"github.com/gorilla/mux"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
)
// addSystemDashboardRoutes registers the system-dashboard HTTP routes
// (get, update, reset), each keyed by a {source} path variable and wrapped
// with the appropriate authz check plus an OpenAPI definition. The first
// registration error aborts and is returned to the caller.
func (provider *provider) addSystemDashboardRoutes(router *mux.Router) error {
	// GET /api/v1/system/{source}: fetch the system dashboard (viewer access).
	if err := router.Handle("/api/v1/system/{source}", handler.New(provider.authZ.ViewAccess(provider.systemDashboardHandler.Get), handler.OpenAPIDef{
		ID:                  "GetSystemDashboard",
		Tags:                []string{"systemdashboard"},
		Summary:             "Get system dashboard",
		Description:         "This endpoint returns the system dashboard for the callers org keyed by source (e.g. ai-o11y-overview).",
		Request:             nil,
		RequestContentType:  "",
		Response:            new(dashboardtypes.GettableDashboard),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleViewer),
	})).Methods(http.MethodGet).GetError(); err != nil {
		return err
	}
	// PUT /api/v1/system/{source}: replace the system dashboard with the
	// request payload (editor access).
	if err := router.Handle("/api/v1/system/{source}", handler.New(provider.authZ.EditAccess(provider.systemDashboardHandler.Update), handler.OpenAPIDef{
		ID:                  "UpdateSystemDashboard",
		Tags:                []string{"systemdashboard"},
		Summary:             "Update system dashboard",
		Description:         "This endpoint replaces the system dashboard for the callers org with the provided payload.",
		Request:             new(dashboardtypes.UpdatableDashboard),
		RequestContentType:  "application/json",
		Response:            new(dashboardtypes.GettableDashboard),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleEditor),
	})).Methods(http.MethodPut).GetError(); err != nil {
		return err
	}
	// POST /api/v1/system/{source}/reset: restore the default system
	// dashboard, discarding edits (editor access).
	if err := router.Handle("/api/v1/system/{source}/reset", handler.New(provider.authZ.EditAccess(provider.systemDashboardHandler.Reset), handler.OpenAPIDef{
		ID:                  "ResetSystemDashboard",
		Tags:                []string{"systemdashboard"},
		Summary:             "Reset system dashboard to defaults",
		Description:         "This resets edited/updated system dashboard to default system dashboard.",
		Request:             nil,
		RequestContentType:  "",
		Response:            new(dashboardtypes.GettableDashboard),
		ResponseContentType: "application/json",
		SuccessStatusCode:   http.StatusOK,
		ErrorStatusCodes:    []int{http.StatusBadRequest},
		Deprecated:          false,
		SecuritySchemes:     newSecuritySchemes(types.RoleEditor),
	})).Methods(http.MethodPost).GetError(); err != nil {
		return err
	}
	return nil
}

View File

@@ -22,7 +22,7 @@ func newConfig() factory.Config {
Agent: AgentConfig{
// we will maintain the latest version of cloud integration agent from here,
// till we automate it externally or figure out a way to validate it.
Version: "v0.0.9",
Version: "v0.0.10",
},
}
}

View File

@@ -38,7 +38,7 @@ func NewModule(store dashboardtypes.Store, settings factory.ProviderSettings, an
}
func (module *module) Create(ctx context.Context, orgID valuer.UUID, createdBy string, creator valuer.UUID, postableDashboard dashboardtypes.PostableDashboard) (*dashboardtypes.Dashboard, error) {
dashboard, err := dashboardtypes.NewDashboard(orgID, createdBy, postableDashboard, "")
dashboard, err := dashboardtypes.NewDashboard(orgID, createdBy, postableDashboard)
if err != nil {
return nil, err
}

View File

@@ -21,7 +21,7 @@ func NewStore(sqlstore sqlstore.SQLStore) dashboardtypes.Store {
func (store *store) Create(ctx context.Context, storabledashboard *dashboardtypes.StorableDashboard) error {
_, err := store.
sqlstore.
BunDBCtx(ctx).
BunDB().
NewInsert().
Model(storabledashboard).
Exec(ctx)
@@ -55,7 +55,6 @@ func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID)
Model(storableDashboard).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("source = ?", "").
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "dashboard with id %s doesn't exist", id)
@@ -64,23 +63,6 @@ func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID)
return storableDashboard, nil
}
func (store *store) GetBySource(ctx context.Context, orgID valuer.UUID, source string) (*dashboardtypes.StorableDashboard, error) {
storableDashboard := new(dashboardtypes.StorableDashboard)
err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(storableDashboard).
Where("org_id = ?", orgID).
Where("source = ?", source).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "system dashboard with source %s doesn't exist", source)
}
return storableDashboard, nil
}
func (store *store) GetPublic(ctx context.Context, dashboardID string) (*dashboardtypes.StorablePublicDashboard, error) {
storable := new(dashboardtypes.StorablePublicDashboard)
err := store.
@@ -142,7 +124,6 @@ func (store *store) List(ctx context.Context, orgID valuer.UUID) ([]*dashboardty
NewSelect().
Model(&storableDashboards).
Where("org_id = ?", orgID).
Where("source = ?", "").
Scan(ctx)
if err != nil {
return nil, err
@@ -169,16 +150,14 @@ func (store *store) ListPublic(ctx context.Context, orgID valuer.UUID) ([]*dashb
return storable, nil
}
// Update works for user dashboards (Source = "") and system dashboards (Source = "ai-o11y-overview").
func (store *store) Update(ctx context.Context, orgID valuer.UUID, storableDashboard *dashboardtypes.StorableDashboard) error {
_, err := store.
sqlstore.
BunDBCtx(ctx).
BunDB().
NewUpdate().
Model(storableDashboard).
WherePK().
Where("org_id = ?", orgID).
Where("source = ?", storableDashboard.Source).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "dashboard with id %s doesn't exist", storableDashboard.ID)
@@ -210,7 +189,6 @@ func (store *store) Delete(ctx context.Context, orgID valuer.UUID, id valuer.UUI
Model(new(dashboardtypes.StorableDashboard)).
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("source = ?", "").
Exec(ctx)
if err != nil {
return store.sqlstore.WrapNotFoundErrf(err, errors.CodeNotFound, "dashboard with id %s doesn't exist", id)

View File

@@ -69,3 +69,51 @@ func (h *handler) ListPods(rw http.ResponseWriter, req *http.Request) {
render.Success(rw, http.StatusOK, result)
}
// ListNodes serves the v2 nodes list API over HTTP: it resolves the caller's
// org from the request claims, decodes the JSON body into a PostableNodes
// payload, delegates to the module and renders the outcome.
func (h *handler) ListNodes(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	claims, claimsErr := authtypes.ClaimsFromContext(ctx)
	if claimsErr != nil {
		render.Error(rw, claimsErr)
		return
	}
	orgID := valuer.MustNewUUID(claims.OrgID)

	payload := inframonitoringtypes.PostableNodes{}
	if bindErr := binding.JSON.BindBody(req.Body, &payload); bindErr != nil {
		render.Error(rw, bindErr)
		return
	}

	nodes, listErr := h.module.ListNodes(ctx, orgID, &payload)
	if listErr != nil {
		render.Error(rw, listErr)
		return
	}

	render.Success(rw, http.StatusOK, nodes)
}
// ListNamespaces serves the v2 namespaces list API over HTTP: it resolves the
// caller's org from the request claims, decodes the JSON body into a
// PostableNamespaces payload, delegates to the module and renders the outcome.
func (h *handler) ListNamespaces(rw http.ResponseWriter, req *http.Request) {
	ctx := req.Context()

	claims, claimsErr := authtypes.ClaimsFromContext(ctx)
	if claimsErr != nil {
		render.Error(rw, claimsErr)
		return
	}
	orgID := valuer.MustNewUUID(claims.OrgID)

	payload := inframonitoringtypes.PostableNamespaces{}
	if bindErr := binding.JSON.BindBody(req.Body, &payload); bindErr != nil {
		render.Error(rw, bindErr)
		return
	}

	namespaces, listErr := h.module.ListNamespaces(ctx, orgID, &payload)
	if listErr != nil {
		render.Error(rw, listErr)
		return
	}

	render.Success(rw, http.StatusOK, namespaces)
}

View File

@@ -23,3 +23,9 @@ type podPhaseCounts struct {
Failed int
Unknown int
}
// nodeConditionCounts holds per-group node counts bucketed by each node's
// latest condition_ready sample in the query window. Ready and NotReady are
// populated from the condition-counts query; a group missing from the result
// map is treated as all-zero by callers.
type nodeConditionCounts struct {
	Ready    int
	NotReady int
}

View File

@@ -242,3 +242,175 @@ func (m *module) ListPods(ctx context.Context, orgID valuer.UUID, req *inframoni
return resp, nil
}
// ListNodes serves the v2 nodes list API. It validates the request, applies
// defaults (order by CPU descending; group by node name when no group-by is
// given), short-circuits when required metrics are missing or the window
// predates retention, then ranks the groups, runs the full composite query
// for the current page, and augments each record with node-condition counts
// and metadata.
func (m *module) ListNodes(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostableNodes) (*inframonitoringtypes.Nodes, error) {
	if err := req.Validate(); err != nil {
		return nil, err
	}
	resp := &inframonitoringtypes.Nodes{}
	// Default ordering: node CPU usage, descending.
	if req.OrderBy == nil {
		req.OrderBy = &qbtypes.OrderBy{
			Key: qbtypes.OrderByKey{
				TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
					Name: inframonitoringtypes.NodesOrderByCPU,
				},
			},
			Direction: qbtypes.OrderDirectionDesc,
		}
	}
	// No explicit group-by means a flat per-node list; any caller-supplied
	// group-by switches the response into grouped-list mode.
	if len(req.GroupBy) == 0 {
		req.GroupBy = []qbtypes.GroupByKey{nodeNameGroupByKey}
		resp.Type = inframonitoringtypes.ResponseTypeList
	} else {
		resp.Type = inframonitoringtypes.ResponseTypeGroupedList
	}
	missingMetrics, minFirstReportedUnixMilli, err := m.getMetricsExistenceAndEarliestTime(ctx, nodesTableMetricNamesList)
	if err != nil {
		return nil, err
	}
	// Short-circuit with an empty page when any required metric is absent.
	if len(missingMetrics) > 0 {
		resp.RequiredMetricsCheck = inframonitoringtypes.RequiredMetricsCheck{MissingMetrics: missingMetrics}
		resp.Records = []inframonitoringtypes.NodeRecord{}
		resp.Total = 0
		return resp, nil
	}
	// Short-circuit when the requested window ends before the earliest
	// reported sample for these metrics.
	if req.End < int64(minFirstReportedUnixMilli) {
		resp.EndTimeBeforeRetention = true
		resp.Records = []inframonitoringtypes.NodeRecord{}
		resp.Total = 0
		return resp, nil
	}
	resp.RequiredMetricsCheck = inframonitoringtypes.RequiredMetricsCheck{MissingMetrics: []string{}}
	// Metadata also drives Total: one entry per distinct group.
	metadataMap, err := m.getNodesTableMetadata(ctx, req)
	if err != nil {
		return nil, err
	}
	resp.Total = len(metadataMap)
	// Rank all groups by the order-by metric and take the requested page.
	pageGroups, err := m.getTopNodeGroups(ctx, orgID, req, metadataMap)
	if err != nil {
		return nil, err
	}
	if len(pageGroups) == 0 {
		resp.Records = []inframonitoringtypes.NodeRecord{}
		return resp, nil
	}
	filterExpr := ""
	if req.Filter != nil {
		filterExpr = req.Filter.Expression
	}
	// Full composite query, scoped to just the page's groups.
	fullQueryReq := buildFullQueryRequest(req.Start, req.End, filterExpr, req.GroupBy, pageGroups, m.newNodesTableListQuery())
	queryResp, err := m.querier.QueryRange(ctx, orgID, fullQueryReq)
	if err != nil {
		return nil, err
	}
	// Condition counts are queried separately (not part of the QB composite).
	conditionCounts, err := m.getPerGroupNodeConditionCounts(ctx, req, pageGroups)
	if err != nil {
		return nil, err
	}
	isNodeNameInGroupBy := isKeyInGroupByAttrs(req.GroupBy, nodeNameAttrKey)
	resp.Records = buildNodeRecords(isNodeNameInGroupBy, queryResp, pageGroups, req.GroupBy, metadataMap, conditionCounts)
	resp.Warning = queryResp.Warning
	return resp, nil
}
// ListNamespaces serves the v2 namespaces list API. It validates the request,
// applies defaults (order by CPU descending; group by namespace name when no
// group-by is given), short-circuits when required metrics are missing or the
// window predates retention, then ranks the groups, runs the full composite
// query for the current page, and augments each record with pod phase counts
// and metadata.
func (m *module) ListNamespaces(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostableNamespaces) (*inframonitoringtypes.Namespaces, error) {
	if err := req.Validate(); err != nil {
		return nil, err
	}
	resp := &inframonitoringtypes.Namespaces{}
	// Default ordering: namespace CPU usage, descending.
	if req.OrderBy == nil {
		req.OrderBy = &qbtypes.OrderBy{
			Key: qbtypes.OrderByKey{
				TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
					Name: inframonitoringtypes.NamespacesOrderByCPU,
				},
			},
			Direction: qbtypes.OrderDirectionDesc,
		}
	}
	// No explicit group-by means a flat per-namespace list; any
	// caller-supplied group-by switches into grouped-list mode.
	if len(req.GroupBy) == 0 {
		req.GroupBy = []qbtypes.GroupByKey{namespaceNameGroupByKey}
		resp.Type = inframonitoringtypes.ResponseTypeList
	} else {
		resp.Type = inframonitoringtypes.ResponseTypeGroupedList
	}
	missingMetrics, minFirstReportedUnixMilli, err := m.getMetricsExistenceAndEarliestTime(ctx, namespacesTableMetricNamesList)
	if err != nil {
		return nil, err
	}
	// Short-circuit with an empty page when any required metric is absent.
	if len(missingMetrics) > 0 {
		resp.RequiredMetricsCheck = inframonitoringtypes.RequiredMetricsCheck{MissingMetrics: missingMetrics}
		resp.Records = []inframonitoringtypes.NamespaceRecord{}
		resp.Total = 0
		return resp, nil
	}
	// Short-circuit when the requested window ends before the earliest
	// reported sample for these metrics.
	if req.End < int64(minFirstReportedUnixMilli) {
		resp.EndTimeBeforeRetention = true
		resp.Records = []inframonitoringtypes.NamespaceRecord{}
		resp.Total = 0
		return resp, nil
	}
	resp.RequiredMetricsCheck = inframonitoringtypes.RequiredMetricsCheck{MissingMetrics: []string{}}
	// Metadata also drives Total: one entry per distinct group.
	metadataMap, err := m.getNamespacesTableMetadata(ctx, req)
	if err != nil {
		return nil, err
	}
	resp.Total = len(metadataMap)
	// Rank all groups by the order-by metric and take the requested page.
	pageGroups, err := m.getTopNamespaceGroups(ctx, orgID, req, metadataMap)
	if err != nil {
		return nil, err
	}
	if len(pageGroups) == 0 {
		resp.Records = []inframonitoringtypes.NamespaceRecord{}
		return resp, nil
	}
	filterExpr := ""
	if req.Filter != nil {
		filterExpr = req.Filter.Expression
	}
	// Full composite query, scoped to just the page's groups.
	fullQueryReq := buildFullQueryRequest(req.Start, req.End, filterExpr, req.GroupBy, pageGroups, m.newNamespacesTableListQuery())
	queryResp, err := m.querier.QueryRange(ctx, orgID, fullQueryReq)
	if err != nil {
		return nil, err
	}
	// Reuse the pods phase-counts CTE function via a temp struct — it reads only
	// Start/End/Filter/GroupBy from PostablePods.
	phaseCounts, err := m.getPerGroupPodPhaseCounts(ctx, &inframonitoringtypes.PostablePods{
		Start:   req.Start,
		End:     req.End,
		Filter:  req.Filter,
		GroupBy: req.GroupBy,
	}, pageGroups)
	if err != nil {
		return nil, err
	}
	resp.Records = buildNamespaceRecords(queryResp, pageGroups, req.GroupBy, metadataMap, phaseCounts)
	resp.Warning = queryResp.Warning
	return resp, nil
}

View File

@@ -0,0 +1,121 @@
package implinframonitoring
import (
	"context"
	"fmt"
	"slices"

	"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/valuer"
)
// buildNamespaceRecords assembles the page records for the namespaces list.
// Pod phase counts come from phaseCounts in both modes; every row is a group
// of pods, so there is no per-row "current phase" concept (unlike the pods
// and nodes list modes).
func buildNamespaceRecords(
	resp *qbtypes.QueryRangeResponse,
	pageGroups []map[string]string,
	groupBy []qbtypes.GroupByKey,
	metadataMap map[string]map[string]string,
	phaseCounts map[string]podPhaseCounts,
) []inframonitoringtypes.NamespaceRecord {
	perGroupMetrics := parseFullQueryResponse(resp, groupBy)
	out := make([]inframonitoringtypes.NamespaceRecord, 0, len(pageGroups))

	for _, groupLabels := range pageGroups {
		key := compositeKeyFromLabels(groupLabels, groupBy)

		// Defaults: metric values start at -1 until populated below.
		rec := inframonitoringtypes.NamespaceRecord{
			NamespaceName:   groupLabels[namespaceNameAttrKey],
			NamespaceCPU:    -1,
			NamespaceMemory: -1,
			Meta:            map[string]any{},
		}

		if metrics, ok := perGroupMetrics[key]; ok {
			if cpu, found := metrics["A"]; found {
				rec.NamespaceCPU = cpu
			}
			if mem, found := metrics["D"]; found {
				rec.NamespaceMemory = mem
			}
		}

		if counts, ok := phaseCounts[key]; ok {
			rec.PendingPodCount = counts.Pending
			rec.RunningPodCount = counts.Running
			rec.SucceededPodCount = counts.Succeeded
			rec.FailedPodCount = counts.Failed
			rec.UnknownPodCount = counts.Unknown
		}

		// Ranging a missing key yields a nil map, so no presence check needed.
		for attr, val := range metadataMap[key] {
			rec.Meta[attr] = val
		}

		out = append(out, rec)
	}

	return out
}
// getTopNamespaceGroups ranks all groups by the requested order-by metric and
// returns the page (offset/limit) of group label-sets, backfilled from
// metadata for groups that have metadata but no ranking metric.
//
// It builds a trimmed copy of the namespaces composite query containing only
// the queries needed for the order-by key, with the request filter and
// group-by applied to each builder query.
func (m *module) getTopNamespaceGroups(
	ctx context.Context,
	orgID valuer.UUID,
	req *inframonitoringtypes.PostableNamespaces,
	metadataMap map[string]map[string]string,
) ([]map[string]string, error) {
	orderByKey := req.OrderBy.Key.Name
	queryNamesForOrderBy := orderByToNamespacesQueryNames[orderByKey]
	// Guard against an unknown order-by key: indexing the nil slice below
	// would panic, so fail with an explicit error instead.
	if len(queryNamesForOrderBy) == 0 {
		return nil, fmt.Errorf("unsupported namespaces order by key %q", orderByKey)
	}
	// By convention the last query name is the one groups are ranked on.
	rankingQueryName := queryNamesForOrderBy[len(queryNamesForOrderBy)-1]
	topReq := &qbtypes.QueryRangeRequest{
		Start:       uint64(req.Start),
		End:         uint64(req.End),
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: make([]qbtypes.QueryEnvelope, 0, len(queryNamesForOrderBy)),
		},
	}
	for _, envelope := range m.newNamespacesTableListQuery().CompositeQuery.Queries {
		if !slices.Contains(queryNamesForOrderBy, envelope.GetQueryName()) {
			continue
		}
		copied := envelope
		if copied.Type == qbtypes.QueryTypeBuilder {
			// Merge the caller's filter with any filter baked into the query,
			// and group by the caller's requested keys.
			existingExpr := ""
			if f := copied.GetFilter(); f != nil {
				existingExpr = f.Expression
			}
			reqFilterExpr := ""
			if req.Filter != nil {
				reqFilterExpr = req.Filter.Expression
			}
			merged := mergeFilterExpressions(existingExpr, reqFilterExpr)
			copied.SetFilter(&qbtypes.Filter{Expression: merged})
			copied.SetGroupBy(req.GroupBy)
		}
		topReq.CompositeQuery.Queries = append(topReq.CompositeQuery.Queries, copied)
	}
	resp, err := m.querier.QueryRange(ctx, orgID, topReq)
	if err != nil {
		return nil, err
	}
	allMetricGroups := parseAndSortGroups(resp, rankingQueryName, req.GroupBy, req.OrderBy.Direction)
	return paginateWithBackfill(allMetricGroups, metadataMap, req.GroupBy, req.Offset, req.Limit), nil
}
// getNamespacesTableMetadata fetches per-group metadata attributes for the
// namespaces table, requesting only those well-known attribute keys that are
// not already part of the request's group-by.
func (m *module) getNamespacesTableMetadata(ctx context.Context, req *inframonitoringtypes.PostableNamespaces) (map[string]map[string]string, error) {
	var extraAttrs []string
	for _, attrKey := range namespaceAttrKeysForMetadata {
		if isKeyInGroupByAttrs(req.GroupBy, attrKey) {
			continue
		}
		extraAttrs = append(extraAttrs, attrKey)
	}
	return m.getMetadata(ctx, namespacesTableMetricNamesList, req.GroupBy, extraAttrs, req.Filter, req.Start, req.End)
}

View File

@@ -0,0 +1,92 @@
package implinframonitoring
import (
"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
const (
	// namespaceNameAttrKey is the resource attribute carrying the namespace name.
	namespaceNameAttrKey = "k8s.namespace.name"
)

// namespaceNameGroupByKey is the implicit group-by used in list mode.
var namespaceNameGroupByKey = qbtypes.GroupByKey{
	TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
		Name:          namespaceNameAttrKey,
		FieldContext:  telemetrytypes.FieldContextResource,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
}

// namespacesTableMetricNamesList drives the existence/retention check.
// Includes k8s.pod.phase so the response short-circuits cleanly when a
// cluster doesn't ship the metric — even though phase isn't part of the
// QB composite query (it's queried separately via getPerGroupPodPhaseCounts).
var namespacesTableMetricNamesList = []string{
	"k8s.pod.cpu.usage",
	"k8s.pod.memory.working_set",
	"k8s.pod.phase",
}

// namespaceAttrKeysForMetadata lists the attribute keys fetched as metadata
// for each group; reuses namespaceNameAttrKey so the key literal lives in
// exactly one place.
var namespaceAttrKeysForMetadata = []string{
	namespaceNameAttrKey,
	"k8s.cluster.name",
}

// orderByToNamespacesQueryNames maps each supported order-by key to the query
// names needed to rank groups; the last entry is the ranking query.
var orderByToNamespacesQueryNames = map[string][]string{
	inframonitoringtypes.NamespacesOrderByCPU:    {"A"},
	inframonitoringtypes.NamespacesOrderByMemory: {"D"},
}
// newNamespacesTableListQuery builds the composite QB v5 request for the
// namespaces list. Pod phase counts are derived separately via
// getPerGroupPodPhaseCounts (works for both list and grouped_list modes),
// so no phase query is included here. Query letters A and D are kept
// aligned with the v1 implementation.
func (m *module) newNamespacesTableListQuery() *qbtypes.QueryRangeRequest {
	// Both queries share the same shape; only the letter and metric differ.
	// A = CPU usage, D = memory working set (sum of pod values per group).
	specs := []struct {
		name   string
		metric string
	}{
		{name: "A", metric: "k8s.pod.cpu.usage"},
		{name: "D", metric: "k8s.pod.memory.working_set"},
	}

	envelopes := make([]qbtypes.QueryEnvelope, 0, len(specs))
	for _, s := range specs {
		envelopes = append(envelopes, qbtypes.QueryEnvelope{
			Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:   s.name,
				Signal: telemetrytypes.SignalMetrics,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       s.metric,
						TimeAggregation:  metrictypes.TimeAggregationAvg,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
				GroupBy:  []qbtypes.GroupByKey{namespaceNameGroupByKey},
				Disabled: false,
			},
		})
	}

	return &qbtypes.QueryRangeRequest{
		RequestType:    qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{Queries: envelopes},
	}
}

View File

@@ -0,0 +1,299 @@
package implinframonitoring
import (
"context"
"fmt"
"slices"
"strings"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
)
// buildNodeRecords assembles the page records for the nodes list. Condition
// counts come from conditionCounts in both modes. In list mode
// (isNodeNameInGroupBy=true) each group is exactly one node, so one of the
// two count buckets is 1 and Condition is derived from which; in grouped_list
// mode Condition stays NodeConditionNone.
func buildNodeRecords(
	isNodeNameInGroupBy bool,
	resp *qbtypes.QueryRangeResponse,
	pageGroups []map[string]string,
	groupBy []qbtypes.GroupByKey,
	metadataMap map[string]map[string]string,
	conditionCounts map[string]nodeConditionCounts,
) []inframonitoringtypes.NodeRecord {
	perGroupMetrics := parseFullQueryResponse(resp, groupBy)
	out := make([]inframonitoringtypes.NodeRecord, 0, len(pageGroups))

	for _, groupLabels := range pageGroups {
		key := compositeKeyFromLabels(groupLabels, groupBy)

		// Defaults: metric values start at -1 until populated below.
		rec := inframonitoringtypes.NodeRecord{
			NodeName:              groupLabels[nodeNameAttrKey],
			Condition:             inframonitoringtypes.NodeConditionNone,
			NodeCPU:               -1,
			NodeCPUAllocatable:    -1,
			NodeMemory:            -1,
			NodeMemoryAllocatable: -1,
			Meta:                  map[string]any{},
		}

		if metrics, ok := perGroupMetrics[key]; ok {
			if v, found := metrics["A"]; found {
				rec.NodeCPU = v
			}
			if v, found := metrics["B"]; found {
				rec.NodeCPUAllocatable = v
			}
			if v, found := metrics["C"]; found {
				rec.NodeMemory = v
			}
			if v, found := metrics["D"]; found {
				rec.NodeMemoryAllocatable = v
			}
		}

		if counts, ok := conditionCounts[key]; ok {
			rec.ReadyNodesCount = counts.Ready
			rec.NotReadyNodesCount = counts.NotReady
			// List mode: the group is a single node, so whichever bucket holds
			// exactly one entry names its current condition.
			if isNodeNameInGroupBy {
				if counts.Ready == 1 {
					rec.Condition = inframonitoringtypes.NodeConditionReady
				} else if counts.NotReady == 1 {
					rec.Condition = inframonitoringtypes.NodeConditionNotReady
				}
			}
		}

		// Ranging a missing key yields a nil map, so no presence check needed.
		for attr, val := range metadataMap[key] {
			rec.Meta[attr] = val
		}

		out = append(out, rec)
	}

	return out
}
// getTopNodeGroups ranks all groups by the requested order-by metric and
// returns the page (offset/limit) of group label-sets, backfilled from
// metadata for groups that have metadata but no ranking metric.
//
// It builds a trimmed copy of the nodes composite query containing only the
// queries needed for the order-by key, with the request filter and group-by
// applied to each builder query.
func (m *module) getTopNodeGroups(
	ctx context.Context,
	orgID valuer.UUID,
	req *inframonitoringtypes.PostableNodes,
	metadataMap map[string]map[string]string,
) ([]map[string]string, error) {
	orderByKey := req.OrderBy.Key.Name
	queryNamesForOrderBy := orderByToNodesQueryNames[orderByKey]
	// Guard against an unknown order-by key: indexing the nil slice below
	// would panic, so fail with an explicit error instead.
	if len(queryNamesForOrderBy) == 0 {
		return nil, fmt.Errorf("unsupported nodes order by key %q", orderByKey)
	}
	// By convention the last query name is the one groups are ranked on.
	rankingQueryName := queryNamesForOrderBy[len(queryNamesForOrderBy)-1]
	topReq := &qbtypes.QueryRangeRequest{
		Start:       uint64(req.Start),
		End:         uint64(req.End),
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: make([]qbtypes.QueryEnvelope, 0, len(queryNamesForOrderBy)),
		},
	}
	for _, envelope := range m.newNodesTableListQuery().CompositeQuery.Queries {
		if !slices.Contains(queryNamesForOrderBy, envelope.GetQueryName()) {
			continue
		}
		copied := envelope
		if copied.Type == qbtypes.QueryTypeBuilder {
			// Merge the caller's filter with any filter baked into the query,
			// and group by the caller's requested keys.
			existingExpr := ""
			if f := copied.GetFilter(); f != nil {
				existingExpr = f.Expression
			}
			reqFilterExpr := ""
			if req.Filter != nil {
				reqFilterExpr = req.Filter.Expression
			}
			merged := mergeFilterExpressions(existingExpr, reqFilterExpr)
			copied.SetFilter(&qbtypes.Filter{Expression: merged})
			copied.SetGroupBy(req.GroupBy)
		}
		topReq.CompositeQuery.Queries = append(topReq.CompositeQuery.Queries, copied)
	}
	resp, err := m.querier.QueryRange(ctx, orgID, topReq)
	if err != nil {
		return nil, err
	}
	allMetricGroups := parseAndSortGroups(resp, rankingQueryName, req.GroupBy, req.OrderBy.Direction)
	return paginateWithBackfill(allMetricGroups, metadataMap, req.GroupBy, req.Offset, req.Limit), nil
}
// getNodesTableMetadata fetches per-group metadata attributes for the nodes
// table, requesting only those well-known attribute keys that are not already
// part of the request's group-by.
func (m *module) getNodesTableMetadata(ctx context.Context, req *inframonitoringtypes.PostableNodes) (map[string]map[string]string, error) {
	var extraAttrs []string
	for _, attrKey := range nodeAttrKeysForMetadata {
		if isKeyInGroupByAttrs(req.GroupBy, attrKey) {
			continue
		}
		extraAttrs = append(extraAttrs, attrKey)
	}
	return m.getMetadata(ctx, nodesTableMetricNamesList, req.GroupBy, extraAttrs, req.Filter, req.Start, req.End)
}
// getPerGroupNodeConditionCounts computes per-group node counts bucketed by each
// node's latest condition_ready value (0 / 1) in the requested window.
// Pipeline:
//
//	timeSeriesFPs: fp ↔ (node_name, groupBy cols) from the time_series table.
//	  User filter + page-groups filter applied here.
//	latestConditionPerNode: INNER JOIN samples × timeSeriesFPs, collapsed to
//	  the latest condition value per node via argMax(value, unix_milli).
//	countNodesPerCondition: per-group uniqExactIf into ready/not_ready buckets.
//
// Groups absent from the result map have implicit zero counts (caller default).
func (m *module) getPerGroupNodeConditionCounts(
	ctx context.Context,
	req *inframonitoringtypes.PostableNodes,
	pageGroups []map[string]string,
) (map[string]nodeConditionCounts, error) {
	// Nothing to count for an empty page; an empty group-by would also make
	// the GROUP BY clauses built below degenerate.
	if len(pageGroups) == 0 || len(req.GroupBy) == 0 {
		return map[string]nodeConditionCounts{}, nil
	}
	// Merged filter expression (user filter + page-groups IN clauses).
	reqFilterExpr := ""
	if req.Filter != nil {
		reqFilterExpr = req.Filter.Expression
	}
	pageGroupsFilterExpr := buildPageGroupsFilterExpr(pageGroups)
	filterExpr := mergeFilterExpressions(reqFilterExpr, pageGroupsFilterExpr)
	// Resolve tables. Same convention as pods.
	adjustedStart, adjustedEnd, _, localTimeSeriesTable := telemetrymetrics.WhichTSTableToUse(
		uint64(req.Start), uint64(req.End), nil,
	)
	samplesTable := telemetrymetrics.WhichSamplesTableToUse(
		uint64(req.Start), uint64(req.End),
		metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil,
	)
	valueCol := telemetrymetrics.ValueColumnForSamplesTable(samplesTable)
	// ----- timeSeriesFPs -----
	// Selects fingerprint plus node_name and every group-by column extracted
	// from the labels JSON; values are bound via Var to keep the SQL safe.
	timeSeriesFPs := sqlbuilder.NewSelectBuilder()
	timeSeriesFPsSelectCols := []string{
		"fingerprint",
		fmt.Sprintf("JSONExtractString(labels, %s) AS node_name", timeSeriesFPs.Var(nodeNameAttrKey)),
	}
	for _, key := range req.GroupBy {
		timeSeriesFPsSelectCols = append(timeSeriesFPsSelectCols,
			fmt.Sprintf("JSONExtractString(labels, %s) AS %s", timeSeriesFPs.Var(key.Name), quoteIdentifier(key.Name)),
		)
	}
	timeSeriesFPs.Select(timeSeriesFPsSelectCols...)
	timeSeriesFPs.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, localTimeSeriesTable))
	timeSeriesFPs.Where(
		timeSeriesFPs.E("metric_name", nodeConditionMetricName),
		timeSeriesFPs.GE("unix_milli", adjustedStart),
		timeSeriesFPs.L("unix_milli", adjustedEnd),
	)
	if filterExpr != "" {
		filterClause, err := m.buildFilterClause(ctx, &qbtypes.Filter{Expression: filterExpr}, req.Start, req.End)
		if err != nil {
			return nil, err
		}
		if filterClause != nil {
			timeSeriesFPs.AddWhereClause(filterClause)
		}
	}
	// Group to deduplicate repeated (fingerprint, labels) rows.
	timeSeriesFPsGroupBy := []string{"fingerprint", "node_name"}
	for _, key := range req.GroupBy {
		timeSeriesFPsGroupBy = append(timeSeriesFPsGroupBy, quoteIdentifier(key.Name))
	}
	timeSeriesFPs.GroupBy(timeSeriesFPsGroupBy...)
	timeSeriesFPsSQL, timeSeriesFPsArgs := timeSeriesFPs.BuildWithFlavor(sqlbuilder.ClickHouse)
	// ----- latestConditionPerNode -----
	// Per (node_name, group-by cols): the latest sample value in the window.
	latestConditionPerNode := sqlbuilder.NewSelectBuilder()
	latestConditionPerNodeSelectCols := []string{"tsfp.node_name AS node_name"}
	latestConditionPerNodeGroupBy := []string{"node_name"}
	for _, key := range req.GroupBy {
		col := quoteIdentifier(key.Name)
		latestConditionPerNodeSelectCols = append(latestConditionPerNodeSelectCols, fmt.Sprintf("tsfp.%s AS %s", col, col))
		latestConditionPerNodeGroupBy = append(latestConditionPerNodeGroupBy, col)
	}
	latestConditionPerNodeSelectCols = append(latestConditionPerNodeSelectCols,
		fmt.Sprintf("argMax(samples.%s, samples.unix_milli) AS condition_value", valueCol),
	)
	latestConditionPerNode.Select(latestConditionPerNodeSelectCols...)
	latestConditionPerNode.From(fmt.Sprintf(
		"%s.%s AS samples INNER JOIN time_series_fps AS tsfp ON samples.fingerprint = tsfp.fingerprint",
		telemetrymetrics.DBName, samplesTable,
	))
	latestConditionPerNode.Where(
		latestConditionPerNode.E("samples.metric_name", nodeConditionMetricName),
		latestConditionPerNode.GE("samples.unix_milli", req.Start),
		latestConditionPerNode.L("samples.unix_milli", req.End),
		"tsfp.node_name != ''",
	)
	latestConditionPerNode.GroupBy(latestConditionPerNodeGroupBy...)
	latestConditionPerNodeSQL, latestConditionPerNodeArgs := latestConditionPerNode.BuildWithFlavor(sqlbuilder.ClickHouse)
	// ----- countNodesPerCondition (outer SELECT) -----
	// Per group: distinct node counts per condition bucket.
	countNodesPerConditionSelectCols := make([]string, 0, len(req.GroupBy)+2)
	countNodesPerConditionGroupBy := make([]string, 0, len(req.GroupBy))
	for _, key := range req.GroupBy {
		col := quoteIdentifier(key.Name)
		countNodesPerConditionSelectCols = append(countNodesPerConditionSelectCols, col)
		countNodesPerConditionGroupBy = append(countNodesPerConditionGroupBy, col)
	}
	countNodesPerConditionSelectCols = append(countNodesPerConditionSelectCols,
		fmt.Sprintf("uniqExactIf(node_name, condition_value = %d) AS ready_count", inframonitoringtypes.NodeConditionNumReady),
		fmt.Sprintf("uniqExactIf(node_name, condition_value = %d) AS not_ready_count", inframonitoringtypes.NodeConditionNumNotReady),
	)
	countNodesPerConditionSQL := fmt.Sprintf(
		"SELECT %s FROM latest_condition_per_node GROUP BY %s",
		strings.Join(countNodesPerConditionSelectCols, ", "),
		strings.Join(countNodesPerConditionGroupBy, ", "),
	)
	// Combine CTEs + outer.
	cteFragments := []string{
		fmt.Sprintf("time_series_fps AS (%s)", timeSeriesFPsSQL),
		fmt.Sprintf("latest_condition_per_node AS (%s)", latestConditionPerNodeSQL),
	}
	finalSQL := querybuilder.CombineCTEs(cteFragments) + countNodesPerConditionSQL
	// Args must line up with CTE order: timeSeriesFPs first, then the join.
	finalArgs := querybuilder.PrependArgs([][]any{timeSeriesFPsArgs, latestConditionPerNodeArgs}, nil)
	rows, err := m.telemetryStore.ClickhouseDB().Query(ctx, finalSQL, finalArgs...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	result := make(map[string]nodeConditionCounts)
	for rows.Next() {
		// Scan the group-by columns into strings, then the two count columns.
		groupVals := make([]string, len(req.GroupBy))
		scanPtrs := make([]any, 0, len(req.GroupBy)+2)
		for i := range groupVals {
			scanPtrs = append(scanPtrs, &groupVals[i])
		}
		var ready, notReady uint64
		scanPtrs = append(scanPtrs, &ready, &notReady)
		if err := rows.Scan(scanPtrs...); err != nil {
			return nil, err
		}
		result[compositeKeyFromList(groupVals)] = nodeConditionCounts{
			Ready:    int(ready),
			NotReady: int(notReady),
		}
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return result, nil
}

View File

@@ -0,0 +1,134 @@
package implinframonitoring
import (
"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
const (
	// nodeNameAttrKey is the resource attribute carrying the node name.
	nodeNameAttrKey = "k8s.node.name"
	// nodeConditionMetricName is the metric queried for node readiness.
	nodeConditionMetricName = "k8s.node.condition_ready"
)

// nodeNameGroupByKey is the implicit group-by used in list mode.
var nodeNameGroupByKey = qbtypes.GroupByKey{
	TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
		Name:          nodeNameAttrKey,
		FieldContext:  telemetrytypes.FieldContextResource,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
}

// nodesTableMetricNamesList drives the existence/retention check.
// Includes condition_ready so the response short-circuits cleanly when a
// cluster doesn't ship the metric — even though condition_ready isn't part
// of the QB composite query (it's queried separately via getPerGroupNodeConditionCounts).
var nodesTableMetricNamesList = []string{
	"k8s.node.cpu.usage",
	"k8s.node.allocatable_cpu",
	"k8s.node.memory.working_set",
	"k8s.node.allocatable_memory",
	// Reuse the const so the metric name lives in exactly one place.
	nodeConditionMetricName,
}

// nodeAttrKeysForMetadata lists the attribute keys fetched as metadata for
// each group when they are not already part of the group-by.
var nodeAttrKeysForMetadata = []string{
	"k8s.node.uid",
	"k8s.cluster.name",
}

// orderByToNodesQueryNames maps each supported order-by key to the query
// names needed to rank groups; the last entry is the ranking query.
var orderByToNodesQueryNames = map[string][]string{
	inframonitoringtypes.NodesOrderByCPU:               {"A"},
	inframonitoringtypes.NodesOrderByCPUAllocatable:    {"B"},
	inframonitoringtypes.NodesOrderByMemory:            {"C"},
	inframonitoringtypes.NodesOrderByMemoryAllocatable: {"D"},
}
// newNodesTableListQuery builds the composite QB v5 request for the nodes
// list. Node condition is derived separately via getPerGroupNodeConditionCounts
// (works for both list and grouped_list modes), so no condition query is
// included here.
//
// For the allocatable metrics (B and D), TimeAggregationLatest is the closest
// v5 equivalent of v1's AnyLast; allocatable values change rarely so
// divergence in practice is negligible.
func (m *module) newNodesTableListQuery() *qbtypes.QueryRangeRequest {
	// All four queries share the same shape; only the letter, the metric and
	// the time aggregation differ. A = CPU usage, B = CPU allocatable,
	// C = memory working set, D = memory allocatable.
	specs := []struct {
		name    string
		metric  string
		timeAgg metrictypes.TimeAggregation
	}{
		{name: "A", metric: "k8s.node.cpu.usage", timeAgg: metrictypes.TimeAggregationAvg},
		{name: "B", metric: "k8s.node.allocatable_cpu", timeAgg: metrictypes.TimeAggregationLatest},
		{name: "C", metric: "k8s.node.memory.working_set", timeAgg: metrictypes.TimeAggregationAvg},
		{name: "D", metric: "k8s.node.allocatable_memory", timeAgg: metrictypes.TimeAggregationLatest},
	}

	envelopes := make([]qbtypes.QueryEnvelope, 0, len(specs))
	for _, s := range specs {
		envelopes = append(envelopes, qbtypes.QueryEnvelope{
			Type: qbtypes.QueryTypeBuilder,
			Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
				Name:   s.name,
				Signal: telemetrytypes.SignalMetrics,
				Aggregations: []qbtypes.MetricAggregation{
					{
						MetricName:       s.metric,
						TimeAggregation:  s.timeAgg,
						SpaceAggregation: metrictypes.SpaceAggregationSum,
						ReduceTo:         qbtypes.ReduceToAvg,
					},
				},
				GroupBy:  []qbtypes.GroupByKey{nodeNameGroupByKey},
				Disabled: false,
			},
		})
	}

	return &qbtypes.QueryRangeRequest{
		RequestType:    qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{Queries: envelopes},
	}
}

View File

@@ -11,9 +11,13 @@ import (
// Handler exposes the infra-monitoring list endpoints over HTTP.
type Handler interface {
ListHosts(http.ResponseWriter, *http.Request)
ListPods(http.ResponseWriter, *http.Request)
ListNodes(http.ResponseWriter, *http.Request)
ListNamespaces(http.ResponseWriter, *http.Request)
}
// Module implements the infra-monitoring list operations, scoped to an
// organization, for hosts, pods, nodes, and namespaces.
type Module interface {
ListHosts(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostableHosts) (*inframonitoringtypes.Hosts, error)
ListPods(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostablePods) (*inframonitoringtypes.Pods, error)
ListNodes(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostableNodes) (*inframonitoringtypes.Nodes, error)
ListNamespaces(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PostableNamespaces) (*inframonitoringtypes.Namespaces, error)
}

View File

@@ -6,20 +6,18 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/quickfilter"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type setter struct {
store types.OrganizationStore
alertmanager alertmanager.Alertmanager
quickfilter quickfilter.Module
systemDashboard systemdashboard.Module
store types.OrganizationStore
alertmanager alertmanager.Alertmanager
quickfilter quickfilter.Module
}
func NewSetter(store types.OrganizationStore, alertmanager alertmanager.Alertmanager, quickfilter quickfilter.Module, systemDashboard systemdashboard.Module) organization.Setter {
return &setter{store: store, alertmanager: alertmanager, quickfilter: quickfilter, systemDashboard: systemDashboard}
func NewSetter(store types.OrganizationStore, alertmanager alertmanager.Alertmanager, quickfilter quickfilter.Module) organization.Setter {
return &setter{store: store, alertmanager: alertmanager, quickfilter: quickfilter}
}
func (module *setter) Create(ctx context.Context, organization *types.Organization, createManagedRoles func(context.Context, valuer.UUID) error) error {
@@ -35,10 +33,6 @@ func (module *setter) Create(ctx context.Context, organization *types.Organizati
return err
}
if err := module.systemDashboard.SetDefaultConfig(ctx, organization.ID); err != nil {
return err
}
if err := createManagedRoles(ctx, organization.ID); err != nil {
return err
}

View File

@@ -1,102 +0,0 @@
package implsystemdashboard
import (
"context"
"encoding/json"
"net/http"
"time"
"github.com/gorilla/mux"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// handler serves the system-dashboard HTTP endpoints by delegating to the
// systemdashboard.Module.
type handler struct {
module systemdashboard.Module
}
// NewHandler wraps the given module in an HTTP handler for the system
// dashboard routes.
func NewHandler(module systemdashboard.Module) systemdashboard.Handler {
return &handler{module: module}
}
// Get returns the system dashboard for the {source} path segment, scoped to
// the caller's organization (taken from the auth claims).
func (handler *handler) Get(w http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(w, err)
		return
	}

	orgID := valuer.MustNewUUID(claims.OrgID)
	dashboard, err := handler.module.Get(ctx, orgID, parseSource(r))
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, dashboard)
}
// Update replaces the dashboard data for the {source} path segment with the
// JSON request body, attributing the change to the authenticated user.
func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) {
ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
defer cancel()
claims, err := authtypes.ClaimsFromContext(ctx)
if err != nil {
render.Error(rw, err)
return
}
data := dashboardtypes.UpdatableDashboard{}
if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
render.Error(rw, errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid request body"))
return
}
// NOTE(review): the == nil comparison implies UpdatableDashboard is a
// nil-able kind (map/slice); a JSON null body decodes it to nil and is
// rejected here — confirm against the type's declaration.
if data == nil {
render.Error(rw, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "data is required"))
return
}
// UpdatedBy comes from the auth claims, never from the request payload.
dashboard, err := handler.module.Update(ctx, valuer.MustNewUUID(claims.OrgID), parseSource(r), &dashboardtypes.Dashboard{
Data: data,
UserAuditable: types.UserAuditable{UpdatedBy: claims.Email},
})
if err != nil {
render.Error(rw, err)
return
}
render.Success(rw, http.StatusOK, dashboard)
}
// Reset restores the {source} system dashboard to its default definition for
// the caller's organization.
func (handler *handler) Reset(w http.ResponseWriter, r *http.Request) {
	ctx, cancel := context.WithTimeout(r.Context(), 10*time.Second)
	defer cancel()

	claims, err := authtypes.ClaimsFromContext(ctx)
	if err != nil {
		render.Error(w, err)
		return
	}

	source := parseSource(r)
	dashboard, err := handler.module.Reset(ctx, valuer.MustNewUUID(claims.OrgID), source)
	if err != nil {
		render.Error(w, err)
		return
	}

	render.Success(w, http.StatusOK, dashboard)
}
// parseSource reads the {source} path segment.
func parseSource(r *http.Request) dashboardtypes.Source {
	vars := mux.Vars(r)
	return dashboardtypes.Source(vars["source"])
}

View File

@@ -1,138 +0,0 @@
package implsystemdashboard
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// module implements systemdashboard.Module on top of the dashboard store.
type module struct {
store dashboardtypes.Store
}
// NewModule builds a systemdashboard.Module backed by the given store.
func NewModule(store dashboardtypes.Store) systemdashboard.Module {
return &module{store: store}
}
// Get loads the stored system dashboard for the given org and source and
// converts it to the API representation.
func (module *module) Get(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source) (*dashboardtypes.Dashboard, error) {
	row, err := module.store.GetBySource(ctx, orgID, string(source))
	if err != nil {
		return nil, err
	}

	return dashboardtypes.NewDashboardFromStorableDashboard(row), nil
}
// Update applies the new payload as last-writer-wins. The Get and Update run inside one transaction so a
// concurrent Reset cannot interleave and leave the response with a stale id from before the reset.
func (module *module) Update(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source, dashboard *dashboardtypes.Dashboard) (*dashboardtypes.Dashboard, error) {
if dashboard == nil {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "dashboard is required")
}
if dashboard.Data == nil {
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "dashboard.Data is required")
}
var updated *dashboardtypes.Dashboard
err := module.store.RunInTx(ctx, func(ctx context.Context) error {
// Re-read inside the transaction so the row being mutated is current.
existing, err := module.store.GetBySource(ctx, orgID, string(source))
if err != nil {
return err
}
// Only data and the audit fields change; the rest of the row is kept.
existing.Data = dashboard.Data
existing.UpdatedBy = dashboard.UpdatedBy
existing.UpdatedAt = time.Now()
if err := module.store.Update(ctx, orgID, existing); err != nil {
return err
}
updated = dashboardtypes.NewDashboardFromStorableDashboard(existing)
return nil
})
if err != nil {
return nil, err
}
return updated, nil
}
// Reset restores the system dashboard for the given source to its default
// definition inside a single transaction: when no row exists yet (the
// not-found error from GetBySource is tolerated) the default is created;
// otherwise the existing row's data is overwritten and attributed to "system".
func (module *module) Reset(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source) (*dashboardtypes.Dashboard, error) {
var reset *dashboardtypes.Dashboard
err := module.store.RunInTx(ctx, func(ctx context.Context) error {
defaultDashboard, err := dashboardtypes.NewDefaultSystemDashboard(orgID, source)
if err != nil {
return err
}
// Not-found is fine here: it just means we create instead of update.
existing, err := module.store.GetBySource(ctx, orgID, string(source))
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}
if existing == nil {
storable, err := dashboardtypes.NewStorableDashboardFromDashboard(defaultDashboard)
if err != nil {
return err
}
if err := module.store.Create(ctx, storable); err != nil {
return err
}
reset = defaultDashboard
return nil
}
// Keep the existing row (and its id) but swap in the default data.
existing.Data = defaultDashboard.Data
existing.UpdatedBy = "system"
existing.UpdatedAt = time.Now()
if err := module.store.Update(ctx, orgID, existing); err != nil {
return err
}
reset = dashboardtypes.NewDashboardFromStorableDashboard(existing)
return nil
})
if err != nil {
return nil, err
}
return reset, nil
}
// SetDefaultConfig seeds every known system-dashboard source with its default
// definition for the organization, stopping at the first failure.
func (module *module) SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error {
	for _, src := range dashboardtypes.SystemSources {
		err := module.setDefaultForSource(ctx, orgID, src)
		if err != nil {
			return err
		}
	}

	return nil
}
// setDefaultForSource creates the default dashboard for one source unless a
// dashboard row already exists for it (existing rows are left untouched).
func (module *module) setDefaultForSource(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source) error {
	existing, err := module.store.GetBySource(ctx, orgID, string(source))
	switch {
	case err != nil && !errors.Ast(err, errors.TypeNotFound):
		return err
	case existing != nil:
		// Already seeded; nothing to do.
		return nil
	}

	dashboard, err := dashboardtypes.NewDefaultSystemDashboard(orgID, source)
	if err != nil {
		return err
	}

	storable, err := dashboardtypes.NewStorableDashboardFromDashboard(dashboard)
	if err != nil {
		return err
	}

	return module.store.Create(ctx, storable)
}

View File

@@ -1,25 +0,0 @@
package systemdashboard
import (
"context"
"net/http"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// Module provides per-organization system-dashboard operations: read, update,
// reset to default, and seeding defaults for every source on org creation.
type Module interface {
Get(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source) (*dashboardtypes.Dashboard, error)
Update(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source, dashboard *dashboardtypes.Dashboard) (*dashboardtypes.Dashboard, error)
Reset(ctx context.Context, orgID valuer.UUID, source dashboardtypes.Source) (*dashboardtypes.Dashboard, error)
SetDefaultConfig(ctx context.Context, orgID valuer.UUID) error
}
// Handler defines the HTTP handler interface for system dashboard endpoints.
// /api/v1/system/{source} — Get / Update
// /api/v1/system/{source}/reset — Reset.
type Handler interface {
Get(http.ResponseWriter, *http.Request)
Update(http.ResponseWriter, *http.Request)
Reset(http.ResponseWriter, *http.Request)
}

View File

@@ -40,8 +40,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/spanmapper/implspanmapper"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile/implspanpercentile"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard/implsystemdashboard"
"github.com/SigNoz/signoz/pkg/modules/tracedetail"
"github.com/SigNoz/signoz/pkg/modules/tracedetail/impltracedetail"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
@@ -57,7 +55,6 @@ type Handlers struct {
SavedView savedview.Handler
Apdex apdex.Handler
Dashboard dashboard.Handler
SystemDashboard systemdashboard.Handler
QuickFilter quickfilter.Handler
TraceFunnel tracefunnel.Handler
RawDataExport rawdataexport.Handler
@@ -102,7 +99,6 @@ func NewHandlers(
SavedView: implsavedview.NewHandler(modules.SavedView),
Apdex: implapdex.NewHandler(modules.Apdex),
Dashboard: impldashboard.NewHandler(modules.Dashboard, providerSettings, authz),
SystemDashboard: implsystemdashboard.NewHandler(modules.SystemDashboard),
QuickFilter: implquickfilter.NewHandler(modules.QuickFilter),
TraceFunnel: impltracefunnel.NewHandler(modules.TraceFunnel),
RawDataExport: implrawdataexport.NewHandler(modules.RawDataExport),

View File

@@ -17,7 +17,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/inframonitoring"
"github.com/SigNoz/signoz/pkg/modules/inframonitoring/implinframonitoring"
"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -41,8 +40,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/session/implsession"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile"
"github.com/SigNoz/signoz/pkg/modules/spanpercentile/implspanpercentile"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard/implsystemdashboard"
"github.com/SigNoz/signoz/pkg/modules/tracedetail"
"github.com/SigNoz/signoz/pkg/modules/tracedetail/impltracedetail"
"github.com/SigNoz/signoz/pkg/modules/tracefunnel"
@@ -69,7 +66,6 @@ type Modules struct {
SavedView savedview.Module
Apdex apdex.Module
Dashboard dashboard.Module
SystemDashboard systemdashboard.Module
QuickFilter quickfilter.Module
TraceFunnel tracefunnel.Module
RawDataExport rawdataexport.Module
@@ -110,8 +106,7 @@ func NewModules(
fl flagger.Flagger,
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
systemDashboard := implsystemdashboard.NewModule(impldashboard.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter, systemDashboard)
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
userSetter := impluser.NewSetter(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User, userRoleStore, userGetter)
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -122,7 +117,6 @@ func NewModules(
SavedView: implsavedview.NewModule(sqlstore),
Apdex: implapdex.NewModule(sqlstore),
Dashboard: dashboard,
SystemDashboard: systemDashboard,
UserSetter: userSetter,
UserGetter: userGetter,
QuickFilter: quickfilter,

View File

@@ -30,7 +30,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/rulestatehistory"
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/systemdashboard"
"github.com/SigNoz/signoz/pkg/modules/spanmapper"
"github.com/SigNoz/signoz/pkg/modules/tracedetail"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -64,7 +63,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
struct{ flagger.Handler }{},
struct{ dashboard.Module }{},
struct{ dashboard.Handler }{},
struct{ systemdashboard.Handler }{},
struct{ metricsexplorer.Handler }{},
struct{ inframonitoring.Handler }{},
struct{ gateway.Handler }{},

View File

@@ -195,7 +195,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewServiceAccountAuthzactory(sqlstore),
sqlmigration.NewDropUserDeletedAtFactory(sqlstore, sqlschema),
sqlmigration.NewMigrateAWSAllRegionsFactory(sqlstore),
sqlmigration.NewAddSystemDashboardFactory(sqlstore, sqlschema),
)
}
@@ -270,7 +269,6 @@ func NewAPIServerProviderFactories(orgGetter organization.Getter, authz authz.Au
handlers.FlaggerHandler,
modules.Dashboard,
handlers.Dashboard,
handlers.SystemDashboard,
handlers.MetricsExplorer,
handlers.InfraMonitoring,
handlers.GatewayHandler,

View File

@@ -1,113 +0,0 @@
package sqlmigration
import (
"context"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
// addSystemDashboard is the SQL migration that introduces the dashboard
// "source" column and seeds default system dashboards per organization.
type addSystemDashboard struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
// NewAddSystemDashboardFactory registers the "add_system_dashboard" migration
// as a provider factory over the given store and schema handles.
func NewAddSystemDashboardFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_system_dashboard"), func(ctx context.Context, ps factory.ProviderSettings, c Config) (SQLMigration, error) {
return &addSystemDashboard{sqlstore: sqlstore, sqlschema: sqlschema}, nil
})
}
// Register adds this migration's Up/Down pair to the migration set.
func (migration *addSystemDashboard) Register(migrations *migrate.Migrations) error {
	// Return the registration result directly; the previous
	// if err != nil { return err } / return nil wrapping was redundant.
	return migrations.Register(migration.Up, migration.Down)
}
// Up adds the "source" column to the dashboard table, then seeds a default
// system dashboard for every (organization, source) pair that does not
// already have a row. Everything runs in one transaction; the deferred
// Rollback is a no-op once Commit has succeeded.
func (migration *addSystemDashboard) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, "dashboard")
if err != nil {
return err
}
// New NOT NULL text column "source" on the dashboard table.
column := &sqlschema.Column{
Name: sqlschema.ColumnName("source"),
DataType: sqlschema.DataTypeText,
Nullable: false,
}
sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, "")
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
var orgIDs []string
if err := tx.NewSelect().Model((*types.Organization)(nil)).Column("id").Scan(ctx, &orgIDs); err != nil {
return err
}
for _, rawOrgID := range orgIDs {
orgID, err := valuer.NewUUID(rawOrgID)
if err != nil {
return err
}
for _, source := range dashboardtypes.SystemSources {
// Idempotence: skip (org, source) pairs that already have a row.
count, err := tx.NewSelect().
Model((*dashboardtypes.StorableDashboard)(nil)).
Where("org_id = ?", orgID).
Where("source = ?", string(source)).
Count(ctx)
if err != nil {
return err
}
if count > 0 {
continue
}
dashboard, err := dashboardtypes.NewDefaultSystemDashboard(orgID, source)
if err != nil {
return err
}
storable, err := dashboardtypes.NewStorableDashboardFromDashboard(dashboard)
if err != nil {
return err
}
if _, err := tx.NewInsert().Model(storable).Exec(ctx); err != nil {
return err
}
}
}
if err := tx.Commit(); err != nil {
return err
}
return nil
}
// Down is intentionally a no-op: the "source" column and any seeded
// dashboards are left in place on rollback.
func (migration *addSystemDashboard) Down(context.Context, *bun.DB) error {
return nil
}

View File

@@ -1,107 +0,0 @@
{
"title": "AI Observability Overview",
"description": "AI / LLM observability overview — cost, tokens, latency, errors, RED for tool calls, and time to first token. Scoped by model, environment and service (apply via the variable bar).",
"tags": ["ai", "llm", "genai", "overview"],
"version": "v5",
"variables": {
"model": {
"id": "a1000000-0000-0000-0000-000000000001",
"name": "model",
"key": "model",
"description": "LLM model",
"type": "QUERY",
"sort": "ASC",
"multiSelect": true,
"showALLOption": true,
"allSelected": true,
"queryValue": "SELECT DISTINCT attributes_string['gen_ai.request.model'] AS model FROM signoz_traces.distributed_signoz_index_v3 WHERE mapContains(attributes_string, 'gen_ai.request.model') AND timestamp >= now() - INTERVAL 1 DAY",
"customValue": "",
"textboxValue": "",
"selectedValue": [],
"order": 0,
"modificationUUID": "a1000000-0000-0000-0000-000000000011"
},
"environment": {
"id": "a1000000-0000-0000-0000-000000000002",
"name": "environment",
"key": "environment",
"description": "Deployment environment",
"type": "QUERY",
"sort": "ASC",
"multiSelect": true,
"showALLOption": true,
"allSelected": true,
"queryValue": "SELECT DISTINCT resources_string['deployment.environment'] AS environment FROM signoz_traces.distributed_signoz_index_v3 WHERE mapContains(resources_string, 'deployment.environment') AND timestamp >= now() - INTERVAL 1 DAY",
"customValue": "",
"textboxValue": "",
"selectedValue": [],
"order": 1,
"modificationUUID": "a1000000-0000-0000-0000-000000000012"
},
"service_name": {
"id": "a1000000-0000-0000-0000-000000000003",
"name": "service_name",
"key": "service_name",
"description": "Service name",
"type": "QUERY",
"sort": "ASC",
"multiSelect": true,
"showALLOption": true,
"allSelected": true,
"queryValue": "SELECT DISTINCT resources_string['service.name'] AS service_name FROM signoz_traces.distributed_signoz_index_v3 WHERE mapContains(resources_string, 'service.name') AND timestamp >= now() - INTERVAL 1 DAY",
"customValue": "",
"textboxValue": "",
"selectedValue": [],
"order": 2,
"modificationUUID": "a1000000-0000-0000-0000-000000000013"
}
},
"layout": [
{"i": "11111111-1111-1111-1111-111111111111", "x": 0, "y": 0, "w": 3, "h": 3, "moved": false, "static": false},
{"i": "22222222-2222-2222-2222-222222222222", "x": 3, "y": 0, "w": 3, "h": 3, "moved": false, "static": false},
{"i": "33333333-3333-3333-3333-333333333333", "x": 6, "y": 0, "w": 2, "h": 3, "moved": false, "static": false},
{"i": "44444444-4444-4444-4444-444444444444", "x": 8, "y": 0, "w": 2, "h": 3, "moved": false, "static": false},
{"i": "55555555-5555-5555-5555-555555555555", "x": 10, "y": 0, "w": 2, "h": 3, "moved": false, "static": false},
{"i": "66666666-6666-6666-6666-666666666666", "x": 0, "y": 3, "w": 6, "h": 4, "moved": false, "static": false},
{"i": "77777777-7777-7777-7777-777777777777", "x": 6, "y": 3, "w": 6, "h": 4, "moved": false, "static": false},
{"i": "88888888-8888-8888-8888-888888888888", "x": 0, "y": 7, "w": 6, "h": 4, "moved": false, "static": false},
{"i": "99999999-9999-9999-9999-999999999999", "x": 6, "y": 7, "w": 6, "h": 4, "moved": false, "static": false},
{"i": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", "x": 0, "y": 11, "w": 4, "h": 4, "moved": false, "static": false},
{"i": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", "x": 4, "y": 11, "w": 4, "h": 4, "moved": false, "static": false},
{"i": "cccccccc-cccc-cccc-cccc-cccccccccccc", "x": 8, "y": 11, "w": 4, "h": 4, "moved": false, "static": false},
{"i": "dddddddd-dddd-dddd-dddd-dddddddddddd", "x": 0, "y": 15, "w": 4, "h": 4, "moved": false, "static": false},
{"i": "eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee", "x": 4, "y": 15, "w": 4, "h": 4, "moved": false, "static": false},
{"i": "ffffffff-ffff-ffff-ffff-ffffffffffff", "x": 8, "y": 15, "w": 4, "h": 4, "moved": false, "static": false}
],
"widgets": [
{"id": "11111111-1111-1111-1111-111111111111", "title": "Total cost", "description": "Total LLM cost across all calls. Requires gen_ai.usage.cost attribute.", "panelTypes": "value", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "none", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q1111111-1111-1111-1111-111111111111", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "sum(gen_ai.usage.cost)"}], "filter": {"expression": ""}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "", "disabled": false, "having": {"expression": ""}, "limit": null, "reduceTo": "sum"}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "22222222-2222-2222-2222-222222222222", "title": "Total tokens", "description": "Sum of input + output tokens.", "panelTypes": "value", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "short", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q2222222-2222-2222-2222-222222222222", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "sum(gen_ai.usage.input_tokens) + sum(gen_ai.usage.output_tokens)"}], "filter": {"expression": ""}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "", "disabled": false, "having": {"expression": ""}, "limit": null, "reduceTo": "sum"}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "33333333-3333-3333-3333-333333333333", "title": "Avg latency (p95)", "description": "p95 latency of LLM spans.", "panelTypes": "value", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "ms", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q3333333-3333-3333-3333-333333333333", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "p95(duration_nano) / 1000000"}], "filter": {"expression": "gen_ai.system != ''"}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "", "disabled": false, "having": {"expression": ""}, "limit": null, "reduceTo": "avg"}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "44444444-4444-4444-4444-444444444444", "title": "Error rate", "description": "Error rate as a percentage of total LLM calls.", "panelTypes": "value", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "percent", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q4444444-4444-4444-4444-444444444444", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "countIf(has_error = true) * 100 / count()"}], "filter": {"expression": "gen_ai.system != ''"}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "", "disabled": false, "having": {"expression": ""}, "limit": null, "reduceTo": "avg"}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "55555555-5555-5555-5555-555555555555", "title": "TTFT (p95)", "description": "p95 time to first token.", "panelTypes": "value", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "ms", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q5555555-5555-5555-5555-555555555555", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "p95(gen_ai.server.ttft)"}], "filter": {"expression": ""}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "", "disabled": false, "having": {"expression": ""}, "limit": null, "reduceTo": "avg"}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "66666666-6666-6666-6666-666666666666", "title": "Cost over time", "description": "Cost by model over time.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "none", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q6666666-6666-6666-6666-666666666666", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "sum(gen_ai.usage.cost)"}], "filter": {"expression": ""}, "groupBy": [{"key": "gen_ai.request.model", "dataType": "string", "type": "tag", "isColumn": false, "isJSON": false}], "expression": "A", "orderBy": [], "legend": "{{gen_ai.request.model}}", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "77777777-7777-7777-7777-777777777777", "title": "Token usage over time", "description": "Input vs output tokens over time.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": true, "fillSpans": false, "yAxisUnit": "short", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q7777777-7777-7777-7777-777777777777", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "sum(gen_ai.usage.input_tokens)"}], "filter": {"expression": ""}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "Input", "disabled": false, "having": {"expression": ""}, "limit": null}, {"queryName": "B", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "sum(gen_ai.usage.output_tokens)"}], "filter": {"expression": ""}, "groupBy": [], "expression": "B", "orderBy": [], "legend": "Output", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "88888888-8888-8888-8888-888888888888", "title": "LLM call latency percentiles", "description": "p50, p90, p95, p99 latency by model.", "panelTypes": "table", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "ms", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q8888888-8888-8888-8888-888888888888", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "p50(duration_nano) / 1000000"}, {"expression": "p90(duration_nano) / 1000000"}, {"expression": "p95(duration_nano) / 1000000"}, {"expression": "p99(duration_nano) / 1000000"}], "filter": {"expression": "gen_ai.system != ''"}, "groupBy": [{"key": "gen_ai.request.model", "dataType": "string", "type": "tag", "isColumn": false, "isJSON": false}], "expression": "A", "orderBy": [], "legend": "{{gen_ai.request.model}}", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "99999999-9999-9999-9999-999999999999", "title": "LLM call latency over time", "description": "p95 latency trend by model.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "ms", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "q9999999-9999-9999-9999-999999999999", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "p95(duration_nano) / 1000000"}], "filter": {"expression": "gen_ai.system != ''"}, "groupBy": [{"key": "gen_ai.request.model", "dataType": "string", "type": "tag", "isColumn": false, "isJSON": false}], "expression": "A", "orderBy": [], "legend": "{{gen_ai.request.model}}", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", "title": "Error count", "description": "Errors grouped by error type.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": true, "fillSpans": false, "yAxisUnit": "short", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "qaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "count()"}], "filter": {"expression": "has_error = true AND gen_ai.system != ''"}, "groupBy": [{"key": "gen_ai.error.type", "dataType": "string", "type": "tag", "isColumn": false, "isJSON": false}], "expression": "A", "orderBy": [], "legend": "{{gen_ai.error.type}}", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", "title": "Time to first token", "description": "p95 TTFT by model.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "ms", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "qbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "p95(gen_ai.server.ttft)"}], "filter": {"expression": ""}, "groupBy": [{"key": "gen_ai.request.model", "dataType": "string", "type": "tag", "isColumn": false, "isJSON": false}], "expression": "A", "orderBy": [], "legend": "{{gen_ai.request.model}}", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "cccccccc-cccc-cccc-cccc-cccccccccccc", "title": "Top 10 span names", "description": "Top span names by count across GenAI spans.", "panelTypes": "table", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "none", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "qccccccc-cccc-cccc-cccc-cccccccccccc", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "count()"}], "filter": {"expression": "gen_ai.system != ''"}, "groupBy": [{"key": "name", "dataType": "string", "type": "tag", "isColumn": true, "isJSON": false}], "expression": "A", "orderBy": [{"columnName": "count()", "order": "desc"}], "legend": "{{name}}", "disabled": false, "having": {"expression": ""}, "limit": 10}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "dddddddd-dddd-dddd-dddd-dddddddddddd", "title": "Tool call rate", "description": "Tool call rate per second.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "reqps", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "qddddddd-dddd-dddd-dddd-dddddddddddd", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "rate()"}], "filter": {"expression": "name = 'execute_tool'"}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "req/s", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee", "title": "Tool error rate", "description": "Percentage of tool calls that errored.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "percent", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "qeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "countIf(has_error = true) * 100 / count()"}], "filter": {"expression": "name = 'execute_tool'"}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "error %", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}},
{"id": "ffffffff-ffff-ffff-ffff-ffffffffffff", "title": "Tool duration (p50)", "description": "Median tool call duration.", "panelTypes": "graph", "nullZeroValues": "zero", "opacity": "1", "isStacked": false, "fillSpans": false, "yAxisUnit": "ms", "timePreferance": "GLOBAL_TIME", "softMax": null, "softMin": null, "thresholds": [], "selectedLogFields": [], "selectedTracesFields": [], "query": {"queryType": "builder", "promql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "clickhouse_sql": [{"disabled": false, "legend": "", "name": "A", "query": ""}], "id": "qfffffff-ffff-ffff-ffff-ffffffffffff", "builder": {"queryData": [{"queryName": "A", "stepInterval": 60, "dataSource": "traces", "aggregations": [{"expression": "p50(duration_nano) / 1000000"}], "filter": {"expression": "name = 'execute_tool'"}, "groupBy": [], "expression": "A", "orderBy": [], "legend": "p50", "disabled": false, "having": {"expression": ""}, "limit": null}], "queryFormulas": [], "queryTraceOperator": []}}}
]
}

View File

@@ -37,7 +37,6 @@ type StorableDashboard struct {
Data StorableDashboardData `bun:"data,type:text,notnull"`
Locked bool `bun:"locked,notnull,default:false"`
OrgID valuer.UUID `bun:"org_id,notnull"`
Source string `bun:"source,type:text,notnull"`
}
type Dashboard struct {
@@ -48,7 +47,6 @@ type Dashboard struct {
Data StorableDashboardData `json:"data"`
Locked bool `json:"locked"`
OrgID valuer.UUID `json:"org_id"`
Source string `json:"source"`
}
type LockUnlockDashboard struct {
@@ -88,11 +86,10 @@ func NewStorableDashboardFromDashboard(dashboard *Dashboard) (*StorableDashboard
OrgID: dashboard.OrgID,
Data: dashboard.Data,
Locked: dashboard.Locked,
Source: dashboard.Source,
}, nil
}
func NewDashboard(orgID valuer.UUID, createdBy string, data StorableDashboardData, source Source) (*Dashboard, error) {
func NewDashboard(orgID valuer.UUID, createdBy string, storableDashboardData StorableDashboardData) (*Dashboard, error) {
currentTime := time.Now()
return &Dashboard{
@@ -106,8 +103,8 @@ func NewDashboard(orgID valuer.UUID, createdBy string, data StorableDashboardDat
UpdatedBy: createdBy,
},
OrgID: orgID,
Data: data,
Source: string(source),
Data: storableDashboardData,
Locked: false,
}, nil
}
@@ -125,7 +122,6 @@ func NewDashboardFromStorableDashboard(storableDashboard *StorableDashboard) *Da
OrgID: storableDashboard.OrgID,
Data: storableDashboard.Data,
Locked: storableDashboard.Locked,
Source: storableDashboard.Source,
}
}
@@ -158,7 +154,6 @@ func NewGettableDashboardFromDashboard(dashboard *Dashboard) (*GettableDashboard
OrgID: dashboard.OrgID,
Data: dashboard.Data,
Locked: dashboard.Locked,
Source: dashboard.Source,
}, nil
}

View File

@@ -66,7 +66,7 @@ func TestCanUpdate_MultipleDeletions_ByDiff(t *testing.T) {
initial := StorableDashboardData{
"widgets": makeTestWidgets("a", "b", "c"),
}
d, err := NewDashboard(orgID, "tester", initial, "")
d, err := NewDashboard(orgID, "tester", initial)
assert.NoError(t, err)
updated := StorableDashboardData{

View File

@@ -1,40 +0,0 @@
package dashboardtypes
import (
_ "embed"
"encoding/json"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Source string
const (
SourceAIO11yOverview Source = "ai-o11y-overview"
)
var SystemSources = []Source{
SourceAIO11yOverview,
}
//go:embed ai_o11y_overview.json
var aiO11yOverviewJSON []byte
func NewDefaultSystemDashboard(orgID valuer.UUID, source Source) (*Dashboard, error) {
switch source {
case SourceAIO11yOverview:
return newDefaultAIO11yOverview(orgID)
default:
return nil, errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "no defaults registered for system dashboard source %s", source)
}
}
func newDefaultAIO11yOverview(orgID valuer.UUID) (*Dashboard, error) {
data := StorableDashboardData{}
if err := json.Unmarshal(aiO11yOverviewJSON, &data); err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to unmarshal embedded ai-o11y-overview default")
}
return NewDashboard(orgID, "system", data, SourceAIO11yOverview)
}

View File

@@ -13,8 +13,6 @@ type Store interface {
Get(context.Context, valuer.UUID, valuer.UUID) (*StorableDashboard, error)
GetBySource(context.Context, valuer.UUID, string) (*StorableDashboard, error)
GetPublic(context.Context, string) (*StorablePublicDashboard, error)
GetDashboardByOrgsAndPublicID(context.Context, []string, string) (*StorableDashboard, error)

View File

@@ -0,0 +1,103 @@
package inframonitoringtypes
import (
"encoding/json"
"slices"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// Namespaces is the response envelope for the v2 namespaces list API.
type Namespaces struct {
	// Type identifies the response shape of Records.
	Type ResponseType `json:"type" required:"true"`
	// Records holds one entry per namespace in the returned page.
	Records []NamespaceRecord `json:"records" required:"true"`
	// Total is the overall count of matching namespaces
	// (presumably before pagination — confirm against the handler).
	Total int `json:"total" required:"true"`
	// RequiredMetricsCheck reports the status of the metrics this API
	// depends on; see the RequiredMetricsCheck type for details.
	RequiredMetricsCheck RequiredMetricsCheck `json:"requiredMetricsCheck" required:"true"`
	// EndTimeBeforeRetention flags that the requested end time predates
	// the retention window (name-based reading — verify in the handler).
	EndTimeBeforeRetention bool `json:"endTimeBeforeRetention" required:"true"`
	// Warning carries non-fatal query warnings; omitted when absent.
	Warning *qbtypes.QueryWarnData `json:"warning,omitempty"`
}
// NamespaceRecord is a single row in the namespaces list response.
type NamespaceRecord struct {
	NamespaceName string `json:"namespaceName" required:"true"`
	// CPU and memory usage for the namespace (units depend on the
	// underlying metric — confirm against the query implementation).
	NamespaceCPU    float64 `json:"namespaceCPU" required:"true"`
	NamespaceMemory float64 `json:"namespaceMemory" required:"true"`
	// Pod counts bucketed by pod phase.
	PendingPodCount   int `json:"pendingPodCount" required:"true"`
	RunningPodCount   int `json:"runningPodCount" required:"true"`
	SucceededPodCount int `json:"succeededPodCount" required:"true"`
	FailedPodCount    int `json:"failedPodCount" required:"true"`
	UnknownPodCount   int `json:"unknownPodCount" required:"true"`
	// Meta carries extra attributes attached to the record.
	Meta map[string]interface{} `json:"meta" required:"true"`
}
// PostableNamespaces is the request body for the v2 namespaces list API.
type PostableNamespaces struct {
	// Start and End bound the query time range; both must be > 0 and
	// Start < End (enforced by Validate). Units are not fixed here —
	// presumably epoch timestamps; confirm against the handler.
	Start int64 `json:"start" required:"true"`
	End   int64 `json:"end" required:"true"`
	// Filter optionally narrows the namespace set.
	Filter *qbtypes.Filter `json:"filter"`
	// GroupBy optionally groups results by the given keys.
	GroupBy []qbtypes.GroupByKey `json:"groupBy"`
	// OrderBy optionally sorts by one of NamespacesValidOrderByKeys.
	OrderBy *qbtypes.OrderBy `json:"orderBy"`
	// Offset and Limit paginate results; Limit must be in [1, 5000].
	Offset int `json:"offset"`
	Limit  int `json:"limit" required:"true"`
}
// Validate ensures PostableNamespaces contains acceptable values.
// It returns a TypeInvalidInput error describing the first violation found,
// or nil when the request is well-formed.
func (req *PostableNamespaces) Validate() error {
	if req == nil {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
	}

	// Range and pagination checks, in the same order as the error contract.
	switch {
	case req.Start <= 0:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid start time %d: start must be greater than 0",
			req.Start,
		)
	case req.End <= 0:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid end time %d: end must be greater than 0",
			req.End,
		)
	case req.Start >= req.End:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid time range: start (%d) must be less than end (%d)",
			req.Start,
			req.End,
		)
	case req.Limit < 1 || req.Limit > 5000:
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
	case req.Offset < 0:
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "offset cannot be negative")
	}

	// Ordering is optional; when present, both key and direction must be valid.
	if ob := req.OrderBy; ob != nil {
		if !slices.Contains(NamespacesValidOrderByKeys, ob.Key.Name) {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by key: %s", ob.Key.Name)
		}
		if ob.Direction != qbtypes.OrderDirectionAsc && ob.Direction != qbtypes.OrderDirectionDesc {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by direction: %s", ob.Direction)
		}
	}

	return nil
}
// UnmarshalJSON decodes the request body and validates it immediately.
// The receiver is only written on success, so a payload that decodes but
// fails validation does not leave *req partially populated (the previous
// implementation assigned before validating).
func (req *PostableNamespaces) UnmarshalJSON(data []byte) error {
	// The alias type drops this method, avoiding infinite recursion.
	type raw PostableNamespaces
	var decoded raw
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	validated := PostableNamespaces(decoded)
	if err := validated.Validate(); err != nil {
		return err
	}
	*req = validated
	return nil
}

View File

@@ -0,0 +1,11 @@
package inframonitoringtypes
// Order-by keys accepted by PostableNamespaces.OrderBy.
const (
	NamespacesOrderByCPU    = "cpu"
	NamespacesOrderByMemory = "memory"
)

// NamespacesValidOrderByKeys enumerates every key that
// PostableNamespaces.Validate accepts for ordering.
var NamespacesValidOrderByKeys = []string{
	NamespacesOrderByCPU,
	NamespacesOrderByMemory,
}

View File

@@ -0,0 +1,237 @@
package inframonitoringtypes
import (
"testing"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/require"
)
// TestPostableNamespaces_Validate exercises PostableNamespaces.Validate
// across valid requests, bad time ranges, bad pagination, and orderBy
// key/direction validation. Every rejection must be a TypeInvalidInput error.
func TestPostableNamespaces_Validate(t *testing.T) {
	// base builds a request that passes validation; mut tweaks it per case.
	base := func(mut func(*PostableNamespaces)) *PostableNamespaces {
		r := &PostableNamespaces{Start: 1000, End: 2000, Limit: 100, Offset: 0}
		if mut != nil {
			mut(r)
		}
		return r
	}
	// orderBy builds an OrderBy clause for the given key and direction.
	orderBy := func(key string, dir qbtypes.OrderDirection) *qbtypes.OrderBy {
		return &qbtypes.OrderBy{
			Key: qbtypes.OrderByKey{
				TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: key},
			},
			Direction: dir,
		}
	}

	tests := []struct {
		name    string
		req     *PostableNamespaces
		wantErr bool
	}{
		{name: "valid request", req: base(nil), wantErr: false},
		{name: "nil request", req: nil, wantErr: true},
		{name: "start time zero", req: base(func(r *PostableNamespaces) { r.Start = 0 }), wantErr: true},
		{name: "start time negative", req: base(func(r *PostableNamespaces) { r.Start = -1000 }), wantErr: true},
		{name: "end time zero", req: base(func(r *PostableNamespaces) { r.End = 0 }), wantErr: true},
		{name: "start time greater than end time", req: base(func(r *PostableNamespaces) { r.Start = 2000; r.End = 1000 }), wantErr: true},
		{name: "start time equal to end time", req: base(func(r *PostableNamespaces) { r.End = 1000 }), wantErr: true},
		{name: "limit zero", req: base(func(r *PostableNamespaces) { r.Limit = 0 }), wantErr: true},
		{name: "limit negative", req: base(func(r *PostableNamespaces) { r.Limit = -10 }), wantErr: true},
		{name: "limit exceeds max", req: base(func(r *PostableNamespaces) { r.Limit = 5001 }), wantErr: true},
		{name: "offset negative", req: base(func(r *PostableNamespaces) { r.Offset = -5 }), wantErr: true},
		{name: "orderBy nil is valid", req: base(nil), wantErr: false},
		{name: "orderBy with valid key cpu and direction asc", req: base(func(r *PostableNamespaces) {
			r.OrderBy = orderBy(NamespacesOrderByCPU, qbtypes.OrderDirectionAsc)
		}), wantErr: false},
		{name: "orderBy with valid key memory and direction desc", req: base(func(r *PostableNamespaces) {
			r.OrderBy = orderBy(NamespacesOrderByMemory, qbtypes.OrderDirectionDesc)
		}), wantErr: false},
		{name: "orderBy with pod_phase key is rejected", req: base(func(r *PostableNamespaces) {
			r.OrderBy = orderBy("pod_phase", qbtypes.OrderDirectionDesc)
		}), wantErr: true},
		{name: "orderBy with invalid key", req: base(func(r *PostableNamespaces) {
			r.OrderBy = orderBy("unknown", qbtypes.OrderDirectionDesc)
		}), wantErr: true},
		{name: "orderBy with valid key but invalid direction", req: base(func(r *PostableNamespaces) {
			r.OrderBy = orderBy(NamespacesOrderByMemory, qbtypes.OrderDirection{String: valuer.NewString("invalid")})
		}), wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.req.Validate()
			if tt.wantErr {
				require.Error(t, err)
				require.True(t, errors.Ast(err, errors.TypeInvalidInput), "expected error to be of type InvalidInput")
			} else {
				require.NoError(t, err)
			}
		})
	}
}

View File

@@ -0,0 +1,103 @@
package inframonitoringtypes
import (
"encoding/json"
"slices"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)
// Nodes is the response envelope for the v2 nodes list API.
type Nodes struct {
	// Type identifies the response shape of Records.
	Type ResponseType `json:"type" required:"true"`
	// Records holds one entry per node in the returned page.
	Records []NodeRecord `json:"records" required:"true"`
	// Total is the overall count of matching nodes
	// (presumably before pagination — confirm against the handler).
	Total int `json:"total" required:"true"`
	// RequiredMetricsCheck reports the status of the metrics this API
	// depends on; see the RequiredMetricsCheck type for details.
	RequiredMetricsCheck RequiredMetricsCheck `json:"requiredMetricsCheck" required:"true"`
	// EndTimeBeforeRetention flags that the requested end time predates
	// the retention window (name-based reading — verify in the handler).
	EndTimeBeforeRetention bool `json:"endTimeBeforeRetention" required:"true"`
	// Warning carries non-fatal query warnings; omitted when absent.
	Warning *qbtypes.QueryWarnData `json:"warning,omitempty"`
}
// NodeRecord is a single row in the nodes list response.
type NodeRecord struct {
	NodeName string `json:"nodeName" required:"true"`
	// Condition is the node's readiness state; see NodeCondition.
	Condition NodeCondition `json:"condition" required:"true"`
	// Ready/NotReady counts — presumably aggregate counts when the row
	// represents a group rather than one node; confirm against the query.
	ReadyNodesCount    int `json:"readyNodesCount" required:"true"`
	NotReadyNodesCount int `json:"notReadyNodesCount" required:"true"`
	// CPU and memory usage vs. allocatable capacity (units depend on the
	// underlying metrics — confirm against the query implementation).
	NodeCPU               float64 `json:"nodeCPU" required:"true"`
	NodeCPUAllocatable    float64 `json:"nodeCPUAllocatable" required:"true"`
	NodeMemory            float64 `json:"nodeMemory" required:"true"`
	NodeMemoryAllocatable float64 `json:"nodeMemoryAllocatable" required:"true"`
	// Meta carries extra attributes attached to the record.
	Meta map[string]interface{} `json:"meta" required:"true"`
}
// PostableNodes is the request body for the v2 nodes list API.
type PostableNodes struct {
	// Start and End bound the query time range; both must be > 0 and
	// Start < End (enforced by Validate). Units are not fixed here —
	// presumably epoch timestamps; confirm against the handler.
	Start int64 `json:"start" required:"true"`
	End   int64 `json:"end" required:"true"`
	// Filter optionally narrows the node set.
	Filter *qbtypes.Filter `json:"filter"`
	// GroupBy optionally groups results by the given keys.
	GroupBy []qbtypes.GroupByKey `json:"groupBy"`
	// OrderBy optionally sorts by one of NodesValidOrderByKeys.
	OrderBy *qbtypes.OrderBy `json:"orderBy"`
	// Offset and Limit paginate results; Limit must be in [1, 5000].
	Offset int `json:"offset"`
	Limit  int `json:"limit" required:"true"`
}
// Validate ensures PostableNodes contains acceptable values.
// It returns a TypeInvalidInput error describing the first violation found,
// or nil when the request is well-formed.
func (req *PostableNodes) Validate() error {
	if req == nil {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "request is nil")
	}

	// Range and pagination checks, in the same order as the error contract.
	switch {
	case req.Start <= 0:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid start time %d: start must be greater than 0",
			req.Start,
		)
	case req.End <= 0:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid end time %d: end must be greater than 0",
			req.End,
		)
	case req.Start >= req.End:
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid time range: start (%d) must be less than end (%d)",
			req.Start,
			req.End,
		)
	case req.Limit < 1 || req.Limit > 5000:
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "limit must be between 1 and 5000")
	case req.Offset < 0:
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "offset cannot be negative")
	}

	// Ordering is optional; when present, both key and direction must be valid.
	if ob := req.OrderBy; ob != nil {
		if !slices.Contains(NodesValidOrderByKeys, ob.Key.Name) {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by key: %s", ob.Key.Name)
		}
		if ob.Direction != qbtypes.OrderDirectionAsc && ob.Direction != qbtypes.OrderDirectionDesc {
			return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by direction: %s", ob.Direction)
		}
	}

	return nil
}
// UnmarshalJSON decodes the request body and validates it immediately.
// The receiver is only written on success, so a payload that decodes but
// fails validation does not leave *req partially populated (the previous
// implementation assigned before validating).
func (req *PostableNodes) UnmarshalJSON(data []byte) error {
	// The alias type drops this method, avoiding infinite recursion.
	type raw PostableNodes
	var decoded raw
	if err := json.Unmarshal(data, &decoded); err != nil {
		return err
	}
	validated := PostableNodes(decoded)
	if err := validated.Validate(); err != nil {
		return err
	}
	*req = validated
	return nil
}

View File

@@ -0,0 +1,42 @@
package inframonitoringtypes
import "github.com/SigNoz/signoz/pkg/valuer"
// NodeCondition is an enum-like wrapper describing a node's readiness state.
type NodeCondition struct {
	valuer.String
}

// Known NodeCondition values.
var (
	NodeConditionReady    = NodeCondition{valuer.NewString("ready")}
	NodeConditionNotReady = NodeCondition{valuer.NewString("not_ready")}
	// NodeConditionNone is the empty value, used when no condition applies.
	NodeConditionNone = NodeCondition{valuer.NewString("")}
)

// Enum returns every valid NodeCondition value.
func (NodeCondition) Enum() []any {
	return []any{
		NodeConditionReady,
		NodeConditionNotReady,
		NodeConditionNone,
	}
}

// Numeric values emitted by the k8s.node.condition_ready metric
// (source: OTel kubeletstats receiver).
const (
	NodeConditionNumReady    = 1
	NodeConditionNumNotReady = 0
)

// Order-by keys accepted by PostableNodes.OrderBy.
const (
	NodesOrderByCPU               = "cpu"
	NodesOrderByCPUAllocatable    = "cpu_allocatable"
	NodesOrderByMemory            = "memory"
	NodesOrderByMemoryAllocatable = "memory_allocatable"
)

// NodesValidOrderByKeys enumerates every key that
// PostableNodes.Validate accepts for ordering.
var NodesValidOrderByKeys = []string{
	NodesOrderByCPU,
	NodesOrderByCPUAllocatable,
	NodesOrderByMemory,
	NodesOrderByMemoryAllocatable,
}

View File

@@ -0,0 +1,255 @@
package inframonitoringtypes
import (
"testing"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/stretchr/testify/require"
)
// TestPostableNodes_Validate exercises PostableNodes.Validate across valid
// requests, bad time ranges, bad pagination, and orderBy key/direction
// validation. Every rejection must be a TypeInvalidInput error.
func TestPostableNodes_Validate(t *testing.T) {
	// base builds a request that passes validation; mut tweaks it per case.
	base := func(mut func(*PostableNodes)) *PostableNodes {
		r := &PostableNodes{Start: 1000, End: 2000, Limit: 100, Offset: 0}
		if mut != nil {
			mut(r)
		}
		return r
	}
	// orderBy builds an OrderBy clause for the given key and direction.
	orderBy := func(key string, dir qbtypes.OrderDirection) *qbtypes.OrderBy {
		return &qbtypes.OrderBy{
			Key: qbtypes.OrderByKey{
				TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: key},
			},
			Direction: dir,
		}
	}

	tests := []struct {
		name    string
		req     *PostableNodes
		wantErr bool
	}{
		{name: "valid request", req: base(nil), wantErr: false},
		{name: "nil request", req: nil, wantErr: true},
		{name: "start time zero", req: base(func(r *PostableNodes) { r.Start = 0 }), wantErr: true},
		{name: "start time negative", req: base(func(r *PostableNodes) { r.Start = -1000 }), wantErr: true},
		{name: "end time zero", req: base(func(r *PostableNodes) { r.End = 0 }), wantErr: true},
		{name: "start time greater than end time", req: base(func(r *PostableNodes) { r.Start = 2000; r.End = 1000 }), wantErr: true},
		{name: "start time equal to end time", req: base(func(r *PostableNodes) { r.End = 1000 }), wantErr: true},
		{name: "limit zero", req: base(func(r *PostableNodes) { r.Limit = 0 }), wantErr: true},
		{name: "limit negative", req: base(func(r *PostableNodes) { r.Limit = -10 }), wantErr: true},
		{name: "limit exceeds max", req: base(func(r *PostableNodes) { r.Limit = 5001 }), wantErr: true},
		{name: "offset negative", req: base(func(r *PostableNodes) { r.Offset = -5 }), wantErr: true},
		{name: "orderBy nil is valid", req: base(nil), wantErr: false},
		{name: "orderBy with valid key cpu and direction asc", req: base(func(r *PostableNodes) {
			r.OrderBy = orderBy(NodesOrderByCPU, qbtypes.OrderDirectionAsc)
		}), wantErr: false},
		{name: "orderBy with valid key cpu_allocatable and direction desc", req: base(func(r *PostableNodes) {
			r.OrderBy = orderBy(NodesOrderByCPUAllocatable, qbtypes.OrderDirectionDesc)
		}), wantErr: false},
		{name: "orderBy with valid key memory_allocatable and direction asc", req: base(func(r *PostableNodes) {
			r.OrderBy = orderBy(NodesOrderByMemoryAllocatable, qbtypes.OrderDirectionAsc)
		}), wantErr: false},
		{name: "orderBy with condition key is rejected", req: base(func(r *PostableNodes) {
			r.OrderBy = orderBy("condition", qbtypes.OrderDirectionDesc)
		}), wantErr: true},
		{name: "orderBy with invalid key", req: base(func(r *PostableNodes) {
			r.OrderBy = orderBy("unknown", qbtypes.OrderDirectionDesc)
		}), wantErr: true},
		{name: "orderBy with valid key but invalid direction", req: base(func(r *PostableNodes) {
			r.OrderBy = orderBy(NodesOrderByMemory, qbtypes.OrderDirection{String: valuer.NewString("invalid")})
		}), wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.req.Validate()
			if tt.wantErr {
				require.Error(t, err)
				require.True(t, errors.Ast(err, errors.TypeInvalidInput), "expected error to be of type InvalidInput")
			} else {
				require.NoError(t, err)
			}
		})
	}
}