Mirror of https://github.com/SigNoz/signoz.git (synced 2026-04-10 14:10:22 +01:00)

Compare commits: debug-wal ... infraM/v2_ (39 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 271f9b81ed | |
| | 6fa815c294 | |
| | 63ec518efb | |
| | c4ca20dd90 | |
| | e56cc4222b | |
| | 07d2944d7c | |
| | dea01ae36a | |
| | 62ea5b54e2 | |
| | e549a7e42f | |
| | 90e2ebb11f | |
| | 61baa1be7a | |
| | b946fa665f | |
| | 2e049556e4 | |
| | 492a5e70d7 | |
| | ba1f2771e8 | |
| | 7458fb4855 | |
| | 5f55f3938b | |
| | 3e8102485c | |
| | 861c682ea5 | |
| | c8e5895dff | |
| | 82d72e7edb | |
| | a3f8ecaaf1 | |
| | 19aada656c | |
| | b21bb4280f | |
| | bc0a4fdb5c | |
| | 37fb0e9254 | |
| | aecfa1a174 | |
| | b869d23d94 | |
| | 6ee3d44f76 | |
| | 462e554107 | |
| | 66afa73e6f | |
| | 54c604bcf4 | |
| | c1be02ba54 | |
| | d3c7ba8f45 | |
| | 039c4a0496 | |
| | 51a94b6bbc | |
| | bbfbb94f52 | |
| | d1eb9ef16f | |
| | 3db00f8bc3 | |
.gitignore (vendored), 2 changes
@@ -51,8 +51,6 @@ ee/query-service/tests/test-deploy/data/
 # local data
 *.backup
 *.db
 *.db-shm
 *.db-wal
 **/db
 /deploy/docker/clickhouse-setup/data/
 /deploy/docker-swarm/clickhouse-setup/data/
@@ -190,7 +190,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.118.0
+    image: signoz/signoz:v0.117.1
     ports:
       - "8080:8080" # signoz port
       # - "6060:6060" # pprof port

@@ -117,7 +117,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.118.0
+    image: signoz/signoz:v0.117.1
     ports:
       - "8080:8080" # signoz port
     volumes:

@@ -181,7 +181,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.118.0}
+    image: signoz/signoz:${VERSION:-v0.117.1}
     container_name: signoz
     ports:
       - "8080:8080" # signoz port

@@ -109,7 +109,7 @@ services:
       # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.118.0}
+    image: signoz/signoz:${VERSION:-v0.117.1}
     container_name: signoz
     ports:
       - "8080:8080" # signoz port
@@ -403,65 +403,27 @@ components:
       required:
         - regions
       type: object
-    CloudintegrationtypesAWSCloudWatchLogsSubscription:
+    CloudintegrationtypesAWSCollectionStrategy:
       properties:
-        filterPattern:
-          type: string
-        logGroupNamePrefix:
-          type: string
-      required:
-        - logGroupNamePrefix
-        - filterPattern
-      type: object
-    CloudintegrationtypesAWSCloudWatchMetricStreamFilter:
-      properties:
-        metricNames:
-          items:
-            type: string
-          type: array
-        namespace:
-          type: string
-      required:
-        - namespace
+        aws_logs:
+          $ref: '#/components/schemas/CloudintegrationtypesAWSLogsStrategy'
+        aws_metrics:
+          $ref: '#/components/schemas/CloudintegrationtypesAWSMetricsStrategy'
+        s3_buckets:
+          additionalProperties:
+            items:
+              type: string
+            type: array
+          type: object
       type: object
     CloudintegrationtypesAWSConnectionArtifact:
       properties:
-        connectionUrl:
+        connectionURL:
           type: string
       required:
-        - connectionUrl
+        - connectionURL
       type: object
-    CloudintegrationtypesAWSIntegrationConfig:
-      properties:
-        enabledRegions:
-          items:
-            type: string
-          type: array
-        telemetryCollectionStrategy:
-          $ref: '#/components/schemas/CloudintegrationtypesAWSTelemetryCollectionStrategy'
-      required:
-        - enabledRegions
-        - telemetryCollectionStrategy
-      type: object
-    CloudintegrationtypesAWSLogsCollectionStrategy:
-      properties:
-        subscriptions:
-          items:
-            $ref: '#/components/schemas/CloudintegrationtypesAWSCloudWatchLogsSubscription'
-          type: array
-      required:
-        - subscriptions
-      type: object
-    CloudintegrationtypesAWSMetricsCollectionStrategy:
-      properties:
-        streamFilters:
-          items:
-            $ref: '#/components/schemas/CloudintegrationtypesAWSCloudWatchMetricStreamFilter'
-          type: array
-      required:
-        - streamFilters
-      type: object
-    CloudintegrationtypesAWSPostableAccountConfig:
+    CloudintegrationtypesAWSConnectionArtifactRequest:
       properties:
         deploymentRegion:
           type: string

@@ -473,6 +435,46 @@ components:
         - deploymentRegion
         - regions
       type: object
+    CloudintegrationtypesAWSIntegrationConfig:
+      properties:
+        enabledRegions:
+          items:
+            type: string
+          type: array
+        telemetry:
+          $ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
+      required:
+        - enabledRegions
+        - telemetry
+      type: object
+    CloudintegrationtypesAWSLogsStrategy:
+      properties:
+        cloudwatch_logs_subscriptions:
+          items:
+            properties:
+              filter_pattern:
+                type: string
+              log_group_name_prefix:
+                type: string
+            type: object
+          nullable: true
+          type: array
+      type: object
+    CloudintegrationtypesAWSMetricsStrategy:
+      properties:
+        cloudwatch_metric_stream_filters:
+          items:
+            properties:
+              MetricNames:
+                items:
+                  type: string
+                type: array
+              Namespace:
+                type: string
+            type: object
+          nullable: true
+          type: array
+      type: object
     CloudintegrationtypesAWSServiceConfig:
       properties:
         logs:

@@ -484,7 +486,7 @@
           properties:
             enabled:
               type: boolean
-            s3Buckets:
+            s3_buckets:
               additionalProperties:
                 items:
                   type: string

@@ -496,19 +498,6 @@
             enabled:
               type: boolean
       type: object
-    CloudintegrationtypesAWSTelemetryCollectionStrategy:
-      properties:
-        logs:
-          $ref: '#/components/schemas/CloudintegrationtypesAWSLogsCollectionStrategy'
-        metrics:
-          $ref: '#/components/schemas/CloudintegrationtypesAWSMetricsCollectionStrategy'
-        s3Buckets:
-          additionalProperties:
-            items:
-              type: string
-            type: array
-          type: object
-      type: object
     CloudintegrationtypesAccount:
       properties:
         agentReport:

@@ -572,26 +561,6 @@
           nullable: true
           type: array
       type: object
-    CloudintegrationtypesCloudIntegrationService:
-      nullable: true
-      properties:
-        cloudIntegrationId:
-          type: string
-        config:
-          $ref: '#/components/schemas/CloudintegrationtypesServiceConfig'
-        createdAt:
-          format: date-time
-          type: string
-        id:
-          type: string
-        type:
-          $ref: '#/components/schemas/CloudintegrationtypesServiceID'
-        updatedAt:
-          format: date-time
-          type: string
-      required:
-        - id
-      type: object
     CloudintegrationtypesCollectedLogAttribute:
       properties:
         name:

@@ -612,6 +581,13 @@
         unit:
           type: string
       type: object
+    CloudintegrationtypesCollectionStrategy:
+      properties:
+        aws:
+          $ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
+      required:
+        - aws
+      type: object
     CloudintegrationtypesConnectionArtifact:
       properties:
         aws:

@@ -619,21 +595,12 @@
       required:
         - aws
       type: object
-    CloudintegrationtypesCredentials:
+    CloudintegrationtypesConnectionArtifactRequest:
       properties:
-        ingestionKey:
-          type: string
-        ingestionUrl:
-          type: string
-        sigNozApiKey:
-          type: string
-        sigNozApiUrl:
-          type: string
+        aws:
+          $ref: '#/components/schemas/CloudintegrationtypesAWSConnectionArtifactRequest'
       required:
-        - sigNozApiUrl
-        - sigNozApiKey
-        - ingestionUrl
-        - ingestionKey
+        - aws
       type: object
     CloudintegrationtypesDashboard:
       properties:

@@ -659,7 +626,7 @@
           nullable: true
           type: array
       type: object
-    CloudintegrationtypesGettableAccountWithConnectionArtifact:
+    CloudintegrationtypesGettableAccountWithArtifact:
       properties:
         connectionArtifact:
           $ref: '#/components/schemas/CloudintegrationtypesConnectionArtifact'

@@ -678,7 +645,7 @@
       required:
        - accounts
       type: object
-    CloudintegrationtypesGettableAgentCheckIn:
+    CloudintegrationtypesGettableAgentCheckInResponse:
       properties:
         account_id:
           type: string

@@ -727,72 +694,12 @@
             type: string
           type: array
         telemetry:
-          $ref: '#/components/schemas/CloudintegrationtypesOldAWSCollectionStrategy'
+          $ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
       required:
         - enabled_regions
         - telemetry
       type: object
-    CloudintegrationtypesOldAWSCollectionStrategy:
-      properties:
-        aws_logs:
-          $ref: '#/components/schemas/CloudintegrationtypesOldAWSLogsStrategy'
-        aws_metrics:
-          $ref: '#/components/schemas/CloudintegrationtypesOldAWSMetricsStrategy'
-        provider:
-          type: string
-        s3_buckets:
-          additionalProperties:
-            items:
-              type: string
-            type: array
-          type: object
-      type: object
-    CloudintegrationtypesOldAWSLogsStrategy:
-      properties:
-        cloudwatch_logs_subscriptions:
-          items:
-            properties:
-              filter_pattern:
-                type: string
-              log_group_name_prefix:
-                type: string
-            type: object
-          nullable: true
-          type: array
-      type: object
-    CloudintegrationtypesOldAWSMetricsStrategy:
-      properties:
-        cloudwatch_metric_stream_filters:
-          items:
-            properties:
-              MetricNames:
-                items:
-                  type: string
-                type: array
-              Namespace:
-                type: string
-            type: object
-          nullable: true
-          type: array
-      type: object
-    CloudintegrationtypesPostableAccount:
-      properties:
-        config:
-          $ref: '#/components/schemas/CloudintegrationtypesPostableAccountConfig'
-        credentials:
-          $ref: '#/components/schemas/CloudintegrationtypesCredentials'
-      required:
-        - config
-        - credentials
-      type: object
-    CloudintegrationtypesPostableAccountConfig:
-      properties:
-        aws:
-          $ref: '#/components/schemas/CloudintegrationtypesAWSPostableAccountConfig'
-      required:
-        - aws
-      type: object
-    CloudintegrationtypesPostableAgentCheckIn:
+    CloudintegrationtypesPostableAgentCheckInRequest:
       properties:
         account_id:
           type: string

@@ -820,8 +727,6 @@
       properties:
         assets:
           $ref: '#/components/schemas/CloudintegrationtypesAssets'
-        cloudIntegrationService:
-          $ref: '#/components/schemas/CloudintegrationtypesCloudIntegrationService'
         dataCollected:
           $ref: '#/components/schemas/CloudintegrationtypesDataCollected'
         icon:

@@ -830,10 +735,12 @@
           type: string
         overview:
           type: string
-        supportedSignals:
+        serviceConfig:
+          $ref: '#/components/schemas/CloudintegrationtypesServiceConfig'
+        supported_signals:
           $ref: '#/components/schemas/CloudintegrationtypesSupportedSignals'
         telemetryCollectionStrategy:
-          $ref: '#/components/schemas/CloudintegrationtypesTelemetryCollectionStrategy'
+          $ref: '#/components/schemas/CloudintegrationtypesCollectionStrategy'
         title:
           type: string
       required:

@@ -842,10 +749,9 @@
         - icon
         - overview
         - assets
-        - supportedSignals
+        - supported_signals
         - dataCollected
         - telemetryCollectionStrategy
-        - cloudIntegrationService
       type: object
     CloudintegrationtypesServiceConfig:
       properties:

@@ -854,22 +760,6 @@
       required:
        - aws
       type: object
-    CloudintegrationtypesServiceID:
-      enum:
-        - alb
-        - api-gateway
-        - dynamodb
-        - ec2
-        - ecs
-        - eks
-        - elasticache
-        - lambda
-        - msk
-        - rds
-        - s3sync
-        - sns
-        - sqs
-      type: string
     CloudintegrationtypesServiceMetadata:
       properties:
         enabled:

@@ -893,13 +783,6 @@
         metrics:
           type: boolean
       type: object
-    CloudintegrationtypesTelemetryCollectionStrategy:
-      properties:
-        aws:
-          $ref: '#/components/schemas/CloudintegrationtypesAWSTelemetryCollectionStrategy'
-      required:
-        - aws
-      type: object
     CloudintegrationtypesUpdatableAccount:
       properties:
         config:

@@ -1215,6 +1098,148 @@
         enabled:
           type: boolean
       type: object
+    InframonitoringtypesHostRecord:
+      properties:
+        cpu:
+          format: double
+          type: number
+        diskUsage:
+          format: double
+          type: number
+        hostName:
+          type: string
+        load15:
+          format: double
+          type: number
+        memory:
+          format: double
+          type: number
+        meta:
+          additionalProperties: {}
+          nullable: true
+          type: object
+        status:
+          type: string
+        wait:
+          format: double
+          type: number
+      type: object
+    InframonitoringtypesHostStatus:
+      enum:
+        - active
+        - inactive
+        - ""
+      type: string
+    InframonitoringtypesHostsListRequest:
+      properties:
+        end:
+          format: int64
+          type: integer
+        filter:
+          $ref: '#/components/schemas/Querybuildertypesv5Filter'
+        filterByStatus:
+          $ref: '#/components/schemas/InframonitoringtypesHostStatus'
+        groupBy:
+          items:
+            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
+          nullable: true
+          type: array
+        limit:
+          type: integer
+        offset:
+          type: integer
+        orderBy:
+          $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
+        start:
+          format: int64
+          type: integer
+      type: object
+    InframonitoringtypesHostsListResponse:
+      properties:
+        endTimeBeforeRetention:
+          type: boolean
+        records:
+          items:
+            $ref: '#/components/schemas/InframonitoringtypesHostRecord'
+          nullable: true
+          type: array
+        sentAnyMetricsData:
+          type: boolean
+        total:
+          type: integer
+        type:
+          type: string
+      type: object
+    InframonitoringtypesPodRecord:
+      properties:
+        meta:
+          additionalProperties: {}
+          nullable: true
+          type: object
+        podAge:
+          format: int64
+          type: integer
+        podCPU:
+          format: double
+          type: number
+        podCPULimit:
+          format: double
+          type: number
+        podCPURequest:
+          format: double
+          type: number
+        podMemory:
+          format: double
+          type: number
+        podMemoryLimit:
+          format: double
+          type: number
+        podMemoryRequest:
+          format: double
+          type: number
+        podPhase:
+          type: string
+        podUID:
+          type: string
+      type: object
+    InframonitoringtypesPodsListRequest:
+      properties:
+        end:
+          format: int64
+          type: integer
+        filter:
+          $ref: '#/components/schemas/Querybuildertypesv5Filter'
+        groupBy:
+          items:
+            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
+          nullable: true
+          type: array
+        limit:
+          type: integer
+        offset:
+          type: integer
+        orderBy:
+          $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
+        start:
+          format: int64
+          type: integer
+      type: object
+    InframonitoringtypesPodsListResponse:
+      properties:
+        endTimeBeforeRetention:
+          type: boolean
+        records:
+          items:
+            $ref: '#/components/schemas/InframonitoringtypesPodRecord'
+          nullable: true
+          type: array
+        sentAnyMetricsData:
+          type: boolean
+        total:
+          type: integer
+        type:
+          type: string
+      type: object
     MetricsexplorertypesInspectMetricsRequest:
       properties:
         end:
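Taken together, the schema changes above collapse the old telemetryCollectionStrategy tree into a flatter telemetry field on the AWS integration config. As a reading aid, here is a minimal Go sketch of the new shape, inferred only from the OpenAPI schemas in this diff; the type and field names are illustrative and are not copied from the actual cloudintegrationtypes package:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Inferred from CloudintegrationtypesAWSCollectionStrategy: logs and metrics
// strategies plus an optional map of S3 buckets keyed by region.
type AWSCollectionStrategy struct {
	AWSLogs    *AWSLogsStrategy    `json:"aws_logs,omitempty"`
	AWSMetrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
	S3Buckets  map[string][]string `json:"s3_buckets,omitempty"`
}

// Inferred from CloudintegrationtypesAWSLogsStrategy.
type AWSLogsStrategy struct {
	CloudWatchLogsSubscriptions []CloudWatchLogsSubscription `json:"cloudwatch_logs_subscriptions"`
}

type CloudWatchLogsSubscription struct {
	FilterPattern      string `json:"filter_pattern"`
	LogGroupNamePrefix string `json:"log_group_name_prefix"`
}

// Inferred from CloudintegrationtypesAWSMetricsStrategy; note the upper-case
// JSON keys, which the spec preserves.
type AWSMetricsStrategy struct {
	CloudWatchMetricStreamFilters []CloudWatchMetricStreamFilter `json:"cloudwatch_metric_stream_filters"`
}

type CloudWatchMetricStreamFilter struct {
	MetricNames []string `json:"MetricNames,omitempty"`
	Namespace   string   `json:"Namespace"`
}

// Inferred from CloudintegrationtypesAWSIntegrationConfig: enabledRegions and
// the renamed telemetry field are both required by the new schema.
type AWSIntegrationConfig struct {
	EnabledRegions []string              `json:"enabledRegions"`
	Telemetry      AWSCollectionStrategy `json:"telemetry"`
}

func main() {
	cfg := AWSIntegrationConfig{
		EnabledRegions: []string{"us-east-1"},
		Telemetry: AWSCollectionStrategy{
			AWSMetrics: &AWSMetricsStrategy{
				CloudWatchMetricStreamFilters: []CloudWatchMetricStreamFilter{
					{Namespace: "AWS/RDS"},
				},
			},
		},
	}
	out, _ := json.MarshalIndent(cfg, "", "  ")
	fmt.Println(string(out))
}
```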
@@ -3198,7 +3223,7 @@ paths:
       content:
         application/json:
           schema:
-            $ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckIn'
+            $ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckInRequest'
     responses:
       "200":
         content:

@@ -3206,7 +3231,7 @@
           schema:
             properties:
               data:
-                $ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckIn'
+                $ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckInResponse'
               status:
                 type: string
             required:

@@ -3307,7 +3332,7 @@
       content:
         application/json:
           schema:
-            $ref: '#/components/schemas/CloudintegrationtypesPostableAccount'
+            $ref: '#/components/schemas/CloudintegrationtypesConnectionArtifactRequest'
     responses:
       "200":
         content:

@@ -3315,7 +3340,7 @@
           schema:
             properties:
               data:
-                $ref: '#/components/schemas/CloudintegrationtypesGettableAccountWithConnectionArtifact'
+                $ref: '#/components/schemas/CloudintegrationtypesGettableAccountWithArtifact'
               status:
                 type: string
             required:

@@ -3511,61 +3536,6 @@
     summary: Update account
     tags:
       - cloudintegration
-  /api/v1/cloud_integrations/{cloud_provider}/accounts/{id}/services/{service_id}:
-    put:
-      deprecated: false
-      description: This endpoint updates a service for the specified cloud provider
-      operationId: UpdateService
-      parameters:
-        - in: path
-          name: cloud_provider
-          required: true
-          schema:
-            type: string
-        - in: path
-          name: id
-          required: true
-          schema:
-            type: string
-        - in: path
-          name: service_id
-          required: true
-          schema:
-            type: string
-      requestBody:
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/CloudintegrationtypesUpdatableService'
-      responses:
-        "204":
-          description: No Content
-        "401":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Unauthorized
-        "403":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Forbidden
-        "500":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Internal Server Error
-      security:
-        - api_key:
-            - ADMIN
-        - tokenizer:
-            - ADMIN
-      summary: Update service
-      tags:
-        - cloudintegration
   /api/v1/cloud_integrations/{cloud_provider}/accounts/check_in:
     post:
       deprecated: false

@@ -3581,7 +3551,7 @@
       content:
         application/json:
           schema:
-            $ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckIn'
+            $ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckInRequest'
     responses:
       "200":
         content:

@@ -3589,7 +3559,7 @@
           schema:
             properties:
               data:
-                $ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckIn'
+                $ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckInResponse'
               status:
                 type: string
             required:

@@ -3623,59 +3593,6 @@
     summary: Agent check-in
     tags:
       - cloudintegration
-  /api/v1/cloud_integrations/{cloud_provider}/credentials:
-    get:
-      deprecated: false
-      description: This endpoint retrieves the connection credentials required for
-        integration
-      operationId: GetConnectionCredentials
-      parameters:
-        - in: path
-          name: cloud_provider
-          required: true
-          schema:
-            type: string
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                properties:
-                  data:
-                    $ref: '#/components/schemas/CloudintegrationtypesCredentials'
-                  status:
-                    type: string
-                required:
-                  - status
-                  - data
-                type: object
-          description: OK
-        "401":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Unauthorized
-        "403":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Forbidden
-        "500":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Internal Server Error
-      security:
-        - api_key:
-            - ADMIN
-        - tokenizer:
-            - ADMIN
-      summary: Get connection credentials
-      tags:
-        - cloudintegration
   /api/v1/cloud_integrations/{cloud_provider}/services:
     get:
       deprecated: false

@@ -3786,6 +3703,55 @@
     summary: Get service
     tags:
       - cloudintegration
+    put:
+      deprecated: false
+      description: This endpoint updates a service for the specified cloud provider
+      operationId: UpdateService
+      parameters:
+        - in: path
+          name: cloud_provider
+          required: true
+          schema:
+            type: string
+        - in: path
+          name: service_id
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/CloudintegrationtypesUpdatableService'
+      responses:
+        "204":
+          description: No Content
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      security:
+        - api_key:
+            - ADMIN
+        - tokenizer:
+            - ADMIN
+      summary: Update service
+      tags:
+        - cloudintegration
   /api/v1/complete/google:
     get:
       deprecated: false

@@ -7356,6 +7322,122 @@
     summary: Health check
     tags:
       - health
+  /api/v2/infra-monitoring/hosts/list:
+    post:
+      deprecated: false
+      description: This endpoint returns a list of hosts along with other information
+        for each of them
+      operationId: HostsList
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/InframonitoringtypesHostsListRequest'
+      responses:
+        "200":
+          content:
+            application/json:
+              schema:
+                properties:
+                  data:
+                    $ref: '#/components/schemas/InframonitoringtypesHostsListResponse'
+                  status:
+                    type: string
+                required:
+                  - status
+                  - data
+                type: object
+          description: OK
+        "400":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      security:
+        - api_key:
+            - VIEWER
+        - tokenizer:
+            - VIEWER
+      summary: List Hosts for Infra Monitoring
+      tags:
+        - infra-monitoring
+  /api/v2/infra-monitoring/pods/list:
+    post:
+      deprecated: false
+      description: This endpoint returns a list of pods along with metrics and metadata
+        for each of them
+      operationId: PodsList
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/InframonitoringtypesPodsListRequest'
+      responses:
+        "200":
+          content:
+            application/json:
+              schema:
+                properties:
+                  data:
+                    $ref: '#/components/schemas/InframonitoringtypesPodsListResponse'
+                  status:
+                    type: string
+                required:
+                  - status
+                  - data
+                type: object
+          description: OK
+        "400":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Bad Request
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      security:
+        - api_key:
+            - VIEWER
+        - tokenizer:
+            - VIEWER
+      summary: List Pods for Infra Monitoring
+      tags:
+        - infra-monitoring
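Both new infra-monitoring endpoints accept a POST body shaped by the request schemas defined earlier in this diff. A minimal Go sketch of calling the hosts list follows; the base URL and the SIGNOZ-API-KEY header are assumptions for illustration (adjust for your deployment), and treating start and end as epoch milliseconds is inferred from the int64 format rather than stated by the spec:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

// HostsListRequest mirrors InframonitoringtypesHostsListRequest from the spec,
// keeping only the fields used here (filter, groupBy, and orderBy omitted).
type HostsListRequest struct {
	Start          int64  `json:"start"`
	End            int64  `json:"end"`
	Limit          int    `json:"limit,omitempty"`
	Offset         int    `json:"offset,omitempty"`
	FilterByStatus string `json:"filterByStatus,omitempty"` // "active", "inactive", or ""
}

func main() {
	now := time.Now()
	body, _ := json.Marshal(HostsListRequest{
		Start:          now.Add(-30 * time.Minute).UnixMilli(), // assumed epoch millis
		End:            now.UnixMilli(),
		Limit:          10,
		FilterByStatus: "active",
	})

	// Base URL and auth header are illustrative assumptions.
	req, _ := http.NewRequest(http.MethodPost,
		"http://localhost:8080/api/v2/infra-monitoring/hosts/list", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<your-api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Per the spec, the 200 response wraps the records in {status, data}.
	var out struct {
		Status string `json:"status"`
		Data   struct {
			Total   int              `json:"total"`
			Records []map[string]any `json:"records"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("status=%s total=%d\n", out.Status, out.Data.Total)
}
```

The pods endpoint is identical in shape: swap the path for /api/v2/infra-monitoring/pods/list and decode the pods response instead.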
   /api/v2/livez:
     get:
       deprecated: false
@@ -49,6 +49,7 @@ func NewAnomalyRule(
     logger *slog.Logger,
     opts ...baserules.RuleOption,
 ) (*AnomalyRule, error) {

+    logger.Info("creating new AnomalyRule", slog.String("rule.id", id))

     opts = append(opts, baserules.WithLogger(logger))

@@ -58,44 +59,44 @@ func NewAnomalyRule(
         return nil, err
     }

-    r := AnomalyRule{
+    t := AnomalyRule{
         BaseRule: baseRule,
-        querier:  querier,
-        version:  p.Version,
-        logger:   logger.With(slog.String("rule.id", id)),
     }

     switch p.RuleCondition.Seasonality {
     case ruletypes.SeasonalityHourly:
-        r.seasonality = anomaly.SeasonalityHourly
+        t.seasonality = anomaly.SeasonalityHourly
     case ruletypes.SeasonalityDaily:
-        r.seasonality = anomaly.SeasonalityDaily
+        t.seasonality = anomaly.SeasonalityDaily
     case ruletypes.SeasonalityWeekly:
-        r.seasonality = anomaly.SeasonalityWeekly
+        t.seasonality = anomaly.SeasonalityWeekly
     default:
-        r.seasonality = anomaly.SeasonalityDaily
+        t.seasonality = anomaly.SeasonalityDaily
     }

-    r.logger.Info("using seasonality", slog.String("rule.seasonality", r.seasonality.StringValue()))
+    logger.Info("using seasonality", slog.String("rule.id", id), slog.String("rule.seasonality", t.seasonality.StringValue()))

-    if r.seasonality == anomaly.SeasonalityHourly {
-        r.provider = anomaly.NewHourlyProvider(
+    if t.seasonality == anomaly.SeasonalityHourly {
+        t.provider = anomaly.NewHourlyProvider(
             anomaly.WithQuerier[*anomaly.HourlyProvider](querier),
-            anomaly.WithLogger[*anomaly.HourlyProvider](r.logger),
+            anomaly.WithLogger[*anomaly.HourlyProvider](logger),
         )
-    } else if r.seasonality == anomaly.SeasonalityDaily {
-        r.provider = anomaly.NewDailyProvider(
+    } else if t.seasonality == anomaly.SeasonalityDaily {
+        t.provider = anomaly.NewDailyProvider(
             anomaly.WithQuerier[*anomaly.DailyProvider](querier),
-            anomaly.WithLogger[*anomaly.DailyProvider](r.logger),
+            anomaly.WithLogger[*anomaly.DailyProvider](logger),
         )
-    } else if r.seasonality == anomaly.SeasonalityWeekly {
-        r.provider = anomaly.NewWeeklyProvider(
+    } else if t.seasonality == anomaly.SeasonalityWeekly {
+        t.provider = anomaly.NewWeeklyProvider(
             anomaly.WithQuerier[*anomaly.WeeklyProvider](querier),
-            anomaly.WithLogger[*anomaly.WeeklyProvider](r.logger),
+            anomaly.WithLogger[*anomaly.WeeklyProvider](logger),
         )
     }

-    return &r, nil
+    t.querier = querier
+    t.version = p.Version
+    t.logger = logger
+    return &t, nil
 }

@@ -103,11 +104,8 @@ func (r *AnomalyRule) Type() ruletypes.RuleType {
 }

 func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) *qbtypes.QueryRangeRequest {
-    r.logger.InfoContext(
-        ctx, "prepare query range request", slog.Int64("ts", ts.UnixMilli()),
-        slog.Int64("eval.window_ms", r.EvalWindow().Milliseconds()),
-        slog.Int64("eval.delay_ms", r.EvalDelay().Milliseconds()),
-    )
+
+    r.logger.InfoContext(ctx, "prepare query range request", slog.String("rule.id", r.ID()), slog.Int64("ts", ts.UnixMilli()), slog.Int64("eval.window_ms", r.EvalWindow().Milliseconds()), slog.Int64("eval.delay_ms", r.EvalDelay().Milliseconds()))

     startTs, endTs := r.Timestamps(ts)
     start, end := startTs.UnixMilli(), endTs.UnixMilli()

@@ -147,7 +145,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
     }

     if queryResult == nil {
-        r.logger.WarnContext(ctx, "nil qb result", slog.Int64("ts", ts.UnixMilli()))
+        r.logger.WarnContext(ctx, "nil qb result", slog.String("rule.id", r.ID()), slog.Int64("ts", ts.UnixMilli()))
         return ruletypes.Vector{}, nil
     }

@@ -158,7 +156,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
     if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
         return ruletypes.Vector{*missingDataAlert}, nil
     } else if !hasData {
-        r.logger.WarnContext(ctx, "no anomaly result")
+        r.logger.WarnContext(ctx, "no anomaly result", slog.String("rule.id", r.ID()))
         return ruletypes.Vector{}, nil
     }

@@ -166,7 +164,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t

     scoresJSON, _ := json.Marshal(queryResult.Aggregations[0].AnomalyScores)
     // TODO(srikanthccv): this could be noisy but we do this to answer false alert requests
-    r.logger.InfoContext(ctx, "anomaly scores", slog.String("anomaly.scores", string(scoresJSON)))
+    r.logger.InfoContext(ctx, "anomaly scores", slog.String("rule.id", r.ID()), slog.String("anomaly.scores", string(scoresJSON)))

     // Filter out new series if newGroupEvalDelay is configured
     seriesToProcess := queryResult.Aggregations[0].AnomalyScores

@@ -174,7 +172,7 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
         filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
         // In case of error we log the error and continue with the original series
         if filterErr != nil {
-            r.logger.ErrorContext(ctx, "error filtering new series", errors.Attr(filterErr))
+            r.logger.ErrorContext(ctx, "error filtering new series", slog.String("rule.id", r.ID()), errors.Attr(filterErr))
         } else {
             seriesToProcess = filteredSeries
         }

@@ -182,11 +180,7 @@

     for _, series := range seriesToProcess {
         if !r.Condition().ShouldEval(series) {
-            r.logger.InfoContext(
-                ctx, "not enough data points to evaluate series, skipping",
-                slog.Int("series.num_points", len(series.Values)),
-                slog.Int("series.required_points", r.Condition().RequiredNumPoints),
-            )
+            r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", slog.String("rule.id", r.ID()), slog.Int("series.num_points", len(series.Values)), slog.Int("series.required_points", r.Condition().RequiredNumPoints))
             continue
         }
         results, err := r.Threshold.Eval(series, r.Unit(), ruletypes.EvalData{

@@ -210,7 +204,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
     var res ruletypes.Vector
     var err error

-    r.logger.InfoContext(ctx, "running query")
+    r.logger.InfoContext(ctx, "running query", slog.String("rule.id", r.ID()))
     res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)

     if err != nil {

@@ -236,10 +230,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
         }
         value := valueFormatter.Format(smpl.V, r.Unit())
         threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
-        r.logger.DebugContext(
-            ctx, "alert template data for rule", slog.String("formatter.name", valueFormatter.Name()),
-            slog.String("alert.value", value), slog.String("alert.threshold", threshold),
-        )
+        r.logger.DebugContext(ctx, "alert template data for rule", slog.String("rule.id", r.ID()), slog.String("formatter.name", valueFormatter.Name()), slog.String("alert.value", value), slog.String("alert.threshold", threshold))

         tmplData := ruletypes.AlertTemplateData(l, value, threshold)
         // Inject some convenience variables that are easier to remember for users

@@ -259,7 +250,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
             result, err := tmpl.Expand()
             if err != nil {
                 result = fmt.Sprintf("<error expanding template: %s>", err)
-                r.logger.ErrorContext(ctx, "expanding alert template failed", errors.Attr(err), slog.Any("alert.template_data", tmplData))
+                r.logger.ErrorContext(ctx, "expanding alert template failed", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.template_data", tmplData))
             }
             return result
         }

@@ -289,7 +280,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
         resultFPs[h] = struct{}{}

         if _, ok := alerts[h]; ok {
-            r.logger.ErrorContext(ctx, "the alert query returns duplicate records", slog.Any("alert", alerts[h]))
+            r.logger.ErrorContext(ctx, "the alert query returns duplicate records", slog.String("rule.id", r.ID()), slog.Any("alert", alerts[h]))
             err = errors.NewInternalf(errors.CodeInternal, "duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
             return 0, err
         }

@@ -308,7 +299,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
         }
     }

-    r.logger.InfoContext(ctx, "number of alerts found", slog.Int("alert.count", len(alerts)))
+    r.logger.InfoContext(ctx, "number of alerts found", slog.String("rule.id", r.ID()), slog.Int("alert.count", len(alerts)))
     // alerts[h] is ready, add or update active list now
     for h, a := range alerts {
         // Check whether we already have alerting state for the identifying label set.

@@ -335,7 +326,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
     for fp, a := range r.Active {
         labelsJSON, err := json.Marshal(a.QueryResultLabels)
         if err != nil {
-            r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), slog.Any("alert.labels", a.Labels))
+            r.logger.ErrorContext(ctx, "error marshaling labels", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.labels", a.Labels))
         }
         if _, ok := resultFPs[fp]; !ok {
             // If the alert was previously firing, keep it around for a given

@@ -390,7 +381,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
             state = ruletypes.StateFiring
         }
         a.State = state
-        r.logger.DebugContext(ctx, "converting alert state", slog.Any("alert.state", state))
+        r.logger.DebugContext(ctx, "converting alert state", slog.String("rule.id", r.ID()), slog.Any("alert.state", state))
         itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
             RuleID:   r.ID(),
             RuleName: r.Name(),

@@ -413,7 +404,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
         itemsToAdd[idx] = item
     }

-    _ = r.RecordRuleStateHistory(ctx, itemsToAdd)
+    r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)

     return len(r.Active), nil
 }
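The recurring pattern in this file is a logging change: the removed code scoped the logger once with logger.With(slog.String("rule.id", id)) and logged through r.logger, while the added code keeps an unscoped logger and repeats the rule.id attribute at every call site. Both emit the same attribute on each record. A self-contained sketch of the two styles with the standard log/slog package (the rule struct here is invented for illustration):

```go
package main

import (
	"log/slog"
	"os"
)

type rule struct {
	id     string
	logger *slog.Logger
}

func main() {
	base := slog.New(slog.NewJSONHandler(os.Stdout, nil))

	// Style A (the removed code): scope the logger once at construction,
	// so every subsequent log call carries rule.id automatically.
	r := rule{id: "rule-42", logger: base.With(slog.String("rule.id", "rule-42"))}
	r.logger.Info("running query")

	// Style B (the added code): keep the unscoped logger and repeat the
	// attribute at each call site.
	r2 := rule{id: "rule-42", logger: base}
	r2.logger.Info("running query", slog.String("rule.id", r2.id))
}
```

Scoping with With is the conventional choice when every record from a component should carry the attribute; repeating it per call trades that convenience for holding one shared logger value.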
@@ -1,4 +1,4 @@
-import { ReactChild, useCallback, useMemo } from 'react';
+import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
 import { matchPath, Redirect, useLocation } from 'react-router-dom';
 import getLocalStorageApi from 'api/browser/localstorage/get';
 import setLocalStorageApi from 'api/browser/localstorage/set';

@@ -8,10 +8,12 @@ import { LOCALSTORAGE } from 'constants/localStorage';
 import { ORG_PREFERENCES } from 'constants/orgPreferences';
 import ROUTES from 'constants/routes';
 import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
 import history from 'lib/history';
+import { isEmpty } from 'lodash-es';
 import { useAppContext } from 'providers/App/App';
 import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
 import { OrgPreference } from 'types/api/preferences/preference';
+import { Organization } from 'types/api/user/getOrganization';
 import { USER_ROLES } from 'types/roles';
 import { routePermission } from 'utils/permission';

@@ -23,7 +25,6 @@ import routes, {
   SUPPORT_ROUTE,
 } from './routes';

-// eslint-disable-next-line sonarjs/cognitive-complexity
 function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
   const location = useLocation();
   const { pathname } = location;

@@ -56,12 +57,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
   const currentRoute = mapRoutes.get('current');
   const { isCloudUser: isCloudUserVal } = useGetTenantLicense();

-  const orgData = useMemo(() => {
-    if (org && org.length > 0 && org[0].id !== undefined) {
-      return org[0];
-    }
-    return undefined;
-  }, [org]);
+  const [orgData, setOrgData] = useState<Organization | undefined>(undefined);

   const { data: usersData, isFetching: isFetchingUsers } = useListUsers({
     query: {

@@ -79,7 +75,214 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
     return remainingUsers.length === 1;
   }, [usersData?.data]);

+  // Handle old routes - redirect to new routes
+  useEffect(() => {
+    if (
+      isCloudUserVal &&
+      !isFetchingOrgPreferences &&
+      orgPreferences &&
+      !isFetchingUsers &&
+      usersData &&
+      usersData.data
+    ) {
+      const isOnboardingComplete = orgPreferences?.find(
+        (preference: OrgPreference) =>
+          preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
+      )?.value;
+
+      // Don't redirect to onboarding if workspace has issues (blocked, suspended, or restricted)
+      // User needs access to settings/billing to fix payment issues
+      const isWorkspaceBlocked = trialInfo?.workSpaceBlock;
+      const isWorkspaceSuspended = activeLicense?.state === LicenseState.DEFAULTED;
+      const isWorkspaceAccessRestricted =
+        activeLicense?.state === LicenseState.TERMINATED ||
+        activeLicense?.state === LicenseState.EXPIRED ||
+        activeLicense?.state === LicenseState.CANCELLED;
+
+      const hasWorkspaceIssue =
+        isWorkspaceBlocked || isWorkspaceSuspended || isWorkspaceAccessRestricted;
+
+      if (hasWorkspaceIssue) {
+        return;
+      }
+
+      const isFirstUser = checkFirstTimeUser();
+      if (
+        isFirstUser &&
+        !isOnboardingComplete &&
+        // if the current route is allowed to be overriden by org onboarding then only do the same
+        !ROUTES_NOT_TO_BE_OVERRIDEN.includes(pathname)
+      ) {
+        history.push(ROUTES.ONBOARDING);
+      }
+    }
+  }, [
+    checkFirstTimeUser,
+    isCloudUserVal,
+    isFetchingOrgPreferences,
+    isFetchingUsers,
+    orgPreferences,
+    usersData,
+    pathname,
+    trialInfo?.workSpaceBlock,
+    activeLicense?.state,
+  ]);
+
+  const navigateToWorkSpaceBlocked = useCallback((): void => {
+    const isRouteEnabledForWorkspaceBlockedState =
+      isAdmin &&
+      (pathname === ROUTES.SETTINGS ||
+        pathname === ROUTES.ORG_SETTINGS ||
+        pathname === ROUTES.MEMBERS_SETTINGS ||
+        pathname === ROUTES.BILLING ||
+        pathname === ROUTES.MY_SETTINGS);
+
+    if (
+      pathname &&
+      pathname !== ROUTES.WORKSPACE_LOCKED &&
+      !isRouteEnabledForWorkspaceBlockedState
+    ) {
+      history.push(ROUTES.WORKSPACE_LOCKED);
+    }
+  }, [isAdmin, pathname]);
+
+  const navigateToWorkSpaceAccessRestricted = useCallback((): void => {
+    if (pathname && pathname !== ROUTES.WORKSPACE_ACCESS_RESTRICTED) {
+      history.push(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
+    }
+  }, [pathname]);
+
+  useEffect(() => {
+    if (!isFetchingActiveLicense && activeLicense) {
+      const isTerminated = activeLicense.state === LicenseState.TERMINATED;
+      const isExpired = activeLicense.state === LicenseState.EXPIRED;
+      const isCancelled = activeLicense.state === LicenseState.CANCELLED;
+
+      const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled;
+
+      const { platform } = activeLicense;
+
+      if (isWorkspaceAccessRestricted && platform === LicensePlatform.CLOUD) {
+        navigateToWorkSpaceAccessRestricted();
+      }
+    }
+  }, [
+    isFetchingActiveLicense,
+    activeLicense,
+    navigateToWorkSpaceAccessRestricted,
+  ]);
+
+  useEffect(() => {
+    if (!isFetchingActiveLicense) {
+      const shouldBlockWorkspace = trialInfo?.workSpaceBlock;
+
+      if (
+        shouldBlockWorkspace &&
+        activeLicense?.platform === LicensePlatform.CLOUD
+      ) {
+        navigateToWorkSpaceBlocked();
+      }
+    }
+  }, [
+    isFetchingActiveLicense,
+    trialInfo?.workSpaceBlock,
+    activeLicense?.platform,
+    navigateToWorkSpaceBlocked,
+  ]);
+
+  const navigateToWorkSpaceSuspended = useCallback((): void => {
+    if (pathname && pathname !== ROUTES.WORKSPACE_SUSPENDED) {
+      history.push(ROUTES.WORKSPACE_SUSPENDED);
+    }
+  }, [pathname]);
+
+  useEffect(() => {
+    if (!isFetchingActiveLicense && activeLicense) {
+      const shouldSuspendWorkspace =
+        activeLicense.state === LicenseState.DEFAULTED;
+
+      if (
+        shouldSuspendWorkspace &&
+        activeLicense.platform === LicensePlatform.CLOUD
+      ) {
+        navigateToWorkSpaceSuspended();
+      }
+    }
+  }, [isFetchingActiveLicense, activeLicense, navigateToWorkSpaceSuspended]);
+
+  useEffect(() => {
+    if (org && org.length > 0 && org[0].id !== undefined) {
+      setOrgData(org[0]);
+    }
+  }, [org]);
+
+  // if the feature flag is enabled and the current route is /get-started then redirect to /get-started-with-signoz-cloud
+  useEffect(() => {
+    if (
+      currentRoute?.path === ROUTES.GET_STARTED &&
+      featureFlags?.find((e) => e.name === FeatureKeys.ONBOARDING_V3)?.active
+    ) {
+      history.push(ROUTES.GET_STARTED_WITH_CLOUD);
+    }
+  }, [currentRoute, featureFlags]);
+
+  // eslint-disable-next-line sonarjs/cognitive-complexity
+  useEffect(() => {
+    // if it is an old route navigate to the new route
+    if (isOldRoute) {
+      // this will be handled by the redirect component below
+      return;
+    }
+
+    // if the current route is public dashboard then don't redirect to login
+    const isPublicDashboard = currentRoute?.path === ROUTES.PUBLIC_DASHBOARD;
+
+    if (isPublicDashboard) {
+      return;
+    }
+
+    // if the current route
+    if (currentRoute) {
+      const { isPrivate, key } = currentRoute;
+      if (isPrivate) {
+        if (isLoggedInState) {
+          const route = routePermission[key];
+          if (route && route.find((e) => e === user.role) === undefined) {
+            history.push(ROUTES.UN_AUTHORIZED);
+          }
+        } else {
+          setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
+          history.push(ROUTES.LOGIN);
+        }
+      } else if (isLoggedInState) {
+        const fromPathname = getLocalStorageApi(
+          LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
+        );
+        if (fromPathname) {
+          history.push(fromPathname);
+          setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
+        } else if (pathname !== ROUTES.SOMETHING_WENT_WRONG) {
+          history.push(ROUTES.HOME);
+        }
+      } else {
+        // do nothing as the unauthenticated routes are LOGIN and SIGNUP and the LOGIN container takes care of routing to signup if
+        // setup is not completed
+      }
+    } else if (isLoggedInState) {
+      const fromPathname = getLocalStorageApi(
+        LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
+      );
+      if (fromPathname) {
+        history.push(fromPathname);
+        setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
+      } else {
+        history.push(ROUTES.HOME);
+      }
+    } else {
+      setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
+      history.push(ROUTES.LOGIN);
+    }
+  }, [isLoggedInState, pathname, user, isOldRoute, currentRoute, location]);

   if (isOldRoute) {
     const redirectUrl = oldNewRoutesMapping[pathname];
     return (

@@ -93,143 +296,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
     );
   }

-  // Public dashboard - no redirect needed
-  const isPublicDashboard = currentRoute?.path === ROUTES.PUBLIC_DASHBOARD;
-  if (isPublicDashboard) {
-    return <>{children}</>;
-  }
-
-  // Check for workspace access restriction (cloud only)
-  const isCloudPlatform = activeLicense?.platform === LicensePlatform.CLOUD;
-
-  if (!isFetchingActiveLicense && activeLicense && isCloudPlatform) {
-    const isTerminated = activeLicense.state === LicenseState.TERMINATED;
-    const isExpired = activeLicense.state === LicenseState.EXPIRED;
-    const isCancelled = activeLicense.state === LicenseState.CANCELLED;
-    const isWorkspaceAccessRestricted = isTerminated || isExpired || isCancelled;
-
-    if (
-      isWorkspaceAccessRestricted &&
-      pathname !== ROUTES.WORKSPACE_ACCESS_RESTRICTED
-    ) {
-      return <Redirect to={ROUTES.WORKSPACE_ACCESS_RESTRICTED} />;
-    }
-
-    // Check for workspace suspended (DEFAULTED)
-    const shouldSuspendWorkspace = activeLicense.state === LicenseState.DEFAULTED;
-    if (shouldSuspendWorkspace && pathname !== ROUTES.WORKSPACE_SUSPENDED) {
-      return <Redirect to={ROUTES.WORKSPACE_SUSPENDED} />;
-    }
-  }
-
-  // Check for workspace blocked (trial expired)
-  if (!isFetchingActiveLicense && isCloudPlatform && trialInfo?.workSpaceBlock) {
-    const isRouteEnabledForWorkspaceBlockedState =
-      isAdmin &&
-      (pathname === ROUTES.SETTINGS ||
-        pathname === ROUTES.ORG_SETTINGS ||
-        pathname === ROUTES.MEMBERS_SETTINGS ||
-        pathname === ROUTES.BILLING ||
-        pathname === ROUTES.MY_SETTINGS);
-
-    if (
-      pathname !== ROUTES.WORKSPACE_LOCKED &&
-      !isRouteEnabledForWorkspaceBlockedState
-    ) {
-      return <Redirect to={ROUTES.WORKSPACE_LOCKED} />;
-    }
-  }
-
-  // Check for onboarding redirect (cloud users, first user, onboarding not complete)
-  if (
-    isCloudUserVal &&
-    !isFetchingOrgPreferences &&
-    orgPreferences &&
-    !isFetchingUsers &&
-    usersData &&
-    usersData.data
-  ) {
-    const isOnboardingComplete = orgPreferences?.find(
-      (preference: OrgPreference) =>
-        preference.name === ORG_PREFERENCES.ORG_ONBOARDING,
-    )?.value;
-
-    // Don't redirect to onboarding if workspace has issues
-    const isWorkspaceBlocked = trialInfo?.workSpaceBlock;
-    const isWorkspaceSuspended = activeLicense?.state === LicenseState.DEFAULTED;
-    const isWorkspaceAccessRestricted =
-      activeLicense?.state === LicenseState.TERMINATED ||
-      activeLicense?.state === LicenseState.EXPIRED ||
-      activeLicense?.state === LicenseState.CANCELLED;
-
-    const hasWorkspaceIssue =
-      isWorkspaceBlocked || isWorkspaceSuspended || isWorkspaceAccessRestricted;
-
-    if (!hasWorkspaceIssue) {
-      const isFirstUser = checkFirstTimeUser();
-      if (
-        isFirstUser &&
-        !isOnboardingComplete &&
-        !ROUTES_NOT_TO_BE_OVERRIDEN.includes(pathname) &&
-        pathname !== ROUTES.ONBOARDING
-      ) {
-        return <Redirect to={ROUTES.ONBOARDING} />;
-      }
-    }
-  }
-
-  // Check for GET_STARTED → GET_STARTED_WITH_CLOUD redirect (feature flag)
-  if (
-    currentRoute?.path === ROUTES.GET_STARTED &&
-    featureFlags?.find((e) => e.name === FeatureKeys.ONBOARDING_V3)?.active
-  ) {
-    return <Redirect to={ROUTES.GET_STARTED_WITH_CLOUD} />;
-  }
-
-  // Main routing logic
-  if (currentRoute) {
-    const { isPrivate, key } = currentRoute;
-    if (isPrivate) {
-      if (isLoggedInState) {
-        const route = routePermission[key];
-        if (route && route.find((e) => e === user.role) === undefined) {
-          return <Redirect to={ROUTES.UN_AUTHORIZED} />;
-        }
-      } else {
-        // Save current path and redirect to login
-        setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
-        return <Redirect to={ROUTES.LOGIN} />;
-      }
-    } else if (isLoggedInState) {
-      // Non-private route, but user is logged in
-      const fromPathname = getLocalStorageApi(
-        LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
-      );
-      if (fromPathname) {
-        setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
-        return <Redirect to={fromPathname} />;
-      }
-      if (pathname !== ROUTES.SOMETHING_WENT_WRONG) {
-        return <Redirect to={ROUTES.HOME} />;
-      }
-    }
-    // Non-private route, user not logged in - let login/signup pages handle it
-  } else if (isLoggedInState) {
-    // Unknown route, logged in
-    const fromPathname = getLocalStorageApi(
-      LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT,
-    );
-    if (fromPathname) {
-      setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, '');
-      return <Redirect to={fromPathname} />;
-    }
-    return <Redirect to={ROUTES.HOME} />;
-  } else {
-    // Unknown route, not logged in
-    setLocalStorageApi(LOCALSTORAGE.UNAUTHENTICATED_ROUTE_HIT, pathname);
-    return <Redirect to={ROUTES.LOGIN} />;
-  }

   // NOTE: disabling this rule as there is no need to have div
   return <>{children}</>;
 }
@@ -6,6 +6,7 @@ import { FeatureKeys } from 'constants/features';
|
||||
import { LOCALSTORAGE } from 'constants/localStorage';
|
||||
import { ORG_PREFERENCES } from 'constants/orgPreferences';
|
||||
import ROUTES from 'constants/routes';
|
||||
import history from 'lib/history';
|
||||
import { AppContext } from 'providers/App/App';
|
||||
import { IAppContext, IUser } from 'providers/App/types';
|
||||
import {
|
||||
@@ -21,6 +22,19 @@ import { ROLES, USER_ROLES } from 'types/roles';
|
||||
|
||||
import PrivateRoute from '../Private';
|
||||
|
||||
// Mock history module
|
||||
jest.mock('lib/history', () => ({
|
||||
__esModule: true,
|
||||
default: {
|
||||
push: jest.fn(),
|
||||
location: { pathname: '/', search: '', hash: '' },
|
||||
listen: jest.fn(),
|
||||
createHref: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const mockHistoryPush = history.push as jest.Mock;
|
||||
|
||||
// Mock localStorage APIs
|
||||
const mockLocalStorage: Record<string, string> = {};
|
||||
jest.mock('api/browser/localstorage/get', () => ({
|
||||
@@ -225,18 +239,20 @@ function renderPrivateRoute(options: RenderPrivateRouteOptions = {}): void {
|
||||
}
|
||||
|
||||
// Generic assertion helpers for navigation behavior
|
||||
// Using location-based assertions since Private.tsx now uses Redirect component
|
||||
// Using these allows easier refactoring when switching from history.push to Redirect component
|
||||
|
||||
async function assertRedirectsTo(targetRoute: string): Promise<void> {
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('location-display')).toHaveTextContent(targetRoute);
|
||||
expect(mockHistoryPush).toHaveBeenCalledWith(targetRoute);
|
||||
});
|
||||
}
|
||||
|
||||
function assertStaysOnRoute(expectedRoute: string): void {
|
||||
expect(screen.getByTestId('location-display')).toHaveTextContent(
|
||||
expectedRoute,
|
||||
);
|
||||
function assertNoRedirect(): void {
|
||||
expect(mockHistoryPush).not.toHaveBeenCalled();
|
||||
}
|
||||
|
||||
function assertDoesNotRedirectTo(targetRoute: string): void {
expect(mockHistoryPush).not.toHaveBeenCalledWith(targetRoute);
}

function assertRendersChildren(): void {
@@ -334,7 +350,7 @@ describe('PrivateRoute', () => {
});

assertRendersChildren();
assertStaysOnRoute('/public/dashboard/abc123');
assertNoRedirect();
});

it('should render children for public dashboard route when logged in without redirecting', () => {
@@ -346,7 +362,7 @@ describe('PrivateRoute', () => {
assertRendersChildren();
// Critical: without the isPublicDashboard early return, logged-in users
// would be redirected to HOME due to the non-private route handling
assertStaysOnRoute('/public/dashboard/abc123');
assertNoRedirect();
});
});

@@ -404,7 +420,7 @@ describe('PrivateRoute', () => {
});

assertRendersChildren();
assertStaysOnRoute(ROUTES.HOME);
assertNoRedirect();
});

it('should redirect to unauthorized when VIEWER tries to access admin-only route /alerts/new', async () => {
@@ -513,7 +529,7 @@ describe('PrivateRoute', () => {
appContext: { isLoggedIn: true },
});

assertStaysOnRoute(ROUTES.SOMETHING_WENT_WRONG);
assertDoesNotRedirectTo(ROUTES.HOME);
});
});

@@ -525,7 +541,7 @@ describe('PrivateRoute', () => {
});

// Should not redirect - login page handles its own routing
assertStaysOnRoute(ROUTES.LOGIN);
assertNoRedirect();
});

it('should not redirect when not logged in user visits signup page', () => {
@@ -534,7 +550,7 @@ describe('PrivateRoute', () => {
appContext: { isLoggedIn: false },
});

assertStaysOnRoute(ROUTES.SIGN_UP);
assertNoRedirect();
});

it('should not redirect when not logged in user visits password reset page', () => {
@@ -543,7 +559,7 @@ describe('PrivateRoute', () => {
appContext: { isLoggedIn: false },
});

assertStaysOnRoute(ROUTES.PASSWORD_RESET);
assertNoRedirect();
});

it('should not redirect when not logged in user visits forgot password page', () => {
@@ -552,7 +568,7 @@ describe('PrivateRoute', () => {
appContext: { isLoggedIn: false },
});

assertStaysOnRoute(ROUTES.FORGOT_PASSWORD);
assertNoRedirect();
});
});

@@ -641,7 +657,7 @@ describe('PrivateRoute', () => {
});

// Admin should be able to access settings even when workspace is blocked
assertStaysOnRoute(ROUTES.SETTINGS);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should allow ADMIN to access /settings/billing when workspace is blocked', () => {
@@ -657,7 +673,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.BILLING);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should allow ADMIN to access /settings/org-settings when workspace is blocked', () => {
@@ -673,7 +689,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.ORG_SETTINGS);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should allow ADMIN to access /settings/members when workspace is blocked', () => {
@@ -689,7 +705,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.MEMBERS_SETTINGS);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should allow ADMIN to access /settings/my-settings when workspace is blocked', () => {
@@ -705,7 +721,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.MY_SETTINGS);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should redirect VIEWER to workspace locked even when trying to access settings', async () => {
@@ -816,7 +832,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.WORKSPACE_LOCKED);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should not redirect self-hosted users to workspace locked even when workSpaceBlock is true', () => {
@@ -833,7 +849,7 @@ describe('PrivateRoute', () => {
isCloudUser: false,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});
});

@@ -903,7 +919,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
});

it('should not redirect self-hosted users to workspace access restricted when license is terminated', () => {
@@ -920,7 +936,7 @@ describe('PrivateRoute', () => {
isCloudUser: false,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
});

it('should not redirect when license is ACTIVE', () => {
@@ -937,7 +953,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
});

it('should not redirect when license is EVALUATING', () => {
@@ -954,7 +970,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
});
});

@@ -990,7 +1006,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.WORKSPACE_SUSPENDED);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_SUSPENDED);
});

it('should not redirect self-hosted users to workspace suspended when license is defaulted', () => {
@@ -1007,7 +1023,7 @@ describe('PrivateRoute', () => {
isCloudUser: false,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_SUSPENDED);
});
});

@@ -1027,11 +1043,6 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

// Wait for the users query to complete and trigger re-render
await act(async () => {
await Promise.resolve();
});

await assertRedirectsTo(ROUTES.ONBOARDING);
});

@@ -1047,7 +1058,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when onboarding is already complete', async () => {
@@ -1073,7 +1084,7 @@ describe('PrivateRoute', () => {

// Critical: if isOnboardingComplete check is broken (always false),
// this test would fail because all other conditions for redirect ARE met
assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding for non-cloud users', () => {
@@ -1088,7 +1099,7 @@ describe('PrivateRoute', () => {
isCloudUser: false,
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when on /workspace-locked route', () => {
@@ -1103,7 +1114,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.WORKSPACE_LOCKED);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when on /workspace-suspended route', () => {
@@ -1118,7 +1129,7 @@ describe('PrivateRoute', () => {
isCloudUser: true,
});

assertStaysOnRoute(ROUTES.WORKSPACE_SUSPENDED);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when workspace is blocked and accessing billing', async () => {
@@ -1145,7 +1156,7 @@ describe('PrivateRoute', () => {
});

// Should NOT redirect to onboarding - user needs to access billing to fix payment
assertStaysOnRoute(ROUTES.BILLING);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when workspace is blocked and accessing settings', async () => {
@@ -1169,7 +1180,7 @@ describe('PrivateRoute', () => {
await Promise.resolve();
});

assertStaysOnRoute(ROUTES.SETTINGS);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when workspace is suspended (DEFAULTED)', async () => {
@@ -1196,7 +1207,7 @@ describe('PrivateRoute', () => {
});

// Should redirect to WORKSPACE_SUSPENDED, not ONBOARDING
await assertRedirectsTo(ROUTES.WORKSPACE_SUSPENDED);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when workspace is access restricted (TERMINATED)', async () => {
@@ -1223,7 +1234,7 @@ describe('PrivateRoute', () => {
});

// Should redirect to WORKSPACE_ACCESS_RESTRICTED, not ONBOARDING
await assertRedirectsTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});

it('should not redirect to onboarding when workspace is access restricted (EXPIRED)', async () => {
@@ -1249,7 +1260,7 @@ describe('PrivateRoute', () => {
await Promise.resolve();
});

await assertRedirectsTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
assertDoesNotRedirectTo(ROUTES.ONBOARDING);
});
});

@@ -1291,7 +1302,7 @@ describe('PrivateRoute', () => {
},
});

assertStaysOnRoute(ROUTES.GET_STARTED);
assertDoesNotRedirectTo(ROUTES.GET_STARTED_WITH_CLOUD);
});

it('should not redirect when on GET_STARTED and ONBOARDING_V3 feature flag is not present', () => {
@@ -1303,7 +1314,7 @@ describe('PrivateRoute', () => {
},
});

assertStaysOnRoute(ROUTES.GET_STARTED);
assertDoesNotRedirectTo(ROUTES.GET_STARTED_WITH_CLOUD);
});

it('should not redirect when on different route even if ONBOARDING_V3 is active', () => {
@@ -1323,7 +1334,7 @@ describe('PrivateRoute', () => {
},
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.GET_STARTED_WITH_CLOUD);
});
});

@@ -1339,7 +1350,7 @@ describe('PrivateRoute', () => {
},
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
});

it('should not fetch users when org data is not available', () => {
@@ -1382,7 +1393,9 @@ describe('PrivateRoute', () => {
},
});

assertStaysOnRoute(ROUTES.HOME);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_LOCKED);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_SUSPENDED);
assertDoesNotRedirectTo(ROUTES.WORKSPACE_ACCESS_RESTRICTED);
});
});

@@ -1423,40 +1436,22 @@ describe('PrivateRoute', () => {
await assertRedirectsTo(ROUTES.UN_AUTHORIZED);
});

it('should allow ADMIN to access /services route', () => {
renderPrivateRoute({
initialRoute: ROUTES.APPLICATION,
appContext: {
isLoggedIn: true,
user: createMockUser({ role: USER_ROLES.ADMIN as ROLES }),
},
it('should allow all roles to access /services route', () => {
const roles = [USER_ROLES.ADMIN, USER_ROLES.EDITOR, USER_ROLES.VIEWER];

roles.forEach((role) => {
jest.clearAllMocks();

renderPrivateRoute({
initialRoute: ROUTES.APPLICATION,
appContext: {
isLoggedIn: true,
user: createMockUser({ role: role as ROLES }),
},
});

assertDoesNotRedirectTo(ROUTES.UN_AUTHORIZED);
});

assertStaysOnRoute(ROUTES.APPLICATION);
});

it('should allow EDITOR to access /services route', () => {
renderPrivateRoute({
initialRoute: ROUTES.APPLICATION,
appContext: {
isLoggedIn: true,
user: createMockUser({ role: USER_ROLES.EDITOR as ROLES }),
},
});

assertStaysOnRoute(ROUTES.APPLICATION);
});

it('should allow VIEWER to access /services route', () => {
renderPrivateRoute({
initialRoute: ROUTES.APPLICATION,
appContext: {
isLoggedIn: true,
user: createMockUser({ role: USER_ROLES.VIEWER as ROLES }),
},
});

assertStaysOnRoute(ROUTES.APPLICATION);
});

it('should redirect VIEWER from /onboarding route (admin only)', async () => {
@@ -1486,7 +1481,7 @@ describe('PrivateRoute', () => {
});

assertRendersChildren();
assertStaysOnRoute(ROUTES.CHANNELS_NEW);
assertDoesNotRedirectTo(ROUTES.UN_AUTHORIZED);
});

it('should allow EDITOR to access /get-started route', () => {
@@ -1498,7 +1493,7 @@ describe('PrivateRoute', () => {
},
});

assertStaysOnRoute(ROUTES.GET_STARTED);
assertDoesNotRedirectTo(ROUTES.UN_AUTHORIZED);
});
});

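The assertion helpers in this test diff read the current route from a probe element rendered alongside the component under test. The following is a minimal sketch of such a harness, assuming react-router's MemoryRouter as used in the SigNoz frontend; the `LocationDisplay` component and `renderWithRouter` wrapper names are illustrative, not part of the diff:

```tsx
import { render } from '@testing-library/react';
import { MemoryRouter, useLocation } from 'react-router-dom';

// Exposes the current pathname under the 'location-display' test id that
// assertStaysOnRoute / assertRedirectsTo read.
function LocationDisplay(): JSX.Element {
  const location = useLocation();
  return <div data-testid="location-display">{location.pathname}</div>;
}

function renderWithRouter(ui: JSX.Element, initialRoute: string): void {
  render(
    <MemoryRouter initialEntries={[initialRoute]}>
      {ui}
      <LocationDisplay />
    </MemoryRouter>,
  );
}
```
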
@@ -24,8 +24,8 @@ import type {
AgentCheckInDeprecated200,
AgentCheckInDeprecatedPathParameters,
AgentCheckInPathParameters,
CloudintegrationtypesPostableAccountDTO,
CloudintegrationtypesPostableAgentCheckInDTO,
CloudintegrationtypesConnectionArtifactRequestDTO,
CloudintegrationtypesPostableAgentCheckInRequestDTO,
CloudintegrationtypesUpdatableAccountDTO,
CloudintegrationtypesUpdatableServiceDTO,
CreateAccount200,
@@ -33,8 +33,6 @@ import type {
DisconnectAccountPathParameters,
GetAccount200,
GetAccountPathParameters,
GetConnectionCredentials200,
GetConnectionCredentialsPathParameters,
GetService200,
GetServicePathParameters,
ListAccounts200,
@@ -53,14 +51,14 @@ import type {
*/
export const agentCheckInDeprecated = (
{ cloudProvider }: AgentCheckInDeprecatedPathParameters,
cloudintegrationtypesPostableAgentCheckInDTO: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>,
cloudintegrationtypesPostableAgentCheckInRequestDTO: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<AgentCheckInDeprecated200>({
url: `/api/v1/cloud-integrations/${cloudProvider}/agent-check-in`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: cloudintegrationtypesPostableAgentCheckInDTO,
data: cloudintegrationtypesPostableAgentCheckInRequestDTO,
signal,
});
};
@@ -74,7 +72,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
TError,
{
pathParams: AgentCheckInDeprecatedPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
>;
@@ -83,7 +81,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
TError,
{
pathParams: AgentCheckInDeprecatedPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
> => {
@@ -100,7 +98,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
Awaited<ReturnType<typeof agentCheckInDeprecated>>,
{
pathParams: AgentCheckInDeprecatedPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
@@ -114,7 +112,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
export type AgentCheckInDeprecatedMutationResult = NonNullable<
Awaited<ReturnType<typeof agentCheckInDeprecated>>
>;
export type AgentCheckInDeprecatedMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
export type AgentCheckInDeprecatedMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
export type AgentCheckInDeprecatedMutationError = ErrorType<RenderErrorResponseDTO>;

/**
@@ -130,7 +128,7 @@ export const useAgentCheckInDeprecated = <
TError,
{
pathParams: AgentCheckInDeprecatedPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
>;
@@ -139,7 +137,7 @@ export const useAgentCheckInDeprecated = <
TError,
{
pathParams: AgentCheckInDeprecatedPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
> => {
@@ -257,14 +255,14 @@ export const invalidateListAccounts = async (
*/
export const createAccount = (
{ cloudProvider }: CreateAccountPathParameters,
cloudintegrationtypesPostableAccountDTO: BodyType<CloudintegrationtypesPostableAccountDTO>,
cloudintegrationtypesConnectionArtifactRequestDTO: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<CreateAccount200>({
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: cloudintegrationtypesPostableAccountDTO,
data: cloudintegrationtypesConnectionArtifactRequestDTO,
signal,
});
};
@@ -278,7 +276,7 @@ export const getCreateAccountMutationOptions = <
TError,
{
pathParams: CreateAccountPathParameters;
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
},
TContext
>;
@@ -287,7 +285,7 @@ export const getCreateAccountMutationOptions = <
TError,
{
pathParams: CreateAccountPathParameters;
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
},
TContext
> => {
@@ -304,7 +302,7 @@ export const getCreateAccountMutationOptions = <
Awaited<ReturnType<typeof createAccount>>,
{
pathParams: CreateAccountPathParameters;
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
@@ -318,7 +316,7 @@ export const getCreateAccountMutationOptions = <
export type CreateAccountMutationResult = NonNullable<
Awaited<ReturnType<typeof createAccount>>
>;
export type CreateAccountMutationBody = BodyType<CloudintegrationtypesPostableAccountDTO>;
export type CreateAccountMutationBody = BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
export type CreateAccountMutationError = ErrorType<RenderErrorResponseDTO>;

/**
@@ -333,7 +331,7 @@ export const useCreateAccount = <
TError,
{
pathParams: CreateAccountPathParameters;
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
},
TContext
>;
@@ -342,7 +340,7 @@ export const useCreateAccount = <
TError,
{
pathParams: CreateAccountPathParameters;
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
},
TContext
> => {
@@ -630,117 +628,20 @@ export const useUpdateAccount = <

return useMutation(mutationOptions);
};
/**
* This endpoint updates a service for the specified cloud provider
* @summary Update service
*/
export const updateService = (
{ cloudProvider, id, serviceId }: UpdateServicePathParameters,
cloudintegrationtypesUpdatableServiceDTO: BodyType<CloudintegrationtypesUpdatableServiceDTO>,
) => {
return GeneratedAPIInstance<void>({
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts/${id}/services/${serviceId}`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: cloudintegrationtypesUpdatableServiceDTO,
});
};

export const getUpdateServiceMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
> => {
const mutationKey = ['updateService'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof updateService>>,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};

return updateService(pathParams, data);
};

return { mutationFn, ...mutationOptions };
};

export type UpdateServiceMutationResult = NonNullable<
Awaited<ReturnType<typeof updateService>>
>;
export type UpdateServiceMutationBody = BodyType<CloudintegrationtypesUpdatableServiceDTO>;
export type UpdateServiceMutationError = ErrorType<RenderErrorResponseDTO>;

/**
* @summary Update service
*/
export const useUpdateService = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
> => {
const mutationOptions = getUpdateServiceMutationOptions(options);

return useMutation(mutationOptions);
};
/**
* This endpoint is called by the deployed agent to check in
* @summary Agent check-in
*/
export const agentCheckIn = (
{ cloudProvider }: AgentCheckInPathParameters,
cloudintegrationtypesPostableAgentCheckInDTO: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>,
cloudintegrationtypesPostableAgentCheckInRequestDTO: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<AgentCheckIn200>({
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts/check_in`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: cloudintegrationtypesPostableAgentCheckInDTO,
data: cloudintegrationtypesPostableAgentCheckInRequestDTO,
signal,
});
};
@@ -754,7 +655,7 @@ export const getAgentCheckInMutationOptions = <
TError,
{
pathParams: AgentCheckInPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
>;
@@ -763,7 +664,7 @@ export const getAgentCheckInMutationOptions = <
TError,
{
pathParams: AgentCheckInPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
> => {
@@ -780,7 +681,7 @@ export const getAgentCheckInMutationOptions = <
Awaited<ReturnType<typeof agentCheckIn>>,
{
pathParams: AgentCheckInPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};
@@ -794,7 +695,7 @@ export const getAgentCheckInMutationOptions = <
export type AgentCheckInMutationResult = NonNullable<
Awaited<ReturnType<typeof agentCheckIn>>
>;
export type AgentCheckInMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
export type AgentCheckInMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
export type AgentCheckInMutationError = ErrorType<RenderErrorResponseDTO>;

/**
@@ -809,7 +710,7 @@ export const useAgentCheckIn = <
TError,
{
pathParams: AgentCheckInPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
>;
@@ -818,7 +719,7 @@ export const useAgentCheckIn = <
TError,
{
pathParams: AgentCheckInPathParameters;
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
},
TContext
> => {
@@ -826,114 +727,6 @@ export const useAgentCheckIn = <

return useMutation(mutationOptions);
};
/**
* This endpoint retrieves the connection credentials required for integration
* @summary Get connection credentials
*/
export const getConnectionCredentials = (
{ cloudProvider }: GetConnectionCredentialsPathParameters,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<GetConnectionCredentials200>({
url: `/api/v1/cloud_integrations/${cloudProvider}/credentials`,
method: 'GET',
signal,
});
};

export const getGetConnectionCredentialsQueryKey = ({
cloudProvider,
}: GetConnectionCredentialsPathParameters) => {
return [`/api/v1/cloud_integrations/${cloudProvider}/credentials`] as const;
};

export const getGetConnectionCredentialsQueryOptions = <
TData = Awaited<ReturnType<typeof getConnectionCredentials>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ cloudProvider }: GetConnectionCredentialsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getConnectionCredentials>>,
TError,
TData
>;
},
) => {
const { query: queryOptions } = options ?? {};

const queryKey =
queryOptions?.queryKey ??
getGetConnectionCredentialsQueryKey({ cloudProvider });

const queryFn: QueryFunction<
Awaited<ReturnType<typeof getConnectionCredentials>>
> = ({ signal }) => getConnectionCredentials({ cloudProvider }, signal);

return {
queryKey,
queryFn,
enabled: !!cloudProvider,
...queryOptions,
} as UseQueryOptions<
Awaited<ReturnType<typeof getConnectionCredentials>>,
TError,
TData
> & { queryKey: QueryKey };
};

export type GetConnectionCredentialsQueryResult = NonNullable<
Awaited<ReturnType<typeof getConnectionCredentials>>
>;
export type GetConnectionCredentialsQueryError = ErrorType<RenderErrorResponseDTO>;

/**
* @summary Get connection credentials
*/

export function useGetConnectionCredentials<
TData = Awaited<ReturnType<typeof getConnectionCredentials>>,
TError = ErrorType<RenderErrorResponseDTO>
>(
{ cloudProvider }: GetConnectionCredentialsPathParameters,
options?: {
query?: UseQueryOptions<
Awaited<ReturnType<typeof getConnectionCredentials>>,
TError,
TData
>;
},
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
const queryOptions = getGetConnectionCredentialsQueryOptions(
{ cloudProvider },
options,
);

const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
queryKey: QueryKey;
};

query.queryKey = queryOptions.queryKey;

return query;
}

/**
* @summary Get connection credentials
*/
export const invalidateGetConnectionCredentials = async (
queryClient: QueryClient,
{ cloudProvider }: GetConnectionCredentialsPathParameters,
options?: InvalidateOptions,
): Promise<QueryClient> => {
await queryClient.invalidateQueries(
{ queryKey: getGetConnectionCredentialsQueryKey({ cloudProvider }) },
options,
);

return queryClient;
};

/**
* This endpoint lists the services metadata for the specified cloud provider
* @summary List services metadata
@@ -1148,3 +941,101 @@ export const invalidateGetService = async (

return queryClient;
};

/**
* This endpoint updates a service for the specified cloud provider
* @summary Update service
*/
export const updateService = (
{ cloudProvider, serviceId }: UpdateServicePathParameters,
cloudintegrationtypesUpdatableServiceDTO: BodyType<CloudintegrationtypesUpdatableServiceDTO>,
) => {
return GeneratedAPIInstance<void>({
url: `/api/v1/cloud_integrations/${cloudProvider}/services/${serviceId}`,
method: 'PUT',
headers: { 'Content-Type': 'application/json' },
data: cloudintegrationtypesUpdatableServiceDTO,
});
};

export const getUpdateServiceMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
> => {
const mutationKey = ['updateService'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof updateService>>,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
}
> = (props) => {
const { pathParams, data } = props ?? {};

return updateService(pathParams, data);
};

return { mutationFn, ...mutationOptions };
};

export type UpdateServiceMutationResult = NonNullable<
Awaited<ReturnType<typeof updateService>>
>;
export type UpdateServiceMutationBody = BodyType<CloudintegrationtypesUpdatableServiceDTO>;
export type UpdateServiceMutationError = ErrorType<RenderErrorResponseDTO>;

/**
* @summary Update service
*/
export const useUpdateService = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof updateService>>,
TError,
{
pathParams: UpdateServicePathParameters;
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
},
TContext
> => {
const mutationOptions = getUpdateServiceMutationOptions(options);

return useMutation(mutationOptions);
};

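For callers migrating through this change: `updateService` is no longer nested under an account, so the `id` path param disappears and only `cloudProvider` and `serviceId` remain. A hedged usage sketch follows; the import path and the exact `UpdatableServiceDTO` payload shape are assumptions for illustration, not confirmed by the diff:

```tsx
import { useUpdateService } from 'api/generated/services/cloud-integrations';

function DisableMetricsButton({ serviceId }: { serviceId: string }): JSX.Element {
  const { mutate, isLoading } = useUpdateService();

  return (
    <button
      type="button"
      disabled={isLoading}
      onClick={(): void =>
        mutate({
          // No account `id` any more: /cloud_integrations/{cloudProvider}/services/{serviceId}
          pathParams: { cloudProvider: 'aws', serviceId },
          // Illustrative body; the real shape is CloudintegrationtypesUpdatableServiceDTO.
          data: { config: { aws: { metrics: { enabled: false } } } },
        })
      }
    >
      Disable metrics
    </button>
  );
}
```
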
104 frontend/src/api/generated/services/infra-monitoring/index.ts Normal file
@@ -0,0 +1,104 @@
/**
* ! Do not edit manually
* * The file has been auto-generated using Orval for SigNoz
* * regenerate with 'yarn generate:api'
* SigNoz
*/
import type {
MutationFunction,
UseMutationOptions,
UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';

import type { BodyType, ErrorType } from '../../../generatedAPIInstance';
import { GeneratedAPIInstance } from '../../../generatedAPIInstance';
import type {
HostsList200,
InframonitoringtypesHostsListRequestDTO,
RenderErrorResponseDTO,
} from '../sigNoz.schemas';

/**
* This endpoint returns a list of hosts along with other information for each of them
* @summary List Hosts for Infra Monitoring
*/
export const hostsList = (
inframonitoringtypesHostsListRequestDTO: BodyType<InframonitoringtypesHostsListRequestDTO>,
signal?: AbortSignal,
) => {
return GeneratedAPIInstance<HostsList200>({
url: `/api/v2/infra-monitoring/hosts/list`,
method: 'POST',
headers: { 'Content-Type': 'application/json' },
data: inframonitoringtypesHostsListRequestDTO,
signal,
});
};

export const getHostsListMutationOptions = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof hostsList>>,
TError,
{ data: BodyType<InframonitoringtypesHostsListRequestDTO> },
TContext
>;
}): UseMutationOptions<
Awaited<ReturnType<typeof hostsList>>,
TError,
{ data: BodyType<InframonitoringtypesHostsListRequestDTO> },
TContext
> => {
const mutationKey = ['hostsList'];
const { mutation: mutationOptions } = options
? options.mutation &&
'mutationKey' in options.mutation &&
options.mutation.mutationKey
? options
: { ...options, mutation: { ...options.mutation, mutationKey } }
: { mutation: { mutationKey } };

const mutationFn: MutationFunction<
Awaited<ReturnType<typeof hostsList>>,
{ data: BodyType<InframonitoringtypesHostsListRequestDTO> }
> = (props) => {
const { data } = props ?? {};

return hostsList(data);
};

return { mutationFn, ...mutationOptions };
};

export type HostsListMutationResult = NonNullable<
Awaited<ReturnType<typeof hostsList>>
>;
export type HostsListMutationBody = BodyType<InframonitoringtypesHostsListRequestDTO>;
export type HostsListMutationError = ErrorType<RenderErrorResponseDTO>;

/**
* @summary List Hosts for Infra Monitoring
*/
export const useHostsList = <
TError = ErrorType<RenderErrorResponseDTO>,
TContext = unknown
>(options?: {
mutation?: UseMutationOptions<
Awaited<ReturnType<typeof hostsList>>,
TError,
{ data: BodyType<InframonitoringtypesHostsListRequestDTO> },
TContext
>;
}): UseMutationResult<
Awaited<ReturnType<typeof hostsList>>,
TError,
{ data: BodyType<InframonitoringtypesHostsListRequestDTO> },
TContext
> => {
const mutationOptions = getHostsListMutationOptions(options);

return useMutation(mutationOptions);
};

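Because the hosts list is a POST, Orval models it as a react-query mutation rather than a query, so callers drive it manually. A hedged usage sketch; the import path and the payload values are illustrative:

```ts
import { useHostsList } from 'api/generated/services/infra-monitoring';

function useFetchHosts(): () => void {
  const { mutate } = useHostsList();

  return (): void =>
    mutate({
      data: {
        // Fields per InframonitoringtypesHostsListRequestDTO; values are examples.
        start: Date.now() - 15 * 60 * 1000,
        end: Date.now(),
        limit: 10,
        offset: 0,
      },
    });
}
```
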
@@ -512,58 +512,27 @@ export interface CloudintegrationtypesAWSAccountConfigDTO {
regions: string[];
}

export interface CloudintegrationtypesAWSCloudWatchLogsSubscriptionDTO {
/**
* @type string
*/
filterPattern: string;
/**
* @type string
*/
logGroupNamePrefix: string;
}
export type CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets = {
[key: string]: string[];
};

export interface CloudintegrationtypesAWSCloudWatchMetricStreamFilterDTO {
export interface CloudintegrationtypesAWSCollectionStrategyDTO {
aws_logs?: CloudintegrationtypesAWSLogsStrategyDTO;
aws_metrics?: CloudintegrationtypesAWSMetricsStrategyDTO;
/**
* @type array
* @type object
*/
metricNames?: string[];
/**
* @type string
*/
namespace: string;
s3_buckets?: CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets;
}

export interface CloudintegrationtypesAWSConnectionArtifactDTO {
/**
* @type string
*/
connectionUrl: string;
connectionURL: string;
}

export interface CloudintegrationtypesAWSIntegrationConfigDTO {
/**
* @type array
*/
enabledRegions: string[];
telemetryCollectionStrategy: CloudintegrationtypesAWSTelemetryCollectionStrategyDTO;
}

export interface CloudintegrationtypesAWSLogsCollectionStrategyDTO {
/**
* @type array
*/
subscriptions: CloudintegrationtypesAWSCloudWatchLogsSubscriptionDTO[];
}

export interface CloudintegrationtypesAWSMetricsCollectionStrategyDTO {
/**
* @type array
*/
streamFilters: CloudintegrationtypesAWSCloudWatchMetricStreamFilterDTO[];
}

export interface CloudintegrationtypesAWSPostableAccountConfigDTO {
export interface CloudintegrationtypesAWSConnectionArtifactRequestDTO {
/**
* @type string
*/
@@ -574,6 +543,56 @@ export interface CloudintegrationtypesAWSPostableAccountConfigDTO {
regions: string[];
}

export interface CloudintegrationtypesAWSIntegrationConfigDTO {
/**
* @type array
*/
enabledRegions: string[];
telemetry: CloudintegrationtypesAWSCollectionStrategyDTO;
}

export type CloudintegrationtypesAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem = {
/**
* @type string
*/
filter_pattern?: string;
/**
* @type string
*/
log_group_name_prefix?: string;
};

export interface CloudintegrationtypesAWSLogsStrategyDTO {
/**
* @type array
* @nullable true
*/
cloudwatch_logs_subscriptions?:
| CloudintegrationtypesAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem[]
| null;
}

export type CloudintegrationtypesAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem = {
/**
* @type array
*/
MetricNames?: string[];
/**
* @type string
*/
Namespace?: string;
};

export interface CloudintegrationtypesAWSMetricsStrategyDTO {
/**
* @type array
* @nullable true
*/
cloudwatch_metric_stream_filters?:
| CloudintegrationtypesAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem[]
| null;
}
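Taken together, the new strategy types replace the camelCase CloudWatch filter and subscription DTOs with snake_case fields. An illustrative literal that type-checks against the interfaces above (the bucket, namespace, and prefix values are made up):

```ts
const strategy: CloudintegrationtypesAWSCollectionStrategyDTO = {
  aws_logs: {
    cloudwatch_logs_subscriptions: [
      { log_group_name_prefix: '/aws/lambda/', filter_pattern: '' },
    ],
  },
  aws_metrics: {
    cloudwatch_metric_stream_filters: [
      { Namespace: 'AWS/RDS', MetricNames: ['CPUUtilization'] },
    ],
  },
  // Maps a region to the S3 buckets to sync logs from.
  s3_buckets: { 'us-east-1': ['example-logs-bucket'] },
};
```
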
export interface CloudintegrationtypesAWSServiceConfigDTO {
logs?: CloudintegrationtypesAWSServiceLogsConfigDTO;
metrics?: CloudintegrationtypesAWSServiceMetricsConfigDTO;
@@ -591,7 +610,7 @@ export interface CloudintegrationtypesAWSServiceLogsConfigDTO {
/**
* @type object
*/
s3Buckets?: CloudintegrationtypesAWSServiceLogsConfigDTOS3Buckets;
s3_buckets?: CloudintegrationtypesAWSServiceLogsConfigDTOS3Buckets;
}

export interface CloudintegrationtypesAWSServiceMetricsConfigDTO {
@@ -601,19 +620,6 @@ export interface CloudintegrationtypesAWSServiceMetricsConfigDTO {
enabled?: boolean;
}

export type CloudintegrationtypesAWSTelemetryCollectionStrategyDTOS3Buckets = {
[key: string]: string[];
};

export interface CloudintegrationtypesAWSTelemetryCollectionStrategyDTO {
logs?: CloudintegrationtypesAWSLogsCollectionStrategyDTO;
metrics?: CloudintegrationtypesAWSMetricsCollectionStrategyDTO;
/**
* @type object
*/
s3Buckets?: CloudintegrationtypesAWSTelemetryCollectionStrategyDTOS3Buckets;
}

export interface CloudintegrationtypesAccountDTO {
agentReport: CloudintegrationtypesAgentReportDTO;
config: CloudintegrationtypesAccountConfigDTO;
@@ -687,32 +693,6 @@ export interface CloudintegrationtypesAssetsDTO {
dashboards?: CloudintegrationtypesDashboardDTO[] | null;
}

/**
* @nullable
*/
export type CloudintegrationtypesCloudIntegrationServiceDTO = {
/**
* @type string
*/
cloudIntegrationId?: string;
config?: CloudintegrationtypesServiceConfigDTO;
/**
* @type string
* @format date-time
*/
createdAt?: Date;
/**
* @type string
*/
id: string;
type?: CloudintegrationtypesServiceIDDTO;
/**
* @type string
* @format date-time
*/
updatedAt?: Date;
} | null;

export interface CloudintegrationtypesCollectedLogAttributeDTO {
/**
* @type string
@@ -747,27 +727,16 @@ export interface CloudintegrationtypesCollectedMetricDTO {
unit?: string;
}

export interface CloudintegrationtypesCollectionStrategyDTO {
aws: CloudintegrationtypesAWSCollectionStrategyDTO;
}

export interface CloudintegrationtypesConnectionArtifactDTO {
aws: CloudintegrationtypesAWSConnectionArtifactDTO;
}

export interface CloudintegrationtypesCredentialsDTO {
/**
* @type string
*/
ingestionKey: string;
/**
* @type string
*/
ingestionUrl: string;
/**
* @type string
*/
sigNozApiKey: string;
/**
* @type string
*/
sigNozApiUrl: string;
export interface CloudintegrationtypesConnectionArtifactRequestDTO {
aws: CloudintegrationtypesAWSConnectionArtifactRequestDTO;
}

export interface CloudintegrationtypesDashboardDTO {
@@ -799,7 +768,7 @@ export interface CloudintegrationtypesDataCollectedDTO {
metrics?: CloudintegrationtypesCollectedMetricDTO[] | null;
}

export interface CloudintegrationtypesGettableAccountWithConnectionArtifactDTO {
export interface CloudintegrationtypesGettableAccountWithArtifactDTO {
connectionArtifact: CloudintegrationtypesConnectionArtifactDTO;
/**
* @type string
@@ -814,7 +783,7 @@ export interface CloudintegrationtypesGettableAccountsDTO {
accounts: CloudintegrationtypesAccountDTO[];
}

export interface CloudintegrationtypesGettableAgentCheckInDTO {
export interface CloudintegrationtypesGettableAgentCheckInResponseDTO {
/**
* @type string
*/
@@ -862,85 +831,17 @@ export type CloudintegrationtypesIntegrationConfigDTO = {
* @type array
*/
enabled_regions: string[];
telemetry: CloudintegrationtypesOldAWSCollectionStrategyDTO;
telemetry: CloudintegrationtypesAWSCollectionStrategyDTO;
} | null;

export type CloudintegrationtypesOldAWSCollectionStrategyDTOS3Buckets = {
[key: string]: string[];
};

export interface CloudintegrationtypesOldAWSCollectionStrategyDTO {
aws_logs?: CloudintegrationtypesOldAWSLogsStrategyDTO;
aws_metrics?: CloudintegrationtypesOldAWSMetricsStrategyDTO;
/**
* @type string
*/
provider?: string;
/**
* @type object
*/
s3_buckets?: CloudintegrationtypesOldAWSCollectionStrategyDTOS3Buckets;
}

export type CloudintegrationtypesOldAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem = {
/**
* @type string
*/
filter_pattern?: string;
/**
* @type string
*/
log_group_name_prefix?: string;
};

export interface CloudintegrationtypesOldAWSLogsStrategyDTO {
/**
* @type array
* @nullable true
*/
cloudwatch_logs_subscriptions?:
| CloudintegrationtypesOldAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem[]
| null;
}

export type CloudintegrationtypesOldAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem = {
/**
* @type array
*/
MetricNames?: string[];
/**
* @type string
*/
Namespace?: string;
};

export interface CloudintegrationtypesOldAWSMetricsStrategyDTO {
/**
* @type array
* @nullable true
*/
cloudwatch_metric_stream_filters?:
| CloudintegrationtypesOldAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem[]
| null;
}

export interface CloudintegrationtypesPostableAccountDTO {
config: CloudintegrationtypesPostableAccountConfigDTO;
credentials: CloudintegrationtypesCredentialsDTO;
}

export interface CloudintegrationtypesPostableAccountConfigDTO {
aws: CloudintegrationtypesAWSPostableAccountConfigDTO;
}

/**
* @nullable
*/
export type CloudintegrationtypesPostableAgentCheckInDTOData = {
export type CloudintegrationtypesPostableAgentCheckInRequestDTOData = {
[key: string]: unknown;
} | null;

export interface CloudintegrationtypesPostableAgentCheckInDTO {
export interface CloudintegrationtypesPostableAgentCheckInRequestDTO {
/**
* @type string
*/
@@ -957,7 +858,7 @@ export interface CloudintegrationtypesPostableAgentCheckInDTO {
* @type object
* @nullable true
*/
data: CloudintegrationtypesPostableAgentCheckInDTOData;
data: CloudintegrationtypesPostableAgentCheckInRequestDTOData;
/**
* @type string
*/
@@ -970,7 +871,6 @@ export interface CloudintegrationtypesProviderIntegrationConfigDTO {

export interface CloudintegrationtypesServiceDTO {
assets: CloudintegrationtypesAssetsDTO;
cloudIntegrationService: CloudintegrationtypesCloudIntegrationServiceDTO;
dataCollected: CloudintegrationtypesDataCollectedDTO;
/**
* @type string
@@ -984,8 +884,9 @@ export interface CloudintegrationtypesServiceDTO {
* @type string
*/
overview: string;
supportedSignals: CloudintegrationtypesSupportedSignalsDTO;
telemetryCollectionStrategy: CloudintegrationtypesTelemetryCollectionStrategyDTO;
serviceConfig?: CloudintegrationtypesServiceConfigDTO;
supported_signals: CloudintegrationtypesSupportedSignalsDTO;
telemetryCollectionStrategy: CloudintegrationtypesCollectionStrategyDTO;
/**
* @type string
*/
@@ -996,21 +897,6 @@ export interface CloudintegrationtypesServiceConfigDTO {
aws: CloudintegrationtypesAWSServiceConfigDTO;
}

export enum CloudintegrationtypesServiceIDDTO {
alb = 'alb',
'api-gateway' = 'api-gateway',
dynamodb = 'dynamodb',
ec2 = 'ec2',
ecs = 'ecs',
eks = 'eks',
elasticache = 'elasticache',
lambda = 'lambda',
msk = 'msk',
rds = 'rds',
s3sync = 's3sync',
sns = 'sns',
sqs = 'sqs',
}
export interface CloudintegrationtypesServiceMetadataDTO {
/**
* @type boolean
@@ -1041,10 +927,6 @@ export interface CloudintegrationtypesSupportedSignalsDTO {
metrics?: boolean;
}

export interface CloudintegrationtypesTelemetryCollectionStrategyDTO {
aws: CloudintegrationtypesAWSTelemetryCollectionStrategyDTO;
}

export interface CloudintegrationtypesUpdatableAccountDTO {
config: CloudintegrationtypesAccountConfigDTO;
}
@@ -1448,6 +1330,112 @@ export interface GlobaltypesTokenizerConfigDTO {
enabled?: boolean;
}

/**
* @nullable
*/
export type InframonitoringtypesHostRecordDTOMeta = {
[key: string]: unknown;
} | null;

export interface InframonitoringtypesHostRecordDTO {
/**
* @type number
* @format double
*/
cpu?: number;
/**
* @type number
* @format double
*/
diskUsage?: number;
/**
* @type string
*/
hostName?: string;
/**
* @type number
* @format double
*/
load15?: number;
/**
* @type number
* @format double
*/
memory?: number;
/**
* @type object
* @nullable true
*/
meta?: InframonitoringtypesHostRecordDTOMeta;
/**
* @type string
*/
status?: string;
/**
* @type number
* @format double
*/
wait?: number;
}

export enum InframonitoringtypesHostStatusDTO {
active = 'active',
inactive = 'inactive',
'' = '',
}
export interface InframonitoringtypesHostsListRequestDTO {
/**
* @type integer
* @format int64
*/
end?: number;
filter?: Querybuildertypesv5FilterDTO;
filterByStatus?: InframonitoringtypesHostStatusDTO;
/**
* @type array
* @nullable true
*/
groupBy?: Querybuildertypesv5GroupByKeyDTO[] | null;
/**
* @type integer
*/
limit?: number;
/**
* @type integer
*/
offset?: number;
orderBy?: Querybuildertypesv5OrderByDTO;
/**
* @type integer
* @format int64
*/
start?: number;
}

export interface InframonitoringtypesHostsListResponseDTO {
/**
* @type boolean
*/
endTimeBeforeRetention?: boolean;
/**
* @type array
* @nullable true
*/
records?: InframonitoringtypesHostRecordDTO[] | null;
/**
* @type boolean
*/
sentAnyMetricsData?: boolean;
/**
* @type integer
*/
total?: number;
/**
* @type string
*/
type?: string;
}

export interface MetricsexplorertypesInspectMetricsRequestDTO {
/**
* @type integer
@@ -3568,7 +3556,7 @@ export type AgentCheckInDeprecatedPathParameters = {
cloudProvider: string;
};
export type AgentCheckInDeprecated200 = {
data: CloudintegrationtypesGettableAgentCheckInDTO;
data: CloudintegrationtypesGettableAgentCheckInResponseDTO;
/**
* @type string
*/
@@ -3590,7 +3578,7 @@ export type CreateAccountPathParameters = {
cloudProvider: string;
};
export type CreateAccount200 = {
data: CloudintegrationtypesGettableAccountWithConnectionArtifactDTO;
data: CloudintegrationtypesGettableAccountWithArtifactDTO;
/**
* @type string
*/
@@ -3617,27 +3605,11 @@ export type UpdateAccountPathParameters = {
cloudProvider: string;
id: string;
};
export type UpdateServicePathParameters = {
cloudProvider: string;
id: string;
serviceId: string;
};
export type AgentCheckInPathParameters = {
cloudProvider: string;
};
export type AgentCheckIn200 = {
data: CloudintegrationtypesGettableAgentCheckInDTO;
/**
* @type string
*/
status: string;
};

export type GetConnectionCredentialsPathParameters = {
cloudProvider: string;
};
export type GetConnectionCredentials200 = {
data: CloudintegrationtypesCredentialsDTO;
data: CloudintegrationtypesGettableAgentCheckInResponseDTO;
/**
* @type string
*/
@@ -3667,6 +3639,10 @@ export type GetService200 = {
status: string;
};

export type UpdateServicePathParameters = {
cloudProvider: string;
serviceId: string;
};
export type CreateSessionByGoogleCallback303 = {
data: AuthtypesGettableTokenDTO;
/**
@@ -4322,6 +4298,14 @@ export type Healthz503 = {
status: string;
};

export type HostsList200 = {
data: InframonitoringtypesHostsListResponseDTO;
/**
* @type string
*/
status: string;
};

export type Livez200 = {
data: FactoryResponseDTO;
/**

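Every generated 200 type in this schema keeps the same `{ data, status }` envelope, so unwrapping is uniform across endpoints. A small illustrative helper, noting that the schema only types `status` as a plain string, so the `'success'` literal below is an assumption:

```ts
function unwrapHostsList(
  res: HostsList200,
): InframonitoringtypesHostsListResponseDTO {
  // `status` is an untyped string in the schema; 'success' is assumed here.
  if (res.status !== 'success') {
    throw new Error(`Unexpected status: ${res.status}`);
  }
  return res.data;
}
```
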
@@ -14,8 +14,6 @@ import type { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schem
import { AxiosError } from 'axios';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import { parseAsBoolean, useQueryState } from 'nuqs';
import { useErrorModal } from 'providers/ErrorModalProvider';
import APIError from 'types/api/error';

import './CreateServiceAccountModal.styles.scss';

@@ -30,8 +28,6 @@ function CreateServiceAccountModal(): JSX.Element {
parseAsBoolean.withDefault(false),
);

const { showErrorModal, isErrorModalVisible } = useErrorModal();

const {
control,
handleSubmit,
@@ -58,10 +54,13 @@ function CreateServiceAccountModal(): JSX.Element {
await invalidateListServiceAccounts(queryClient);
},
onError: (err) => {
const errMessage = convertToApiError(
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
);
showErrorModal(errMessage as APIError);
const errMessage =
convertToApiError(
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
)?.getErrorMessage() || 'An error occurred';
toast.error(`Failed to create service account: ${errMessage}`, {
richColors: true,
});
},
},
});
@@ -91,7 +90,7 @@ function CreateServiceAccountModal(): JSX.Element {
showCloseButton
width="narrow"
className="create-sa-modal"
disableOutsideClick={isErrorModalVisible}
disableOutsideClick={false}
>
<div className="create-sa-modal__content">
<form

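The onError handler above is the new toast-based pattern that replaces the error modal. For reference, the same logic factored into a standalone helper; the helper name is illustrative, while `convertToApiError`, `AxiosError`, `RenderErrorResponseDTO`, and `toast` are the modal's existing imports:

```ts
function toastApiError(prefix: string, err: unknown): void {
  // Falls back to a generic message when the error cannot be converted.
  const errMessage =
    convertToApiError(
      err as AxiosError<RenderErrorResponseDTO, unknown> | null,
    )?.getErrorMessage() || 'An error occurred';

  toast.error(`${prefix}: ${errMessage}`, { richColors: true });
}

// Usage inside a mutation:
// onError: (err) => toastApiError('Failed to create service account', err),
```
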
@@ -11,16 +11,6 @@ jest.mock('@signozhq/sonner', () => ({
|
||||
|
||||
const mockToast = jest.mocked(toast);
|
||||
|
||||
const showErrorModal = jest.fn();
|
||||
jest.mock('providers/ErrorModalProvider', () => ({
|
||||
__esModule: true,
|
||||
...jest.requireActual('providers/ErrorModalProvider'),
|
||||
useErrorModal: jest.fn(() => ({
|
||||
showErrorModal,
|
||||
isErrorModalVisible: false,
|
||||
})),
|
||||
}));
|
||||
|
||||
const SERVICE_ACCOUNTS_ENDPOINT = '*/api/v1/service_accounts';
|
||||
|
||||
function renderModal(): ReturnType<typeof render> {
|
||||
@@ -102,13 +92,10 @@ describe('CreateServiceAccountModal', () => {
|
||||
await user.click(submitBtn);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(showErrorModal).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
getErrorMessage: expect.any(Function),
|
||||
}),
|
||||
expect(mockToast.error).toHaveBeenCalledWith(
|
||||
expect.stringMatching(/Failed to create service account/i),
|
||||
expect.anything(),
|
||||
);
|
||||
const passedError = showErrorModal.mock.calls[0][0] as any;
|
||||
expect(passedError.getErrorMessage()).toBe('Internal Server Error');
|
||||
});
|
||||
|
||||
expect(
|
||||
|
||||
@@ -1,4 +1,4 @@
import { useCallback, useEffect, useRef, useState } from 'react';
import { useCallback, useEffect, useState } from 'react';
import { useCopyToClipboard } from 'react-use';
import { Badge } from '@signozhq/badge';
import { Button } from '@signozhq/button';
@@ -28,7 +28,6 @@ import {
useMemberRoleManager,
} from 'hooks/member/useMemberRoleManager';
import { useAppContext } from 'providers/App/App';
import { useErrorModal } from 'providers/ErrorModalProvider';
import { useTimezone } from 'providers/Timezone';
import APIError from 'types/api/error';
import { toAPIError } from 'utils/errorUtils';
@@ -91,11 +90,8 @@ function EditMemberDrawer({
const [linkType, setLinkType] = useState<'invite' | 'reset' | null>(null);

const isInvited = member?.status === MemberStatus.Invited;
const isDeleted = member?.status === MemberStatus.Deleted;
const isSelf = !!member?.id && member.id === currentUser?.id;

const { showErrorModal } = useErrorModal();

const {
data: fetchedUser,
isLoading: isFetchingUser,
@@ -115,39 +111,26 @@ function EditMemberDrawer({
refetch: refetchRoles,
} = useRoles();

const {
fetchedRoleIds,
isLoading: isMemberRolesLoading,
applyDiff,
} = useMemberRoleManager(member?.id ?? '', open && !!member?.id);
const { fetchedRoleIds, applyDiff } = useMemberRoleManager(
member?.id ?? '',
open && !!member?.id,
);

const fetchedDisplayName =
fetchedUser?.data?.displayName ?? member?.name ?? '';
const fetchedUserId = fetchedUser?.data?.id;
const fetchedUserDisplayName = fetchedUser?.data?.displayName;

const roleSessionRef = useRef<string | null>(null);

useEffect(() => {
if (fetchedUserId) {
setLocalDisplayName(fetchedUserDisplayName ?? member?.name ?? '');
}
setSaveErrors([]);
}, [fetchedUserId, fetchedUserDisplayName, member?.name]);

useEffect(() => {
if (fetchedUserId) {
setSaveErrors([]);
}
}, [fetchedUserId]);

useEffect(() => {
if (!member?.id) {
roleSessionRef.current = null;
} else if (member.id !== roleSessionRef.current && !isMemberRolesLoading) {
setLocalRole(fetchedRoleIds[0] ?? '');
roleSessionRef.current = member.id;
}
}, [member?.id, fetchedRoleIds, isMemberRolesLoading]);
setLocalRole(fetchedRoleIds[0] ?? '');
}, [fetchedRoleIds]);

const isDirty =
member !== null &&
@@ -170,10 +153,17 @@ function EditMemberDrawer({
onClose();
},
onError: (err): void => {
const errMessage = convertToApiError(
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
);
showErrorModal(errMessage as APIError);
const errMessage =
convertToApiError(
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
)?.getErrorMessage() || 'An error occurred';
const prefix = isInvited
? 'Failed to revoke invite'
: 'Failed to delete member';
toast.error(`${prefix}: ${errMessage}`, {
richColors: true,
position: 'top-right',
});
},
},
});
@@ -354,15 +344,15 @@ function EditMemberDrawer({
position: 'top-right',
});
}
} catch (err) {
const errMsg = convertToApiError(
err as AxiosError<RenderErrorResponseDTO, unknown> | null,
);
showErrorModal(errMsg as APIError);
} catch {
toast.error('Failed to generate password reset link', {
richColors: true,
position: 'top-right',
});
} finally {
setIsGeneratingLink(false);
}
}, [member, isInvited, onClose, showErrorModal]);
}, [member, isInvited, onClose]);

const [copyState, copyToClipboard] = useCopyToClipboard();
const handleCopyResetLink = useCallback((): void => {
@@ -429,7 +419,7 @@ function EditMemberDrawer({
}}
className="edit-member-drawer__input"
placeholder="Enter name"
disabled={isRootUser || isDeleted}
disabled={isRootUser}
/>
</Tooltip>
</div>
@@ -450,15 +440,9 @@ function EditMemberDrawer({
<label className="edit-member-drawer__label" htmlFor="member-role">
Roles
</label>
{isSelf || isRootUser || isDeleted ? (
{isSelf || isRootUser ? (
<Tooltip
title={
isRootUser
? ROOT_USER_TOOLTIP
: isDeleted
? undefined
: 'You cannot modify your own role'
}
title={isRootUser ? ROOT_USER_TOOLTIP : 'You cannot modify your own role'}
>
<div className="edit-member-drawer__input-wrapper edit-member-drawer__input-wrapper--disabled">
<div className="edit-member-drawer__disabled-roles">
@@ -483,7 +467,7 @@ function EditMemberDrawer({
onRefetch={refetchRoles}
value={localRole}
onChange={(role): void => {
setLocalRole(role ?? '');
setLocalRole(role);
setSaveErrors((prev) =>
prev.filter(
(err) =>
@@ -492,7 +476,6 @@ function EditMemberDrawer({
);
}}
placeholder="Select role"
allowClear={false}
/>
)}
</div>
@@ -504,10 +487,6 @@ function EditMemberDrawer({
<Badge color="forest" variant="outline">
ACTIVE
</Badge>
) : member?.status === MemberStatus.Deleted ? (
<Badge color="cherry" variant="outline">
DELETED
</Badge>
) : (
<Badge color="amber" variant="outline">
INVITED
@@ -546,57 +525,55 @@ function EditMemberDrawer({
<div className="edit-member-drawer__layout">
<div className="edit-member-drawer__body">{drawerBody}</div>

{!isDeleted && (
<div className="edit-member-drawer__footer">
<div className="edit-member-drawer__footer-left">
<Tooltip title={getDeleteTooltip(isRootUser, isSelf)}>
<span className="edit-member-drawer__tooltip-wrapper">
<Button
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--danger"
onClick={(): void => setShowDeleteConfirm(true)}
disabled={isRootUser || isSelf}
>
<Trash2 size={12} />
{isInvited ? 'Revoke Invite' : 'Delete Member'}
</Button>
</span>
</Tooltip>
<div className="edit-member-drawer__footer">
<div className="edit-member-drawer__footer-left">
<Tooltip title={getDeleteTooltip(isRootUser, isSelf)}>
<span className="edit-member-drawer__tooltip-wrapper">
<Button
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--danger"
onClick={(): void => setShowDeleteConfirm(true)}
disabled={isRootUser || isSelf}
>
<Trash2 size={12} />
{isInvited ? 'Revoke Invite' : 'Delete Member'}
</Button>
</span>
</Tooltip>

<div className="edit-member-drawer__footer-divider" />
<Tooltip title={isRootUser ? ROOT_USER_TOOLTIP : undefined}>
<span className="edit-member-drawer__tooltip-wrapper">
<Button
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--warning"
onClick={handleGenerateResetLink}
disabled={isGeneratingLink || isRootUser}
>
<RefreshCw size={12} />
{isGeneratingLink && 'Generating...'}
{!isGeneratingLink && isInvited && 'Copy Invite Link'}
{!isGeneratingLink && !isInvited && 'Generate Password Reset Link'}
</Button>
</span>
</Tooltip>
</div>

<div className="edit-member-drawer__footer-right">
<Button variant="solid" color="secondary" size="sm" onClick={handleClose}>
<X size={14} />
Cancel
</Button>

<Button
variant="solid"
color="primary"
size="sm"
disabled={!isDirty || isSaving || isRootUser}
onClick={handleSave}
>
{isSaving ? 'Saving...' : 'Save Member Details'}
</Button>
</div>
<div className="edit-member-drawer__footer-divider" />
<Tooltip title={isRootUser ? ROOT_USER_TOOLTIP : undefined}>
<span className="edit-member-drawer__tooltip-wrapper">
<Button
className="edit-member-drawer__footer-btn edit-member-drawer__footer-btn--warning"
onClick={handleGenerateResetLink}
disabled={isGeneratingLink || isRootUser}
>
<RefreshCw size={12} />
{isGeneratingLink && 'Generating...'}
{!isGeneratingLink && isInvited && 'Copy Invite Link'}
{!isGeneratingLink && !isInvited && 'Generate Password Reset Link'}
</Button>
</span>
</Tooltip>
</div>
)}

<div className="edit-member-drawer__footer-right">
<Button variant="solid" color="secondary" size="sm" onClick={handleClose}>
<X size={14} />
Cancel
</Button>

<Button
variant="solid"
color="primary"
size="sm"
disabled={!isDirty || isSaving || isRootUser}
onClick={handleSave}
>
{isSaving ? 'Saving...' : 'Save Member Details'}
</Button>
</div>
</div>
</div>
);

@@ -84,16 +84,6 @@ const ROLES_ENDPOINT = '*/api/v1/roles';
const mockDeleteMutate = jest.fn();
const mockGetResetPasswordToken = jest.mocked(getResetPasswordToken);

const showErrorModal = jest.fn();
jest.mock('providers/ErrorModalProvider', () => ({
__esModule: true,
...jest.requireActual('providers/ErrorModalProvider'),
useErrorModal: jest.fn(() => ({
showErrorModal,
isErrorModalVisible: false,
})),
}));

const mockFetchedUser = {
data: {
id: 'user-1',
@@ -157,7 +147,6 @@ function renderDrawer(
describe('EditMemberDrawer', () => {
beforeEach(() => {
jest.clearAllMocks();
showErrorModal.mockClear();
server.use(
rest.get(ROLES_ENDPOINT, (_, res, ctx) =>
res(ctx.status(200), ctx.json(listRolesSuccessResponse)),
@@ -470,6 +459,7 @@ describe('EditMemberDrawer', () => {

it('shows API error message when deleteUser fails for active member', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const mockToast = jest.mocked(toast);

(useDeleteUser as jest.Mock).mockImplementation((options) => ({
mutate: mockDeleteMutate.mockImplementation(() => {
@@ -487,20 +477,16 @@ describe('EditMemberDrawer', () => {
await user.click(confirmBtns[confirmBtns.length - 1]);

await waitFor(() => {
expect(showErrorModal).toHaveBeenCalledWith(
expect.objectContaining({
getErrorMessage: expect.any(Function),
}),
);
const passedError = showErrorModal.mock.calls[0][0] as any;
expect(passedError.getErrorMessage()).toBe(
'Something went wrong on server',
expect(mockToast.error).toHaveBeenCalledWith(
'Failed to delete member: Something went wrong on server',
expect.anything(),
);
});
});

it('shows API error message when deleteUser fails for invited member', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const mockToast = jest.mocked(toast);

(useDeleteUser as jest.Mock).mockImplementation((options) => ({
mutate: mockDeleteMutate.mockImplementation(() => {
@@ -518,14 +504,9 @@ describe('EditMemberDrawer', () => {
await user.click(confirmBtns[confirmBtns.length - 1]);

await waitFor(() => {
expect(showErrorModal).toHaveBeenCalledWith(
expect.objectContaining({
getErrorMessage: expect.any(Function),
}),
);
const passedError = showErrorModal.mock.calls[0][0] as any;
expect(passedError.getErrorMessage()).toBe(
'Something went wrong on server',
expect(mockToast.error).toHaveBeenCalledWith(
'Failed to revoke invite: Something went wrong on server',
expect.anything(),
);
});
});

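Note on the test hunks above: the ErrorModalProvider mock is dropped and assertions now target the mocked sonner toast. A minimal Jest sketch of that setup, assuming the same '@signozhq/sonner' module shape mocked in these tests:

import { toast } from '@signozhq/sonner';

jest.mock('@signozhq/sonner', () => ({
	toast: { success: jest.fn(), error: jest.fn() },
}));

const mockToast = jest.mocked(toast);

it('surfaces the backend message in a toast', async () => {
	// ...render the component and trigger the failing mutation here...
	expect(mockToast.error).toHaveBeenCalledWith(
		'Failed to delete member: Something went wrong on server',
		expect.anything(),
	);
});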
@@ -10,7 +10,6 @@ import { Select } from 'antd';
import inviteUsers from 'api/v1/invite/bulk/create';
import sendInvite from 'api/v1/invite/create';
import { cloneDeep, debounce } from 'lodash-es';
import { useErrorModal } from 'providers/ErrorModalProvider';
import APIError from 'types/api/error';
import { ROLES } from 'types/roles';
import { EMAIL_REGEX } from 'utils/app';
@@ -41,8 +40,6 @@ function InviteMembersModal({
onClose,
onComplete,
}: InviteMembersModalProps): JSX.Element {
const { showErrorModal, isErrorModalVisible } = useErrorModal();

const [rows, setRows] = useState<InviteRow[]>(() => [
EMPTY_ROW(),
EMPTY_ROW(),
@@ -207,11 +204,13 @@ function InviteMembersModal({
resetAndClose();
onComplete?.();
} catch (err) {
showErrorModal(err as APIError);
const apiErr = err as APIError;
const errorMessage = apiErr?.getErrorMessage?.() ?? 'An error occurred';
toast.error(errorMessage, { richColors: true, position: 'top-right' });
} finally {
setIsSubmitting(false);
}
}, [validateAllUsers, rows, resetAndClose, onComplete, showErrorModal]);
}, [rows, onComplete, resetAndClose, validateAllUsers]);

const touchedRows = rows.filter(isRowTouched);
const isSubmitDisabled = isSubmitting || touchedRows.length === 0;
@@ -228,7 +227,7 @@ function InviteMembersModal({
showCloseButton
width="wide"
className="invite-members-modal"
disableOutsideClick={isErrorModalVisible}
disableOutsideClick={false}
>
<div className="invite-members-modal__content">
<div className="invite-members-modal__table">
@@ -330,7 +329,6 @@ function InviteMembersModal({
size="sm"
onClick={handleSubmit}
disabled={isSubmitDisabled}
loading={isSubmitting}
>
{isSubmitting ? 'Inviting...' : 'Invite Team Members'}
</Button>

@@ -1,3 +1,4 @@
import { toast } from '@signozhq/sonner';
import inviteUsers from 'api/v1/invite/bulk/create';
import sendInvite from 'api/v1/invite/create';
import { StatusCodes } from 'http-status-codes';
@@ -21,16 +22,6 @@ jest.mock('@signozhq/sonner', () => ({
},
}));

const showErrorModal = jest.fn();
jest.mock('providers/ErrorModalProvider', () => ({
__esModule: true,
...jest.requireActual('providers/ErrorModalProvider'),
useErrorModal: jest.fn(() => ({
showErrorModal,
isErrorModalVisible: false,
})),
}));

const mockSendInvite = jest.mocked(sendInvite);
const mockInviteUsers = jest.mocked(inviteUsers);

@@ -43,7 +34,6 @@ const defaultProps = {
describe('InviteMembersModal', () => {
beforeEach(() => {
jest.clearAllMocks();
showErrorModal.mockClear();
mockSendInvite.mockResolvedValue({
httpStatusCode: 200,
data: { data: 'test', status: 'success' },
@@ -164,10 +154,9 @@ describe('InviteMembersModal', () => {
describe('error handling', () => {
it('shows BE message on single invite 409', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const error = makeApiError(
'An invite already exists for this email: single@signoz.io',
mockSendInvite.mockRejectedValue(
makeApiError('An invite already exists for this email: single@signoz.io'),
);
mockSendInvite.mockRejectedValue(error);

render(<InviteMembersModal {...defaultProps} />);

@@ -182,16 +171,18 @@ describe('InviteMembersModal', () => {
);

await waitFor(() => {
expect(showErrorModal).toHaveBeenCalledWith(error);
expect(toast.error).toHaveBeenCalledWith(
'An invite already exists for this email: single@signoz.io',
expect.anything(),
);
});
});

it('shows BE message on bulk invite 409', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const error = makeApiError(
'An invite already exists for this email: alice@signoz.io',
mockInviteUsers.mockRejectedValue(
makeApiError('An invite already exists for this email: alice@signoz.io'),
);
mockInviteUsers.mockRejectedValue(error);

render(<InviteMembersModal {...defaultProps} />);

@@ -210,17 +201,18 @@ describe('InviteMembersModal', () => {
);

await waitFor(() => {
expect(showErrorModal).toHaveBeenCalledWith(error);
expect(toast.error).toHaveBeenCalledWith(
'An invite already exists for this email: alice@signoz.io',
expect.anything(),
);
});
});

it('shows BE message on generic error', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
const error = makeApiError(
'Internal server error',
StatusCodes.INTERNAL_SERVER_ERROR,
mockSendInvite.mockRejectedValue(
makeApiError('Internal server error', StatusCodes.INTERNAL_SERVER_ERROR),
);
mockSendInvite.mockRejectedValue(error);

render(<InviteMembersModal {...defaultProps} />);

@@ -235,7 +227,10 @@ describe('InviteMembersModal', () => {
);

await waitFor(() => {
expect(showErrorModal).toHaveBeenCalledWith(error);
expect(toast.error).toHaveBeenCalledWith(
'Internal server error',
expect.anything(),
);
});
});
});

@@ -210,7 +210,7 @@ function MembersTable({
index % 2 === 0 ? 'members-table-row--tinted' : ''
}
onRow={(record): React.HTMLAttributes<HTMLElement> => {
const isClickable = !!onRowClick;
const isClickable = onRowClick && record.status !== MemberStatus.Deleted;
return {
onClick: (): void => {
if (isClickable) {

@@ -86,7 +86,7 @@ describe('MembersTable', () => {
);
});

it('renders DELETED badge and calls onRowClick when a deleted member row is clicked', async () => {
it('renders DELETED badge and does not call onRowClick when a deleted member row is clicked', async () => {
const onRowClick = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
const deletedMember: MemberRow = {
@@ -108,7 +108,7 @@ describe('MembersTable', () => {

expect(screen.getByText('DELETED')).toBeInTheDocument();
await user.click(screen.getByText('Dave Deleted'));
expect(onRowClick).toHaveBeenCalledWith(
expect(onRowClick).not.toHaveBeenCalledWith(
expect.objectContaining({ id: 'user-del' }),
);
});

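Note on the MembersTable hunk above: row clicks are now gated on member status, so deleted rows still render but no longer open the drawer. A sketch of the guard as an antd onRow handler, assuming the MemberRow and MemberStatus types from this compare; makeOnRow is a hypothetical wrapper around the inline arrow in the diff:

import type React from 'react';

function makeOnRow(
	onRowClick?: (record: MemberRow) => void,
): (record: MemberRow) => React.HTMLAttributes<HTMLElement> {
	return (record) => ({
		onClick: (): void => {
			// Deleted members are view-only: render the row, ignore the click.
			if (onRowClick && record.status !== MemberStatus.Deleted) {
				onRowClick(record);
			}
		},
	});
}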
@@ -85,8 +85,7 @@ interface BaseProps {
interface SingleProps extends BaseProps {
mode?: 'single';
value?: string;
onChange?: (role: string | undefined) => void;
allowClear?: boolean;
onChange?: (role: string) => void;
}

interface MultipleProps extends BaseProps {
@@ -155,14 +154,13 @@ function RolesSelect(props: RolesSelectProps): JSX.Element {
);
}

const { value, onChange, allowClear = true } = props as SingleProps;
const { value, onChange } = props as SingleProps;
return (
<Select
id={id}
value={value || undefined}
onChange={onChange}
placeholder={placeholder}
allowClear={allowClear}
className={cx('roles-single-select', className)}
loading={loading}
notFoundContent={notFoundContent}

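Note on the RolesSelect hunk above: SingleProps loses allowClear and its onChange narrows to a plain string. The component keys its two modes off a discriminated union; a minimal sketch of that shape (the MultipleProps fields beyond the mode tag are assumptions, since the diff truncates them):

interface BaseProps {
	id?: string;
	placeholder?: string;
}

interface SingleProps extends BaseProps {
	mode?: 'single';
	value?: string;
	onChange?: (role: string) => void;
}

interface MultipleProps extends BaseProps {
	mode: 'multiple';
	value?: string[]; // assumed
	onChange?: (roles: string[]) => void; // assumed
}

type RolesSelectProps = SingleProps | MultipleProps;

// Checking the `mode` tag narrows props to the matching onChange signature.
function isMultiple(props: RolesSelectProps): props is MultipleProps {
	return props.mode === 'multiple';
}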
@@ -17,8 +17,6 @@ import { AxiosError } from 'axios';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import { parseAsBoolean, useQueryState } from 'nuqs';
import { useErrorModal } from 'providers/ErrorModalProvider';
import APIError from 'types/api/error';

import KeyCreatedPhase from './KeyCreatedPhase';
import KeyFormPhase from './KeyFormPhase';
@@ -29,7 +27,6 @@ import './AddKeyModal.styles.scss';

function AddKeyModal(): JSX.Element {
const queryClient = useQueryClient();
const { showErrorModal, isErrorModalVisible } = useErrorModal();
const [accountId] = useQueryState(SA_QUERY_PARAMS.ACCOUNT);
const [isAddKeyOpen, setIsAddKeyOpen] = useQueryState(
SA_QUERY_PARAMS.ADD_KEY,
@@ -84,11 +81,11 @@ function AddKeyModal(): JSX.Element {
}
},
onError: (error) => {
showErrorModal(
const errMessage =
convertToApiError(
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
) as APIError,
);
)?.getErrorMessage() || 'Failed to create key';
toast.error(errMessage, { richColors: true });
},
},
});
@@ -154,7 +151,7 @@ function AddKeyModal(): JSX.Element {
width="base"
className="add-key-modal"
showCloseButton
disableOutsideClick={isErrorModalVisible}
disableOutsideClick={false}
>
{phase === Phase.FORM && (
<KeyFormPhase

@@ -16,12 +16,9 @@ import type {
import { AxiosError } from 'axios';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import { parseAsBoolean, useQueryState } from 'nuqs';
import { useErrorModal } from 'providers/ErrorModalProvider';
import APIError from 'types/api/error';

function DeleteAccountModal(): JSX.Element {
const queryClient = useQueryClient();
const { showErrorModal, isErrorModalVisible } = useErrorModal();
const [accountId, setAccountId] = useQueryState(SA_QUERY_PARAMS.ACCOUNT);
const [isDeleteOpen, setIsDeleteOpen] = useQueryState(
SA_QUERY_PARAMS.DELETE_SA,
@@ -48,11 +45,11 @@ function DeleteAccountModal(): JSX.Element {
await invalidateListServiceAccounts(queryClient);
},
onError: (error) => {
showErrorModal(
const errMessage =
convertToApiError(
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
) as APIError,
);
)?.getErrorMessage() || 'Failed to delete service account';
toast.error(errMessage, { richColors: true });
},
},
});
@@ -82,7 +79,7 @@ function DeleteAccountModal(): JSX.Element {
width="narrow"
className="alert-dialog sa-delete-dialog"
showCloseButton={false}
disableOutsideClick={isErrorModalVisible}
disableOutsideClick={false}
>
<p className="sa-delete-dialog__body">
Are you sure you want to delete <strong>{accountName}</strong>? This action

@@ -17,9 +17,7 @@ import { AxiosError } from 'axios';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import dayjs from 'dayjs';
import { parseAsString, useQueryState } from 'nuqs';
import { useErrorModal } from 'providers/ErrorModalProvider';
import { useTimezone } from 'providers/Timezone';
import APIError from 'types/api/error';

import { RevokeKeyContent } from '../RevokeKeyModal';
import EditKeyForm from './EditKeyForm';
@@ -43,7 +41,6 @@ function EditKeyModal({ keyItem }: EditKeyModalProps): JSX.Element {
const open = !!editKeyId && !!selectedAccountId;

const { formatTimezoneAdjustedTimestamp } = useTimezone();
const { showErrorModal, isErrorModalVisible } = useErrorModal();
const [isRevokeConfirmOpen, setIsRevokeConfirmOpen] = useState(false);

const {
@@ -81,11 +78,11 @@ function EditKeyModal({ keyItem }: EditKeyModalProps): JSX.Element {
}
},
onError: (error) => {
showErrorModal(
const errMessage =
convertToApiError(
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
) as APIError,
);
)?.getErrorMessage() || 'Failed to update key';
toast.error(errMessage, { richColors: true });
},
},
});
@@ -105,13 +102,12 @@ function EditKeyModal({ keyItem }: EditKeyModalProps): JSX.Element {
});
}
},
// eslint-disable-next-line sonarjs/no-identical-functions
onError: (error) => {
showErrorModal(
const errMessage =
convertToApiError(
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
) as APIError,
);
)?.getErrorMessage() || 'Failed to revoke key';
toast.error(errMessage, { richColors: true });
},
},
});
@@ -164,7 +160,7 @@ function EditKeyModal({ keyItem }: EditKeyModalProps): JSX.Element {
isRevokeConfirmOpen ? 'alert-dialog delete-dialog' : 'edit-key-modal'
}
showCloseButton={!isRevokeConfirmOpen}
disableOutsideClick={isErrorModalVisible}
disableOutsideClick={false}
>
{isRevokeConfirmOpen ? (
<RevokeKeyContent

@@ -17,7 +17,7 @@ interface OverviewTabProps {
localName: string;
onNameChange: (v: string) => void;
localRole: string;
onRoleChange: (v: string | undefined) => void;
onRoleChange: (v: string) => void;
isDisabled: boolean;
availableRoles: AuthtypesRoleDTO[];
rolesLoading?: boolean;

@@ -16,8 +16,6 @@ import type {
import { AxiosError } from 'axios';
import { SA_QUERY_PARAMS } from 'container/ServiceAccountsSettings/constants';
import { parseAsString, useQueryState } from 'nuqs';
import { useErrorModal } from 'providers/ErrorModalProvider';
import APIError from 'types/api/error';

export interface RevokeKeyContentProps {
isRevoking: boolean;
@@ -58,7 +56,6 @@ export function RevokeKeyContent({

function RevokeKeyModal(): JSX.Element {
const queryClient = useQueryClient();
const { showErrorModal, isErrorModalVisible } = useErrorModal();
const [accountId] = useQueryState(SA_QUERY_PARAMS.ACCOUNT);
const [revokeKeyId, setRevokeKeyId] = useQueryState(
SA_QUERY_PARAMS.REVOKE_KEY,
@@ -86,11 +83,11 @@ function RevokeKeyModal(): JSX.Element {
}
},
onError: (error) => {
showErrorModal(
const errMessage =
convertToApiError(
error as AxiosError<RenderErrorResponseDTO, unknown> | null,
) as APIError,
);
)?.getErrorMessage() || 'Failed to revoke key';
toast.error(errMessage, { richColors: true });
},
},
});
@@ -118,7 +115,7 @@ function RevokeKeyModal(): JSX.Element {
width="narrow"
className="alert-dialog delete-dialog"
showCloseButton={false}
disableOutsideClick={isErrorModalVisible}
disableOutsideClick={false}
>
<RevokeKeyContent
isRevoking={isRevoking}

@@ -1,4 +1,4 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { useQueryClient } from 'react-query';
import { Button } from '@signozhq/button';
import { DrawerWrapper } from '@signozhq/drawer';
@@ -8,9 +8,7 @@ import { ToggleGroup, ToggleGroupItem } from '@signozhq/toggle-group';
import { Pagination, Skeleton } from 'antd';
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
import {
getGetServiceAccountRolesQueryKey,
getListServiceAccountsQueryKey,
useDeleteServiceAccountRole,
useGetServiceAccount,
useListServiceAccountKeys,
useUpdateServiceAccount,
@@ -25,10 +23,7 @@ import {
ServiceAccountStatus,
toServiceAccountRow,
} from 'container/ServiceAccountsSettings/utils';
import {
RoleUpdateFailure,
useServiceAccountRoleManager,
} from 'hooks/serviceAccount/useServiceAccountRoleManager';
import { useServiceAccountRoleManager } from 'hooks/serviceAccount/useServiceAccountRoleManager';
import {
parseAsBoolean,
parseAsInteger,
@@ -37,7 +32,7 @@ import {
useQueryState,
} from 'nuqs';
import APIError from 'types/api/error';
import { retryOn429, toAPIError } from 'utils/errorUtils';
import { toAPIError } from 'utils/errorUtils';

import AddKeyModal from './AddKeyModal';
import DeleteAccountModal from './DeleteAccountModal';
@@ -54,13 +49,6 @@ export interface ServiceAccountDrawerProps {

const PAGE_SIZE = 15;

function toSaveApiError(err: unknown): APIError {
return (
convertToApiError(err as AxiosError<RenderErrorResponseDTO>) ??
toAPIError(err as AxiosError<RenderErrorResponseDTO>)
);
}

// eslint-disable-next-line sonarjs/cognitive-complexity
function ServiceAccountDrawer({
onSuccess,
@@ -115,35 +103,21 @@ function ServiceAccountDrawer({
[accountData],
);

const {
currentRoles,
isLoading: isRolesLoading,
applyDiff,
} = useServiceAccountRoleManager(selectedAccountId ?? '');

const roleSessionRef = useRef<string | null>(null);
const { currentRoles, applyDiff } = useServiceAccountRoleManager(
selectedAccountId ?? '',
);

useEffect(() => {
if (account?.id) {
setLocalName(account?.name ?? '');
setKeysPage(1);
}
setSaveErrors([]);
}, [account?.id, account?.name, setKeysPage]);

useEffect(() => {
if (account?.id) {
setSaveErrors([]);
}
}, [account?.id]);

useEffect(() => {
if (!account?.id) {
roleSessionRef.current = null;
} else if (account.id !== roleSessionRef.current && !isRolesLoading) {
setLocalRole(currentRoles[0]?.id ?? '');
roleSessionRef.current = account.id;
}
}, [account?.id, currentRoles, isRolesLoading]);
setLocalRole(currentRoles[0]?.id ?? '');
}, [currentRoles]);

const isDeleted =
account?.status?.toUpperCase() === ServiceAccountStatus.Deleted;
@@ -179,26 +153,12 @@ function ServiceAccountDrawer({

// the retry for this mutation is safe due to the api being idempotent on backend
const { mutateAsync: updateMutateAsync } = useUpdateServiceAccount();
const { mutateAsync: deleteRole } = useDeleteServiceAccountRole({
mutation: {
retry: retryOn429,
},
});

const executeRolesOperation = useCallback(
async (accountId: string): Promise<RoleUpdateFailure[]> => {
if (localRole === '' && currentRoles[0]?.id) {
await deleteRole({
pathParams: { id: accountId, rid: currentRoles[0].id },
});
await queryClient.invalidateQueries(
getGetServiceAccountRolesQueryKey({ id: accountId }),
);
return [];
}
return applyDiff([localRole].filter(Boolean), availableRoles);
},
[localRole, currentRoles, availableRoles, applyDiff, deleteRole, queryClient],
const toSaveApiError = useCallback(
(err: unknown): APIError =>
convertToApiError(err as AxiosError<RenderErrorResponseDTO>) ??
toAPIError(err as AxiosError<RenderErrorResponseDTO>),
[],
);

const retryNameUpdate = useCallback(async (): Promise<void> => {
@@ -220,7 +180,14 @@ function ServiceAccountDrawer({
),
);
}
}, [account, localName, updateMutateAsync, refetchAccount, queryClient]);
}, [
account,
localName,
updateMutateAsync,
refetchAccount,
queryClient,
toSaveApiError,
]);

const handleNameChange = useCallback((name: string): void => {
setLocalName(name);
@@ -243,39 +210,29 @@ function ServiceAccountDrawer({
);
}
},
[],
);

const clearRoleErrors = useCallback((): void => {
setSaveErrors((prev) =>
prev.filter(
(e) => e.context !== 'Roles update' && !e.context.startsWith("Role '"),
),
);
}, []);

const failuresToSaveErrors = useCallback(
(failures: RoleUpdateFailure[]): SaveError[] =>
failures.map((f) => {
const ctx = `Role '${f.roleName}'`;
return {
context: ctx,
apiError: toSaveApiError(f.error),
onRetry: makeRoleRetry(ctx, f.onRetry),
};
}),
[makeRoleRetry],
[toSaveApiError],
);

const retryRolesUpdate = useCallback(async (): Promise<void> => {
try {
const failures = await executeRolesOperation(selectedAccountId ?? '');
const failures = await applyDiff(
[localRole].filter(Boolean),
availableRoles,
);
if (failures.length === 0) {
setSaveErrors((prev) => prev.filter((e) => e.context !== 'Roles update'));
} else {
setSaveErrors((prev) => {
const rest = prev.filter((e) => e.context !== 'Roles update');
return [...rest, ...failuresToSaveErrors(failures)];
const roleErrors = failures.map((f) => {
const ctx = `Role '${f.roleName}'`;
return {
context: ctx,
apiError: toSaveApiError(f.error),
onRetry: makeRoleRetry(ctx, f.onRetry),
};
});
return [...rest, ...roleErrors];
});
}
} catch (err) {
@@ -285,7 +242,7 @@ function ServiceAccountDrawer({
),
);
}
}, [selectedAccountId, executeRolesOperation, failuresToSaveErrors]);
}, [localRole, availableRoles, applyDiff, toSaveApiError, makeRoleRetry]);

const handleSave = useCallback(async (): Promise<void> => {
if (!account || !isDirty) {
@@ -304,7 +261,7 @@ function ServiceAccountDrawer({

const [nameResult, rolesResult] = await Promise.allSettled([
namePromise,
executeRolesOperation(account.id),
applyDiff([localRole].filter(Boolean), availableRoles),
]);

const errors: SaveError[] = [];
@@ -324,7 +281,14 @@ function ServiceAccountDrawer({
onRetry: retryRolesUpdate,
});
} else {
errors.push(...failuresToSaveErrors(rolesResult.value));
for (const failure of rolesResult.value) {
const context = `Role '${failure.roleName}'`;
errors.push({
context,
apiError: toSaveApiError(failure.error),
onRetry: makeRoleRetry(context, failure.onRetry),
});
}
}

if (errors.length > 0) {
@@ -346,14 +310,17 @@ function ServiceAccountDrawer({
account,
isDirty,
localName,
localRole,
availableRoles,
updateMutateAsync,
executeRolesOperation,
applyDiff,
refetchAccount,
onSuccess,
queryClient,
toSaveApiError,
retryNameUpdate,
makeRoleRetry,
retryRolesUpdate,
failuresToSaveErrors,
]);

const handleClose = useCallback((): void => {
@@ -446,10 +413,7 @@ function ServiceAccountDrawer({
localName={localName}
onNameChange={handleNameChange}
localRole={localRole}
onRoleChange={(role): void => {
setLocalRole(role ?? '');
clearRoleErrors();
}}
onRoleChange={setLocalRole}
isDisabled={isDeleted}
availableRoles={availableRoles}
rolesLoading={rolesLoading}

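Note on the ServiceAccountDrawer hunks above: handleSave runs the name update and the role diff concurrently and converts each failure into a retryable SaveError entry. A condensed sketch of that shape, assuming the SaveError type and the toSaveApiError, retryNameUpdate, and makeRoleRetry helpers from this file; updateName and applyRolesDiff are hypothetical stand-ins for the mutations:

async function saveAll(): Promise<SaveError[]> {
	const [nameResult, rolesResult] = await Promise.allSettled([
		updateName(), // hypothetical stand-in for updateMutateAsync
		applyRolesDiff(), // hypothetical stand-in for applyDiff(...)
	]);
	const errors: SaveError[] = [];
	if (nameResult.status === 'rejected') {
		errors.push({
			context: 'Name update',
			apiError: toSaveApiError(nameResult.reason),
			onRetry: retryNameUpdate,
		});
	}
	if (rolesResult.status === 'fulfilled') {
		// applyDiff resolves with per-role failures instead of throwing.
		for (const failure of rolesResult.value) {
			const context = `Role '${failure.roleName}'`;
			errors.push({
				context,
				apiError: toSaveApiError(failure.error),
				onRetry: makeRoleRetry(context, failure.onRetry),
			});
		}
	}
	return errors;
}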
@@ -390,42 +390,6 @@ describe('ServiceAccountDrawer – save-error UX', () => {
).toBeInTheDocument();
});

it('role add retries on 429 then succeeds without showing an error', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
let roleAddCallCount = 0;

// First call → 429, second call → 200
server.use(
rest.post(SA_ROLES_ENDPOINT, (_, res, ctx) => {
roleAddCallCount += 1;
if (roleAddCallCount === 1) {
return res(ctx.status(429), ctx.json({ message: 'Too Many Requests' }));
}
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
);

renderDrawer();

await screen.findByDisplayValue('CI Bot');

await user.click(screen.getByLabelText('Roles'));
await user.click(await screen.findByTitle('signoz-viewer'));

const saveBtn = screen.getByRole('button', { name: /Save Changes/i });
await waitFor(() => expect(saveBtn).not.toBeDisabled());
await user.click(saveBtn);

// Retried after 429 — at least 2 calls, no error shown
await waitFor(
() => {
expect(roleAddCallCount).toBeGreaterThanOrEqual(2);
},
{ timeout: 5000 },
);
expect(screen.queryByText(/role assign failed/i)).not.toBeInTheDocument();
});

it('clicking Retry on a name-update error re-triggers the request; on success the error item is removed', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });

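Note on the deleted test above: it covered retryOn429, the react-query retry predicate this compare removes along with the role-mutation retries. A minimal sketch of such a predicate; the actual utils/errorUtils implementation is not shown in this diff, so the body here is an assumption:

import type { AxiosError } from 'axios';

// Retry only rate-limited responses, and give up after three attempts.
export function retryOn429(failureCount: number, error: unknown): boolean {
	const status = (error as AxiosError | undefined)?.response?.status;
	return status === 429 && failureCount < 3;
}

// Wired into a generated mutation hook, as the old code did:
// const { mutateAsync } = useCreateServiceAccountRole({
//   mutation: { retry: retryOn429 },
// });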
@@ -13,7 +13,7 @@ import uPlot from 'uplot';

import { ChartProps } from '../types';

const TOOLTIP_WIDTH_PADDING = 120;
const TOOLTIP_WIDTH_PADDING = 60;
const TOOLTIP_MIN_WIDTH = 200;

export default function ChartWrapper({

@@ -264,22 +264,20 @@ export default function Home(): JSX.Element {

return (
<div className="home-container">
{user?.role === USER_ROLES.ADMIN && (
<PersistedAnnouncementBanner
type="info"
storageKey={LOCALSTORAGE.DISMISSED_API_KEYS_DEPRECATION_BANNER}
action={{
label: 'Go to Service Accounts',
onClick: (): void => history.push(ROUTES.SERVICE_ACCOUNTS_SETTINGS),
}}
>
<>
<strong>API keys</strong> have been deprecated in favour of{' '}
<strong>Service accounts</strong>. The existing API Keys have been
migrated to service accounts.
</>
</PersistedAnnouncementBanner>
)}
<PersistedAnnouncementBanner
type="info"
storageKey={LOCALSTORAGE.DISMISSED_API_KEYS_DEPRECATION_BANNER}
action={{
label: 'Go to Service Accounts',
onClick: (): void => history.push(ROUTES.SERVICE_ACCOUNTS_SETTINGS),
}}
>
<>
<strong>API keys</strong> have been deprecated in favour of{' '}
<strong>Service accounts</strong>. The existing API Keys have been migrated
to service accounts.
</>
</PersistedAnnouncementBanner>

<div className="sticky-header">
<Header

|
||||
|
||||
if (filterMode === FilterMode.Invited) {
|
||||
result = result.filter((m) => m.status === MemberStatus.Invited);
|
||||
} else if (filterMode === FilterMode.Deleted) {
|
||||
result = result.filter((m) => m.status === MemberStatus.Deleted);
|
||||
}
|
||||
|
||||
if (searchQuery.trim()) {
|
||||
@@ -91,9 +89,6 @@ function MembersSettings(): JSX.Element {
|
||||
const pendingCount = allMembers.filter(
|
||||
(m) => m.status === MemberStatus.Invited,
|
||||
).length;
|
||||
const deletedCount = allMembers.filter(
|
||||
(m) => m.status === MemberStatus.Deleted,
|
||||
).length;
|
||||
const totalCount = allMembers.length;
|
||||
|
||||
const filterMenuItems: MenuProps['items'] = [
|
||||
@@ -123,27 +118,12 @@ function MembersSettings(): JSX.Element {
|
||||
setPage(1);
|
||||
},
|
||||
},
|
||||
{
|
||||
key: FilterMode.Deleted,
|
||||
label: (
|
||||
<div className="members-filter-option">
|
||||
<span>Deleted ⎯ {deletedCount}</span>
|
||||
{filterMode === FilterMode.Deleted && <Check size={14} />}
|
||||
</div>
|
||||
),
|
||||
onClick: (): void => {
|
||||
setFilterMode(FilterMode.Deleted);
|
||||
setPage(1);
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
const filterLabel =
|
||||
filterMode === FilterMode.All
|
||||
? `All members ⎯ ${totalCount}`
|
||||
: filterMode === FilterMode.Invited
|
||||
? `Pending invites ⎯ ${pendingCount}`
|
||||
: `Deleted ⎯ ${deletedCount}`;
|
||||
: `Pending invites ⎯ ${pendingCount}`;
|
||||
|
||||
const handleInviteComplete = useCallback((): void => {
|
||||
refetchUsers();
|
||||
|
||||
@@ -117,14 +117,14 @@ describe('MembersSettings (integration)', () => {
|
||||
await screen.findByText('Member Details');
|
||||
});
|
||||
|
||||
it('opens EditMemberDrawer when a deleted member row is clicked', async () => {
|
||||
it('does not open EditMemberDrawer when a deleted member row is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(<MembersSettings />);
|
||||
|
||||
await user.click(await screen.findByText('Dave Deleted'));
|
||||
|
||||
expect(screen.queryByText('Member Details')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Member Details')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens InviteMembersModal when "Invite member" button is clicked', async () => {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
export enum FilterMode {
|
||||
All = 'all',
|
||||
Invited = 'invited',
|
||||
Deleted = 'deleted',
|
||||
}
|
||||
|
||||
export enum MemberStatus {
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
.display-name-form {
|
||||
.form-field {
|
||||
margin-bottom: var(--spacing-8);
|
||||
|
||||
label {
|
||||
display: block;
|
||||
margin-bottom: var(--spacing-4);
|
||||
}
|
||||
}
|
||||
|
||||
.field-error {
|
||||
color: var(--destructive);
|
||||
margin-top: var(--spacing-2);
|
||||
font-size: var(--font-size-xs);
|
||||
}
|
||||
}
|
||||
@@ -1,78 +0,0 @@
|
||||
import { toast } from '@signozhq/sonner';
|
||||
import { rest, server } from 'mocks-server/server';
|
||||
import {
|
||||
fireEvent,
|
||||
render,
|
||||
screen,
|
||||
userEvent,
|
||||
waitFor,
|
||||
} from 'tests/test-utils';
|
||||
|
||||
import DisplayName from '../index';
|
||||
|
||||
jest.mock('@signozhq/sonner', () => ({
|
||||
toast: {
|
||||
success: jest.fn(),
|
||||
error: jest.fn(),
|
||||
},
|
||||
}));
|
||||
|
||||
const ORG_ME_ENDPOINT = '*/api/v2/orgs/me';
|
||||
|
||||
const defaultProps = { index: 0, id: 'does-not-matter-id' };
|
||||
|
||||
describe('DisplayName', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
});
|
||||
|
||||
it('renders form pre-filled with org displayName from context', async () => {
|
||||
render(<DisplayName {...defaultProps} />);
|
||||
|
||||
const input = await screen.findByRole('textbox');
|
||||
expect(input).toHaveValue('Pentagon');
|
||||
|
||||
expect(screen.getByRole('button', { name: /submit/i })).toBeDisabled();
|
||||
});
|
||||
|
||||
it('enables submit and calls PUT when display name is changed', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
server.use(rest.put(ORG_ME_ENDPOINT, (_, res, ctx) => res(ctx.status(200))));
|
||||
|
||||
render(<DisplayName {...defaultProps} />);
|
||||
|
||||
const input = await screen.findByRole('textbox');
|
||||
await user.clear(input);
|
||||
await user.type(input, 'New Org Name');
|
||||
|
||||
const submitBtn = screen.getByRole('button', { name: /submit/i });
|
||||
expect(submitBtn).toBeEnabled();
|
||||
|
||||
await user.click(submitBtn);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(toast.success).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
it('shows validation error when display name is cleared and submitted', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
|
||||
render(<DisplayName {...defaultProps} />);
|
||||
|
||||
const input = await screen.findByRole('textbox');
|
||||
await user.clear(input);
|
||||
|
||||
const form = input.closest('form') as HTMLFormElement;
|
||||
fireEvent.submit(form);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/missing display name/i)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,57 +1,21 @@
|
||||
import { useEffect } from 'react';
|
||||
import { Controller, useForm } from 'react-hook-form';
|
||||
import { useTranslation } from 'react-i18next';
|
||||
import { toast } from '@signozhq/sonner';
|
||||
import { Button, Input } from 'antd';
|
||||
import { Button, Form, Input } from 'antd';
|
||||
import { convertToApiError } from 'api/ErrorResponseHandlerForGeneratedAPIs';
|
||||
import {
|
||||
useGetMyOrganization,
|
||||
useUpdateMyOrganization,
|
||||
} from 'api/generated/services/orgs';
|
||||
import { useUpdateMyOrganization } from 'api/generated/services/orgs';
|
||||
import type { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { AxiosError } from 'axios';
|
||||
import { useAppContext } from 'providers/App/App';
|
||||
import { IUser } from 'providers/App/types';
|
||||
import { useErrorModal } from 'providers/ErrorModalProvider';
|
||||
import APIError from 'types/api/error';
|
||||
import { USER_ROLES } from 'types/roles';
|
||||
import { requireErrorMessage } from 'utils/form/requireErrorMessage';
|
||||
|
||||
import './DisplayName.styles.scss';
|
||||
|
||||
function DisplayName({ index, id: orgId }: DisplayNameProps): JSX.Element {
|
||||
const [form] = Form.useForm<FormValues>();
|
||||
const orgName = Form.useWatch('displayName', form);
|
||||
|
||||
const { t } = useTranslation(['organizationsettings', 'common']);
|
||||
const { showErrorModal } = useErrorModal();
|
||||
const { org, updateOrg, user } = useAppContext();
|
||||
const currentOrg = (org || [])[index];
|
||||
const isAdmin = user.role === USER_ROLES.ADMIN;
|
||||
|
||||
const { data: orgData } = useGetMyOrganization({
|
||||
query: {
|
||||
enabled: isAdmin && !currentOrg?.displayName,
|
||||
},
|
||||
});
|
||||
|
||||
const displayName =
|
||||
currentOrg?.displayName ?? orgData?.data?.displayName ?? '';
|
||||
|
||||
const {
|
||||
control,
|
||||
handleSubmit,
|
||||
watch,
|
||||
getValues,
|
||||
setValue,
|
||||
} = useForm<FormValues>({
|
||||
defaultValues: { displayName },
|
||||
});
|
||||
|
||||
const orgName = watch('displayName');
|
||||
|
||||
useEffect(() => {
|
||||
if (displayName && !getValues('displayName')) {
|
||||
setValue('displayName', displayName);
|
||||
}
|
||||
}, [displayName, getValues, setValue]);
|
||||
const { org, updateOrg } = useAppContext();
|
||||
const { displayName } = (org || [])[index];
|
||||
|
||||
const {
|
||||
mutateAsync: updateMyOrganization,
|
||||
@@ -66,16 +30,20 @@ function DisplayName({ index, id: orgId }: DisplayNameProps): JSX.Element {
|
||||
updateOrg(orgId, data.displayName ?? '');
|
||||
},
|
||||
onError: (error) => {
|
||||
showErrorModal(
|
||||
convertToApiError(error as AxiosError<RenderErrorResponseDTO>) as APIError,
|
||||
const apiError = convertToApiError(
|
||||
error as AxiosError<RenderErrorResponseDTO>,
|
||||
);
|
||||
toast.error(
|
||||
apiError?.getErrorMessage() ?? t('something_went_wrong', { ns: 'common' }),
|
||||
{ richColors: true, position: 'top-right' },
|
||||
);
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const onSubmit = async (values: FormValues): Promise<void> => {
|
||||
const { displayName: name } = values;
|
||||
await updateMyOrganization({ data: { id: orgId, displayName: name } });
|
||||
const { displayName } = values;
|
||||
await updateMyOrganization({ data: { id: orgId, displayName } });
|
||||
};
|
||||
|
||||
if (!org) {
|
||||
@@ -85,34 +53,21 @@ function DisplayName({ index, id: orgId }: DisplayNameProps): JSX.Element {
|
||||
const isDisabled = isLoading || orgName === displayName || !orgName;
|
||||
|
||||
return (
|
||||
<form
|
||||
className="display-name-form"
|
||||
onSubmit={handleSubmit(onSubmit)}
|
||||
<Form
|
||||
initialValues={{ displayName }}
|
||||
form={form}
|
||||
layout="vertical"
|
||||
onFinish={onSubmit}
|
||||
autoComplete="off"
|
||||
>
|
||||
<div className="form-field">
|
||||
<label htmlFor="displayName">Display name</label>
|
||||
<Controller
|
||||
name="displayName"
|
||||
control={control}
|
||||
rules={{ required: requireErrorMessage('Display name') }}
|
||||
render={({ field, fieldState }): JSX.Element => (
|
||||
<>
|
||||
<Input
|
||||
{...field}
|
||||
id="displayName"
|
||||
size="large"
|
||||
placeholder={t('signoz')}
|
||||
status={fieldState.error ? 'error' : ''}
|
||||
/>
|
||||
{fieldState.error && (
|
||||
<div className="field-error">{fieldState.error.message}</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<Form.Item
|
||||
name="displayName"
|
||||
label="Display name"
|
||||
rules={[{ required: true, message: requireErrorMessage('Display name') }]}
|
||||
>
|
||||
<Input size="large" placeholder={t('signoz')} />
|
||||
</Form.Item>
|
||||
<Form.Item>
|
||||
<Button
|
||||
loading={isLoading}
|
||||
disabled={isDisabled}
|
||||
@@ -121,8 +76,8 @@ function DisplayName({ index, id: orgId }: DisplayNameProps): JSX.Element {
|
||||
>
|
||||
Submit
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
</Form.Item>
|
||||
</Form>
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { useGetUser, useSetRoleByUserID } from 'api/generated/services/users';
|
||||
import { retryOn429 } from 'utils/errorUtils';
|
||||
|
||||
export interface MemberRoleUpdateFailure {
|
||||
roleName: string;
|
||||
@@ -39,9 +38,7 @@ export function useMemberRoleManager(
|
||||
[currentUserRoles],
|
||||
);
|
||||
|
||||
const { mutateAsync: setRole } = useSetRoleByUserID({
|
||||
mutation: { retry: retryOn429 },
|
||||
});
|
||||
const { mutateAsync: setRole } = useSetRoleByUserID();
|
||||
|
||||
const applyDiff = useCallback(
|
||||
async (
|
||||
|
||||
@@ -6,12 +6,6 @@ import {
|
||||
useGetServiceAccountRoles,
|
||||
} from 'api/generated/services/serviceaccount';
|
||||
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
|
||||
import { retryOn429 } from 'utils/errorUtils';
|
||||
|
||||
const enum PromiseStatus {
|
||||
Fulfilled = 'fulfilled',
|
||||
Rejected = 'rejected',
|
||||
}
|
||||
|
||||
export interface RoleUpdateFailure {
|
||||
roleName: string;
|
||||
@@ -40,9 +34,7 @@ export function useServiceAccountRoleManager(
|
||||
]);
|
||||
|
||||
// the retry for these mutations is safe due to being idempotent on backend
|
||||
const { mutateAsync: createRole } = useCreateServiceAccountRole({
|
||||
mutation: { retry: retryOn429 },
|
||||
});
|
||||
const { mutateAsync: createRole } = useCreateServiceAccountRole();
|
||||
|
||||
const invalidateRoles = useCallback(
|
||||
() =>
|
||||
@@ -81,16 +73,11 @@ export function useServiceAccountRoleManager(
|
||||
allOperations.map((op) => op.run()),
|
||||
);
|
||||
|
||||
const successCount = results.filter(
|
||||
(r) => r.status === PromiseStatus.Fulfilled,
|
||||
).length;
|
||||
if (successCount > 0) {
|
||||
await invalidateRoles();
|
||||
}
|
||||
await invalidateRoles();
|
||||
|
||||
const failures: RoleUpdateFailure[] = [];
|
||||
results.forEach((result, index) => {
|
||||
if (result.status === PromiseStatus.Rejected) {
|
||||
if (result.status === 'rejected') {
|
||||
const { role, run } = allOperations[index];
|
||||
failures.push({
|
||||
roleName: role.name ?? 'unknown',
|
||||
|
||||
@@ -1,72 +0,0 @@
|
||||
.uplot-tooltip-container {
|
||||
font-family: 'Inter';
|
||||
font-size: 12px;
|
||||
background: var(--bg-ink-300);
|
||||
-webkit-font-smoothing: antialiased;
|
||||
color: var(--bg-vanilla-100);
|
||||
border-radius: 6px;
|
||||
border: 1px solid var(--bg-ink-100);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
|
||||
&.lightMode {
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--bg-ink-500);
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
|
||||
.uplot-tooltip-list {
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--bg-vanilla-400);
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip-divider {
|
||||
background-color: var(--bg-vanilla-300);
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip-header-container {
|
||||
padding: 1rem 1rem 0 1rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
|
||||
&:last-child {
|
||||
padding-bottom: 1rem;
|
||||
}
|
||||
|
||||
.uplot-tooltip-header {
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip-divider {
|
||||
width: 100%;
|
||||
height: 1px;
|
||||
background-color: var(--bg-ink-100);
|
||||
}
|
||||
|
||||
.uplot-tooltip-list {
|
||||
// Virtuoso absolutely positions its item rows; left: 0 prevents accidental
|
||||
// horizontal offset when the scroller has padding or transform applied.
|
||||
div[data-viewport-type='element'] {
|
||||
left: 0;
|
||||
padding: 4px 8px 4px 16px;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.3rem;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--bg-slate-100);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
.uplot-tooltip-container {
|
||||
font-family: 'Inter';
|
||||
font-size: 12px;
|
||||
background: var(--bg-ink-300);
|
||||
-webkit-font-smoothing: antialiased;
|
||||
color: var(--bg-vanilla-100);
|
||||
border-radius: 6px;
|
||||
padding: 1rem 0.5rem 0.5rem 1rem;
|
||||
border: 1px solid var(--bg-ink-100);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 8px;
|
||||
|
||||
&.lightMode {
|
||||
background: var(--bg-vanilla-100);
|
||||
color: var(--bg-ink-500);
|
||||
border: 1px solid var(--bg-vanilla-300);
|
||||
|
||||
.uplot-tooltip-list {
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--bg-vanilla-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip-header {
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
}
|
||||
|
||||
.uplot-tooltip-list-container {
|
||||
overflow-y: auto;
|
||||
max-height: 330px;
|
||||
|
||||
.uplot-tooltip-list {
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.3rem;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--bg-slate-100);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.uplot-tooltip-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin-bottom: 4px;
|
||||
|
||||
.uplot-tooltip-item-marker {
|
||||
border-radius: 50%;
|
||||
border-width: 2px;
|
||||
width: 12px;
|
||||
height: 12px;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.uplot-tooltip-item-content {
|
||||
white-space: wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,14 +7,12 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import { useTimezone } from 'providers/Timezone';

import { TooltipProps } from '../types';
import TooltipItem from './components/TooltipItem/TooltipItem';

import Styles from './Tooltip.module.scss';
import './Tooltip.styles.scss';

// Fallback per-item height used for the initial size estimate before
// Virtuoso reports the real total height via totalListHeightChanged.
const TOOLTIP_LIST_MAX_HEIGHT = 330;
const TOOLTIP_ITEM_HEIGHT = 38;
const LIST_MAX_HEIGHT = 300;
const TOOLTIP_LIST_PADDING = 10;

export default function Tooltip({
  uPlotInstance,
@@ -23,26 +21,27 @@ export default function Tooltip({
  showTooltipHeader = true,
}: TooltipProps): JSX.Element {
  const isDarkMode = useIsDarkMode();
  const [listHeight, setListHeight] = useState(0);
  const tooltipContent = content ?? [];
  const { timezone: userTimezone } = useTimezone();
  const [totalListHeight, setTotalListHeight] = useState(0);

  const tooltipContent = useMemo(() => content ?? [], [content]);

  const resolvedTimezone = timezone?.value ?? userTimezone.value;
  const resolvedTimezone = useMemo(() => {
    if (!timezone) {
      return userTimezone.value;
    }
    return timezone.value;
  }, [timezone, userTimezone]);

  const headerTitle = useMemo(() => {
    if (!showTooltipHeader) {
      return null;
    }
    const data = uPlotInstance.data;
    const cursorIdx = uPlotInstance.cursor.idx;
    if (cursorIdx == null) {
      return null;
    }
    const timestamp = uPlotInstance.data[0]?.[cursorIdx];
    if (timestamp == null) {
      return null;
    }
    return dayjs(timestamp * 1000)
    return dayjs(data[0][cursorIdx] * 1000)
      .tz(resolvedTimezone)
      .format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
  }, [
@@ -52,68 +51,60 @@ export default function Tooltip({
    showTooltipHeader,
  ]);

  const activeItem = useMemo(
    () => tooltipContent.find((item) => item.isActive) ?? null,
    [tooltipContent],
  );

  // Use the measured height from Virtuoso when available; fall back to a
  // per-item estimate on the first render. Math.ceil prevents a 1 px
  // subpixel rounding gap from triggering a spurious scrollbar.
  const virtuosoHeight = useMemo(() => {
    return totalListHeight > 0
      ? Math.ceil(Math.min(totalListHeight, LIST_MAX_HEIGHT))
      : Math.min(tooltipContent.length * TOOLTIP_ITEM_HEIGHT, LIST_MAX_HEIGHT);
  }, [totalListHeight, tooltipContent.length]);

  const showHeader = showTooltipHeader || activeItem != null;
  // With a single series the active item is fully represented in the header —
  // hide the divider and list to avoid showing a duplicate row.
  const showList = tooltipContent.length > 1;
  const showDivider = showList && showHeader;
  const virtuosoStyle = useMemo(() => {
    return {
      height:
        listHeight > 0
          ? Math.min(listHeight + TOOLTIP_LIST_PADDING, TOOLTIP_LIST_MAX_HEIGHT)
          : Math.min(
              tooltipContent.length * TOOLTIP_ITEM_HEIGHT,
              TOOLTIP_LIST_MAX_HEIGHT,
            ),
      width: '100%',
    };
  }, [listHeight, tooltipContent.length]);

  return (
    <div
      className={cx(Styles.uplotTooltipContainer, !isDarkMode && Styles.lightMode)}
      className={cx(
        'uplot-tooltip-container',
        isDarkMode ? 'darkMode' : 'lightMode',
      )}
      data-testid="uplot-tooltip-container"
    >
      {showHeader && (
        <div className={Styles.uplotTooltipHeaderContainer}>
          {showTooltipHeader && headerTitle && (
            <div
              className={Styles.uplotTooltipHeader}
              data-testid="uplot-tooltip-header"
            >
              <span>{headerTitle}</span>
            </div>
          )}

          {activeItem && (
            <TooltipItem
              item={activeItem}
              isItemActive={true}
              containerTestId="uplot-tooltip-pinned"
              markerTestId="uplot-tooltip-pinned-marker"
              contentTestId="uplot-tooltip-pinned-content"
            />
          )}
      {showTooltipHeader && (
        <div className="uplot-tooltip-header" data-testid="uplot-tooltip-header">
          <span>{headerTitle}</span>
        </div>
      )}

      {showDivider && <span className={Styles.uplotTooltipDivider} />}

      {showList && (
        <Virtuoso
          className={Styles.uplotTooltipList}
          data-testid="uplot-tooltip-list"
          data={tooltipContent}
          style={{ height: virtuosoHeight, width: '100%' }}
          totalListHeightChanged={setTotalListHeight}
          itemContent={(_, item): JSX.Element => (
            <TooltipItem item={item} isItemActive={false} />
          )}
        />
      )}
      <div className="uplot-tooltip-list-container">
        {tooltipContent.length > 0 ? (
          <Virtuoso
            className="uplot-tooltip-list"
            data-testid="uplot-tooltip-list"
            data={tooltipContent}
            style={virtuosoStyle}
            totalListHeightChanged={setListHeight}
            itemContent={(_, item): JSX.Element => (
              <div className="uplot-tooltip-item" data-testid="uplot-tooltip-item">
                <div
                  className="uplot-tooltip-item-marker"
                  style={{ borderColor: item.color }}
                  data-is-legend-marker={true}
                  data-testid="uplot-tooltip-item-marker"
                />
                <div
                  className="uplot-tooltip-item-content"
                  style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
                  data-testid="uplot-tooltip-item-content"
                >
                  {item.label}: {item.tooltipValue}
                </div>
              </div>
            )}
          />
        ) : null}
      </div>
    </div>
  );
}
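
A minimal sketch of the height fallback used above, as a standalone TypeScript function (the function name is hypothetical; the constants mirror the diff): prefer the height Virtuoso measured, otherwise estimate from the item count, and clamp both to the list maximum.

const ITEM_HEIGHT = 38;
const MAX_HEIGHT = 300;

function estimateListHeight(measuredHeight: number, itemCount: number): number {
  if (measuredHeight > 0) {
    // Math.ceil avoids a 1px subpixel gap that would show a spurious scrollbar.
    return Math.ceil(Math.min(measuredHeight, MAX_HEIGHT));
  }
  // First render: Virtuoso has not reported a height yet, so estimate per item.
  return Math.min(itemCount * ITEM_HEIGHT, MAX_HEIGHT);
}

// estimateListHeight(0, 4)   -> 152 (first render, estimated from 4 items)
// estimateListHeight(310, 4) -> 300 (measured height clamped to the max)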
@@ -133,30 +133,46 @@ describe('Tooltip', () => {
    expect(screen.queryByText(unexpectedTitle)).not.toBeInTheDocument();
  });

  it('renders single active item in header only, without a list', () => {
  it('renders lightMode class when dark mode is disabled', () => {
    const uPlotInstance = createUPlotInstance(null);
    const content = [createTooltipContent({ isActive: true })];
    mockUseIsDarkMode.mockReturnValue(false);

    renderTooltip({ uPlotInstance, content });
    renderTooltip({ uPlotInstance });

    // Active item is shown in the header, not duplicated in a list
    expect(screen.queryByTestId('uplot-tooltip-list')).toBeNull();
    expect(screen.getByTestId('uplot-tooltip-pinned')).toBeInTheDocument();
    const pinnedContent = screen.getByTestId('uplot-tooltip-pinned-content');
    expect(pinnedContent).toHaveTextContent('Series A');
    expect(pinnedContent).toHaveTextContent('10');
    const container = screen.getByTestId('uplot-tooltip-container');

    expect(container).toHaveClass('lightMode');
    expect(container).not.toHaveClass('darkMode');
  });

  it('renders list when multiple series are present', () => {
  it('renders darkMode class when dark mode is enabled', () => {
    const uPlotInstance = createUPlotInstance(null);
    const content = [
      createTooltipContent({ isActive: true }),
      createTooltipContent({ label: 'Series B', isActive: false }),
    ];
    mockUseIsDarkMode.mockReturnValue(true);

    renderTooltip({ uPlotInstance });

    const container = screen.getByTestId('uplot-tooltip-container');

    expect(container).toHaveClass('darkMode');
    expect(container).not.toHaveClass('lightMode');
  });

  it('renders tooltip items when content is provided', () => {
    const uPlotInstance = createUPlotInstance(null);
    const content = [createTooltipContent()];

    renderTooltip({ uPlotInstance, content });

    expect(screen.getByTestId('uplot-tooltip-list')).toBeInTheDocument();
    const list = screen.queryByTestId('uplot-tooltip-list');

    expect(list).not.toBeNull();

    const marker = screen.getByTestId('uplot-tooltip-item-marker');
    const itemContent = screen.getByTestId('uplot-tooltip-item-content');

    expect(marker).toHaveStyle({ borderColor: '#ff0000' });
    expect(itemContent).toHaveStyle({ color: '#ff0000', fontWeight: '700' });
    expect(itemContent).toHaveTextContent('Series A: 10');
  });

  it('does not render tooltip list when content is empty', () => {
@@ -176,7 +192,7 @@ describe('Tooltip', () => {
    renderTooltip({ uPlotInstance, content });

    const list = screen.getByTestId('uplot-tooltip-list');
    expect(list).toHaveStyle({ height: '200px' });
    expect(list).toHaveStyle({ height: '210px' });
  });

  it('sets tooltip list height based on content length when Virtuoso reports 0 height', () => {

@@ -189,7 +189,7 @@ describe('Tooltip utils', () => {
    ];
  }

  it('builds tooltip content in series-index order with isActive flag set correctly', () => {
  it('builds tooltip content with active series first', () => {
    const data: AlignedData = [[0], [10], [20], [30]];
    const series = createSeriesConfig();
    const dataIndexes = [null, 0, 0, 0];
@@ -206,21 +206,21 @@ describe('Tooltip utils', () => {
    });

    expect(result).toHaveLength(2);
    // Series are returned in series-index order (A=index 1 before B=index 2)
    // Active (series index 2) should come first
    expect(result[0]).toMatchObject<Partial<TooltipContentItem>>({
      label: 'A',
      value: 10,
      tooltipValue: 'formatted-10',
      color: '#ff0000',
      isActive: false,
    });
    expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
      label: 'B',
      value: 20,
      tooltipValue: 'formatted-20',
      color: 'color-2',
      isActive: true,
    });
    expect(result[1]).toMatchObject<Partial<TooltipContentItem>>({
      label: 'A',
      value: 10,
      tooltipValue: 'formatted-10',
      color: '#ff0000',
      isActive: false,
    });
  });

  it('skips series with null data index or non-finite values', () => {
@@ -273,31 +273,5 @@ describe('Tooltip utils', () => {
    expect(result[0].value).toBe(30);
    expect(result[1].value).toBe(30);
  });

  it('returns items in series-index order', () => {
    // Series values in non-sorted order: 3, 1, 4, 2
    const data: AlignedData = [[0], [3], [1], [4], [2]];
    const series: Series[] = [
      { label: 'x', show: true } as Series,
      { label: 'C', show: true, stroke: '#aaaaaa' } as Series,
      { label: 'A', show: true, stroke: '#bbbbbb' } as Series,
      { label: 'D', show: true, stroke: '#cccccc' } as Series,
      { label: 'B', show: true, stroke: '#dddddd' } as Series,
    ];
    const dataIndexes = [null, 0, 0, 0, 0];
    const u = createUPlotInstance();

    const result = buildTooltipContent({
      data,
      series,
      dataIndexes,
      activeSeriesIndex: null,
      uPlotInstance: u,
      yAxisUnit,
      decimalPrecision,
    });

    expect(result.map((item) => item.value)).toEqual([3, 1, 4, 2]);
  });
  });
});

@@ -1,36 +0,0 @@
.uplot-tooltip-item {
  display: flex;
  align-items: center;
  gap: 8px;
  padding: 4px 0;

  .uplot-tooltip-item-marker {
    border-radius: 50%;
    border-style: solid;
    border-width: 2px;
    width: 12px;
    height: 12px;
    flex-shrink: 0;
  }

  .uplot-tooltip-item-content {
    width: 100%;
    display: flex;
    align-items: center;
    gap: 8px;
    justify-content: space-between;

    .uplot-tooltip-item-label {
      white-space: normal;
      overflow-wrap: anywhere;
    }

    &-separator {
      flex: 1;
      border-width: 0.5px;
      border-style: dashed;
      min-width: 24px;
      opacity: 0.5;
    }
  }
}
@@ -1,49 +0,0 @@
import { TooltipContentItem } from '../../../types';

import Styles from './TooltipItem.module.scss';

interface TooltipItemProps {
  item: TooltipContentItem;
  isItemActive: boolean;
  containerTestId?: string;
  markerTestId?: string;
  contentTestId?: string;
}

export default function TooltipItem({
  item,
  isItemActive,
  containerTestId = 'uplot-tooltip-item',
  markerTestId = 'uplot-tooltip-item-marker',
  contentTestId = 'uplot-tooltip-item-content',
}: TooltipItemProps): JSX.Element {
  return (
    <div
      className={Styles.uplotTooltipItem}
      style={{
        opacity: isItemActive ? 1 : 0.7,
        fontWeight: isItemActive ? 700 : 400,
      }}
      data-testid={containerTestId}
    >
      <div
        className={Styles.uplotTooltipItemMarker}
        style={{ borderColor: item.color }}
        data-is-legend-marker={true}
        data-testid={markerTestId}
      />
      <div
        className={Styles.uplotTooltipItemContent}
        style={{ color: item.color }}
        data-testid={contentTestId}
      >
        <span className={Styles.uplotTooltipItemLabel}>{item.label}</span>
        <span
          className={Styles.uplotTooltipItemContentSeparator}
          style={{ borderColor: item.color }}
        />
        <span>{item.tooltipValue}</span>
      </div>
    </div>
  );
}
@@ -38,16 +38,16 @@ export function getTooltipBaseValue({
  // When series are hidden, we must use the next *visible* series, not index+1,
  // since hidden series keep raw values and would produce negative/wrong results.
  if (isStackedBarChart && baseValue !== null && series) {
    let nextVisibleSeriesIdx = -1;
    for (let seriesIdx = index + 1; seriesIdx < series.length; seriesIdx++) {
      if (series[seriesIdx]?.show) {
        nextVisibleSeriesIdx = seriesIdx;
    let nextVisibleIdx = -1;
    for (let j = index + 1; j < series.length; j++) {
      if (series[j]?.show) {
        nextVisibleIdx = j;
        break;
      }
    }
    if (nextVisibleSeriesIdx >= 1) {
      const nextStackedValue = data[nextVisibleSeriesIdx][dataIndex] ?? 0;
      baseValue = baseValue - nextStackedValue;
    if (nextVisibleIdx >= 1) {
      const nextValue = data[nextVisibleIdx][dataIndex] ?? 0;
      baseValue = baseValue - nextValue;
    }
  }
  return baseValue;
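
The hunk above encodes the stacked-bar arithmetic: each series stores cumulative totals, so a segment's own value is its total minus the total of the next *visible* series below it. A self-contained TypeScript sketch of that idea, with simplified types (names are illustrative, not the file's actual API):

interface SeriesLike {
  show?: boolean;
}

function stackedSegmentValue(
  data: (number | null)[][],
  series: SeriesLike[],
  index: number,
  dataIndex: number,
): number | null {
  const cumulative = data[index][dataIndex];
  if (cumulative == null) return null;
  // Hidden series keep raw (non-stacked) values and must be skipped,
  // exactly as the loop above does.
  for (let j = index + 1; j < series.length; j++) {
    if (series[j]?.show) {
      return cumulative - (data[j][dataIndex] ?? 0);
    }
  }
  return cumulative; // bottom-most visible series: nothing to subtract
}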
@@ -72,15 +72,16 @@ export function buildTooltipContent({
  decimalPrecision?: PrecisionOption;
  isStackedBarChart?: boolean;
}): TooltipContentItem[] {
  const items: TooltipContentItem[] = [];
  const active: TooltipContentItem[] = [];
  const rest: TooltipContentItem[] = [];

  for (let seriesIndex = 1; seriesIndex < series.length; seriesIndex += 1) {
    const seriesItem = series[seriesIndex];
    if (!seriesItem?.show) {
  for (let index = 1; index < series.length; index += 1) {
    const s = series[index];
    if (!s?.show) {
      continue;
    }

    const dataIndex = dataIndexes[seriesIndex];
    const dataIndex = dataIndexes[index];
    // Skip series with no data at the current cursor position
    if (dataIndex === null) {
      continue;
@@ -88,22 +89,30 @@

    const baseValue = getTooltipBaseValue({
      data,
      index: seriesIndex,
      index,
      dataIndex,
      isStackedBarChart,
      series,
    });

    const isActive = index === activeSeriesIndex;

    if (Number.isFinite(baseValue) && baseValue !== null) {
      items.push({
        label: String(seriesItem.label ?? ''),
      const item: TooltipContentItem = {
        label: String(s.label ?? ''),
        value: baseValue,
        tooltipValue: getToolTipValue(baseValue, yAxisUnit, decimalPrecision),
        color: resolveSeriesColor(seriesItem.stroke, uPlotInstance, seriesIndex),
        isActive: seriesIndex === activeSeriesIndex,
      });
        color: resolveSeriesColor(s.stroke, uPlotInstance, index),
        isActive,
      };

      if (isActive) {
        active.push(item);
      } else {
        rest.push(item);
      }
    }
  }

  return items;
  return [...active, ...rest];
}

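
The reordering introduced here is a stable partition: active items first, everything else after, with the original series order preserved within each group. The same idea as a generic TypeScript helper (a sketch; the name is illustrative):

function activeFirst<T extends { isActive: boolean }>(items: T[]): T[] {
  const active = items.filter((item) => item.isActive);
  const rest = items.filter((item) => !item.isActive);
  // filter() preserves relative order, so the partition is stable.
  return [...active, ...rest];
}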
@@ -36,8 +36,8 @@ const HOVER_DISMISS_DELAY_MS = 100;
export default function TooltipPlugin({
  config,
  render,
  maxWidth = 450,
  maxHeight = 600,
  maxWidth = 300,
  maxHeight = 400,
  syncMode = DashboardCursorSync.None,
  syncKey = '_tooltip_sync_global_',
  pinnedTooltipElement,

@@ -12,6 +12,7 @@ import {
import { useQuery } from 'react-query';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import { useGetMyOrganization } from 'api/generated/services/orgs';
import { useGetMyUser } from 'api/generated/services/users';
import listOrgPreferences from 'api/v1/org/preferences/list';
import listUserPreferences from 'api/v1/user/preferences/list';
@@ -84,6 +85,14 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
    query: { enabled: isLoggedIn },
  });

  const {
    data: orgData,
    isFetching: isFetchingOrgData,
    error: orgFetchDataError,
  } = useGetMyOrganization({
    query: { enabled: isLoggedIn },
  });

  const {
    permissions: permissionsResult,
    isFetching: isFetchingPermissions,
@@ -93,8 +102,10 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
    enabled: isLoggedIn,
  });

  const isFetchingUser = isFetchingUserData || isFetchingPermissions;
  const userFetchError = userFetchDataError || errorOnPermissions;
  const isFetchingUser =
    isFetchingUserData || isFetchingOrgData || isFetchingPermissions;
  const userFetchError =
    userFetchDataError || orgFetchDataError || errorOnPermissions;

  const userRole = useMemo(() => {
    if (permissionsResult?.[IsAdminPermission]?.isGranted) {
@@ -134,40 +145,39 @@ export function AppProvider({ children }: PropsWithChildren): JSX.Element {
      createdAt: toISOString(userData.data.createdAt) ?? prev.createdAt,
      updatedAt: toISOString(userData.data.updatedAt) ?? prev.updatedAt,
    }));
    }
  }, [userData, isFetchingUserData]);

    // todo: we need to update the org name as well, we should have the [admin only role restriction on the get org api call] - BE input needed
    setOrg((prev): any => {
  useEffect(() => {
    if (!isFetchingOrgData && orgData?.data) {
      const { id: orgId, displayName: orgDisplayName } = orgData.data;
      setOrg((prev) => {
        if (!prev) {
          return [
            {
              createdAt: 0,
              id: userData.data.orgId,
            },
          ];
          return [{ createdAt: 0, id: orgId, displayName: orgDisplayName ?? '' }];
        }
        const orgIndex = prev.findIndex((e) => e.id === userData.data.orgId);
        const orgIndex = prev.findIndex((e) => e.id === orgId);

        if (orgIndex === -1) {
          return [
            ...prev,
            {
              createdAt: 0,
              id: userData.data.orgId,
            },
            { createdAt: 0, id: orgId, displayName: orgDisplayName ?? '' },
          ];
        }

        return [
        const updatedOrg: Organization[] = [
          ...prev.slice(0, orgIndex),
          {
            createdAt: 0,
            id: userData.data.orgId,
          },
          { createdAt: 0, id: orgId, displayName: orgDisplayName ?? '' },
          ...prev.slice(orgIndex + 1),
        ];
        return updatedOrg;
      });

      setDefaultUser((prev) => ({
        ...prev,
        organization: orgDisplayName ?? prev.organization,
      }));
    }
  }, [userData, isFetchingUserData]);
  }, [orgData, isFetchingOrgData]);

  // fetcher for licenses v3
  const {

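
The setOrg updater above is an upsert keyed by org id: create the list if it is absent, append if the id is new, otherwise replace the entry in place. A standalone TypeScript sketch with a simplified Organization shape (names are illustrative):

interface Org {
  id: string;
  createdAt: number;
  displayName: string;
}

function upsertOrg(prev: Org[] | null, next: Org): Org[] {
  if (!prev) return [next]; // no list yet
  const idx = prev.findIndex((o) => o.id === next.id);
  if (idx === -1) return [...prev, next]; // new org: append
  // existing org: replace without mutating the previous array
  return [...prev.slice(0, idx), next, ...prev.slice(idx + 1)];
}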
@@ -281,6 +281,48 @@ describe('AppProvider user and org data from v2 APIs', () => {
    );
  });

  it('populates org state from GET /api/v2/orgs/me', async () => {
    server.use(
      rest.get(MY_ORG_URL, (_, res, ctx) =>
        res(
          ctx.status(200),
          ctx.json({
            data: {
              id: 'org-abc',
              displayName: 'My Org',
            },
          }),
        ),
      ),
      rest.get(MY_USER_URL, (_, res, ctx) =>
        res(
          ctx.status(200),
          ctx.json({ data: { id: 'u-default', email: 'default@signoz.io' } }),
        ),
      ),
      rest.post(AUTHZ_CHECK_URL, async (req, res, ctx) => {
        const payload = await req.json();
        return res(
          ctx.status(200),
          ctx.json(authzMockResponse(payload, [false, false, false])),
        );
      }),
    );

    const wrapper = createWrapper();
    const { result } = renderHook(() => useAppContext(), { wrapper });

    await waitFor(
      () => {
        expect(result.current.org).not.toBeNull();
        const org = result.current.org?.[0];
        expect(org?.id).toBe('org-abc');
        expect(org?.displayName).toBe('My Org');
      },
      { timeout: 2000 },
    );
  });

  it('sets isFetchingUser false once both user and org calls complete', async () => {
    server.use(
      rest.get(MY_USER_URL, (_, res, ctx) =>

@@ -14,7 +14,6 @@ import APIError from 'types/api/error';
interface ErrorModalContextType {
  showErrorModal: (error: APIError) => void;
  hideErrorModal: () => void;
  isErrorModalVisible: boolean;
}

const ErrorModalContext = createContext<ErrorModalContextType | undefined>(
@@ -39,10 +38,10 @@ export function ErrorModalProvider({
    setIsVisible(false);
  }, []);

  const value = useMemo(
    () => ({ showErrorModal, hideErrorModal, isErrorModalVisible: isVisible }),
    [showErrorModal, hideErrorModal, isVisible],
  );
  const value = useMemo(() => ({ showErrorModal, hideErrorModal }), [
    showErrorModal,
    hideErrorModal,
  ]);

  return (
    <ErrorModalContext.Provider value={value}>

@@ -1,45 +0,0 @@
import { AxiosError } from 'axios';

import { retryOn429 } from './errorUtils';

describe('retryOn429', () => {
  const make429 = (): AxiosError =>
    Object.assign(new AxiosError('Too Many Requests'), {
      response: { status: 429 },
    }) as AxiosError;

  it('returns true on first failure (failureCount=0) for 429', () => {
    expect(retryOn429(0, make429())).toBe(true);
  });

  it('returns true on second failure (failureCount=1) for 429', () => {
    expect(retryOn429(1, make429())).toBe(true);
  });

  it('returns false on third failure (failureCount=2) for 429 — max retries reached', () => {
    expect(retryOn429(2, make429())).toBe(false);
  });

  it('returns false for non-429 axios errors', () => {
    const err = Object.assign(new AxiosError('Server Error'), {
      response: { status: 500 },
    }) as AxiosError;
    expect(retryOn429(0, err)).toBe(false);
  });

  it('returns false for 401 axios errors', () => {
    const err = Object.assign(new AxiosError('Unauthorized'), {
      response: { status: 401 },
    }) as AxiosError;
    expect(retryOn429(0, err)).toBe(false);
  });

  it('returns false for non-axios errors', () => {
    expect(retryOn429(0, new Error('network error'))).toBe(false);
  });

  it('returns false for null/undefined errors', () => {
    expect(retryOn429(0, null)).toBe(false);
    expect(retryOn429(0, undefined)).toBe(false);
  });
});
@@ -1,7 +1,6 @@
import { ErrorResponseHandlerForGeneratedAPIs } from 'api/ErrorResponseHandlerForGeneratedAPIs';
import { RenderErrorResponseDTO } from 'api/generated/services/sigNoz.schemas';
import { ErrorType } from 'api/generatedAPIInstance';
import { AxiosError } from 'axios';
import APIError from 'types/api/error';

/**
@@ -67,10 +66,3 @@ export function handleApiError(
    showErrorFunction(apiError as APIError);
  }
}

export const retryOn429 = (failureCount: number, error: unknown): boolean => {
  if (error instanceof AxiosError && error.response?.status === 429) {
    return failureCount < 2;
  }
  return false;
};

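
retryOn429 matches react-query's `retry` callback signature, (failureCount, error) => boolean, so it can be passed straight through as a query option. A hedged usage sketch (the query key and fetcher are placeholders, not from the repo):

import { useQuery } from 'react-query';

import { retryOn429 } from './errorUtils';

// Placeholder fetcher for illustration only.
declare function fetchDashboards(): Promise<unknown>;

const useDashboards = () =>
  useQuery(['dashboards'], fetchDashboards, {
    // Retry only rate-limited (429) responses, and at most twice.
    retry: retryOn429,
  });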
@@ -97,9 +97,6 @@ export default defineConfig(
        javascriptEnabled: true,
      },
    },
    modules: {
      localsConvention: 'camelCaseOnly',
    },
  },
  define: {
    // TODO: Remove this in favor of import.meta.env

@@ -278,8 +278,7 @@ func (d *Dispatcher) processAlert(alert *types.Alert, route *dispatch.Route) {
  ruleId := getRuleIDFromAlert(alert)
  config, err := d.notificationManager.GetNotificationConfig(d.orgID, ruleId)
  if err != nil {
    //nolint:sloglint
    d.logger.ErrorContext(d.ctx, "error getting alert notification config", slog.String("rule.id", ruleId), errors.Attr(err))
    d.logger.ErrorContext(d.ctx, "error getting alert notification config", slog.String("rule_id", ruleId), errors.Attr(err))
    return
  }
  renotifyInterval := config.Renotify.RenotifyInterval
@@ -329,12 +328,7 @@ func (d *Dispatcher) processAlert(alert *types.Alert, route *dispatch.Route) {
  go ag.run(func(ctx context.Context, alerts ...*types.Alert) bool {
    _, _, err := d.stage.Exec(ctx, d.logger, alerts...)
    if err != nil {
      receiverName, _ := notify.ReceiverName(ctx)
      logger := d.logger.With(
        slog.String("receiver", receiverName),
        slog.Int("num_alerts", len(alerts)),
        errors.Attr(err),
      )
      logger := d.logger.With(slog.Int("num_alerts", len(alerts)), errors.Attr(err))
      if errors.Is(ctx.Err(), context.Canceled) {
        // It is expected for the context to be canceled on
        // configuration reload or shutdown. In this case, the

@@ -10,26 +10,6 @@ import (
)

func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
  if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/credentials", handler.New(
    provider.authZ.AdminAccess(provider.cloudIntegrationHandler.GetConnectionCredentials),
    handler.OpenAPIDef{
      ID: "GetConnectionCredentials",
      Tags: []string{"cloudintegration"},
      Summary: "Get connection credentials",
      Description: "This endpoint retrieves the connection credentials required for integration",
      Request: nil,
      RequestContentType: "application/json",
      Response: new(citypes.Credentials),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{},
      Deprecated: false,
      SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
    },
  )).Methods(http.MethodGet).GetError(); err != nil {
    return err
  }

  if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts", handler.New(
    provider.authZ.AdminAccess(provider.cloudIntegrationHandler.CreateAccount),
    handler.OpenAPIDef{
@@ -37,9 +17,9 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
      Tags: []string{"cloudintegration"},
      Summary: "Create account",
      Description: "This endpoint creates a new cloud integration account for the specified cloud provider",
      Request: new(citypes.PostableAccount),
      Request: new(citypes.PostableConnectionArtifact),
      RequestContentType: "application/json",
      Response: new(citypes.GettableAccountWithConnectionArtifact),
      Response: new(citypes.GettableAccountWithArtifact),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{},
@@ -79,7 +59,7 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
      Description: "This endpoint gets an account for the specified cloud provider",
      Request: nil,
      RequestContentType: "",
      Response: new(citypes.Account),
      Response: new(citypes.GettableAccount),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusNotFound},
@@ -159,7 +139,7 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
      Description: "This endpoint gets a service for the specified cloud provider",
      Request: nil,
      RequestContentType: "",
      Response: new(citypes.Service),
      Response: new(citypes.GettableService),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{},
@@ -170,7 +150,7 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
    return err
  }

  if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/{id}/services/{service_id}", handler.New(
  if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/services/{service_id}", handler.New(
    provider.authZ.AdminAccess(provider.cloudIntegrationHandler.UpdateService),
    handler.OpenAPIDef{
      ID: "UpdateService",
@@ -199,9 +179,9 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
      Tags: []string{"cloudintegration"},
      Summary: "Agent check-in",
      Description: "[Deprecated] This endpoint is called by the deployed agent to check in",
      Request: new(citypes.PostableAgentCheckIn),
      Request: new(citypes.PostableAgentCheckInRequest),
      RequestContentType: "application/json",
      Response: new(citypes.GettableAgentCheckIn),
      Response: new(citypes.GettableAgentCheckInResponse),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{},
@@ -219,9 +199,9 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
      Tags: []string{"cloudintegration"},
      Summary: "Agent check-in",
      Description: "This endpoint is called by the deployed agent to check in",
      Request: new(citypes.PostableAgentCheckIn),
      Request: new(citypes.PostableAgentCheckInRequest),
      RequestContentType: "application/json",
      Response: new(citypes.GettableAgentCheckIn),
      Response: new(citypes.GettableAgentCheckInResponse),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{},

52
pkg/apiserver/signozapiserver/inframonitoring.go
Normal file
@@ -0,0 +1,52 @@
package signozapiserver

import (
  "net/http"

  "github.com/SigNoz/signoz/pkg/http/handler"
  "github.com/SigNoz/signoz/pkg/types"
  "github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
  "github.com/gorilla/mux"
)

func (provider *provider) addInfraMonitoringRoutes(router *mux.Router) error {
  if err := router.Handle("/api/v2/infra-monitoring/hosts/list", handler.New(
    provider.authZ.ViewAccess(provider.infraMonitoringHandler.HostsList),
    handler.OpenAPIDef{
      ID: "HostsList",
      Tags: []string{"infra-monitoring"},
      Summary: "List Hosts for Infra Monitoring",
      Description: "This endpoint returns a list of hosts along with other information for each of them",
      Request: new(inframonitoringtypes.HostsListRequest),
      RequestContentType: "application/json",
      Response: new(inframonitoringtypes.HostsListResponse),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
      Deprecated: false,
      SecuritySchemes: newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodPost).GetError(); err != nil {
    return err
  }

  if err := router.Handle("/api/v2/infra-monitoring/pods/list", handler.New(
    provider.authZ.ViewAccess(provider.infraMonitoringHandler.PodsList),
    handler.OpenAPIDef{
      ID: "PodsList",
      Tags: []string{"infra-monitoring"},
      Summary: "List Pods for Infra Monitoring",
      Description: "This endpoint returns a list of pods along with metrics and metadata for each of them",
      Request: new(inframonitoringtypes.PodsListRequest),
      RequestContentType: "application/json",
      Response: new(inframonitoringtypes.PodsListResponse),
      ResponseContentType: "application/json",
      SuccessStatusCode: http.StatusOK,
      ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusUnauthorized, http.StatusInternalServerError},
      Deprecated: false,
      SecuritySchemes: newSecuritySchemes(types.RoleViewer),
    })).Methods(http.MethodPost).GetError(); err != nil {
    return err
  }

  return nil
}
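
Both new routes are POST and viewer-gated. A hedged TypeScript sketch of calling the hosts endpoint from a client; the request body shape here (a filter expression) is an assumption inferred from the HostsListRequest type name, not a documented contract:

async function listHosts(filterExpression: string): Promise<unknown> {
  const response = await fetch('/api/v2/infra-monitoring/hosts/list', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    // Assumed payload shape; consult inframonitoringtypes.HostsListRequest
    // for the real fields.
    body: JSON.stringify({ filter: { expression: filterExpression } }),
  });
  if (!response.ok) {
    throw new Error(`hosts/list failed: ${response.status}`);
  }
  return response.json();
}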
@@ -16,6 +16,7 @@ import (
  "github.com/SigNoz/signoz/pkg/modules/dashboard"
  "github.com/SigNoz/signoz/pkg/modules/fields"
  "github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
  "github.com/SigNoz/signoz/pkg/modules/inframonitoring"
  "github.com/SigNoz/signoz/pkg/modules/organization"
  "github.com/SigNoz/signoz/pkg/modules/preference"
  "github.com/SigNoz/signoz/pkg/modules/promote"
@@ -47,6 +48,7 @@ type provider struct {
  dashboardModule dashboard.Module
  dashboardHandler dashboard.Handler
  metricsExplorerHandler metricsexplorer.Handler
  infraMonitoringHandler inframonitoring.Handler
  gatewayHandler gateway.Handler
  fieldsHandler fields.Handler
  authzHandler authz.Handler
@@ -73,6 +75,7 @@ func NewFactory(
  dashboardModule dashboard.Module,
  dashboardHandler dashboard.Handler,
  metricsExplorerHandler metricsexplorer.Handler,
  infraMonitoringHandler inframonitoring.Handler,
  gatewayHandler gateway.Handler,
  fieldsHandler fields.Handler,
  authzHandler authz.Handler,
@@ -102,6 +105,7 @@ func NewFactory(
  dashboardModule,
  dashboardHandler,
  metricsExplorerHandler,
  infraMonitoringHandler,
  gatewayHandler,
  fieldsHandler,
  authzHandler,
@@ -133,6 +137,7 @@ func newProvider(
  dashboardModule dashboard.Module,
  dashboardHandler dashboard.Handler,
  metricsExplorerHandler metricsexplorer.Handler,
  infraMonitoringHandler inframonitoring.Handler,
  gatewayHandler gateway.Handler,
  fieldsHandler fields.Handler,
  authzHandler authz.Handler,
@@ -162,6 +167,7 @@ func newProvider(
  dashboardModule: dashboardModule,
  dashboardHandler: dashboardHandler,
  metricsExplorerHandler: metricsExplorerHandler,
  infraMonitoringHandler: infraMonitoringHandler,
  gatewayHandler: gatewayHandler,
  fieldsHandler: fieldsHandler,
  authzHandler: authzHandler,
@@ -228,6 +234,10 @@ func (provider *provider) AddToRouter(router *mux.Router) error {
    return err
  }

  if err := provider.addInfraMonitoringRoutes(router); err != nil {
    return err
  }

  if err := provider.addGatewayRoutes(router); err != nil {
    return err
  }

@@ -150,7 +150,7 @@ func (provider *provider) Grant(ctx context.Context, orgID valuer.UUID, names []

  err = provider.Write(ctx, tuples, nil)
  if err != nil {
    return errors.WithAdditionalf(err, "failed to grant roles: %v to subject: %s", names, subject)
    return errors.WrapInternalf(err, errors.CodeInternal, "failed to grant roles: %v to subject: %s", names, subject)
  }

  return nil
@@ -188,7 +188,7 @@ func (provider *provider) Revoke(ctx context.Context, orgID valuer.UUID, names [

  err = provider.Write(ctx, nil, tuples)
  if err != nil {
    return errors.WithAdditionalf(err, "failed to revoke roles: %v to subject: %s", names, subject)
    return errors.WrapInternalf(err, errors.CodeInternal, "failed to revoke roles: %v to subject: %s", names, subject)
  }

  return nil

@@ -15,14 +15,12 @@ import (
  openfgav1 "github.com/openfga/api/proto/openfga/v1"
  openfgapkgtransformer "github.com/openfga/language/pkg/go/transformer"
  openfgapkgserver "github.com/openfga/openfga/pkg/server"
  openfgaerrors "github.com/openfga/openfga/pkg/server/errors"
  "github.com/openfga/openfga/pkg/storage"
  "google.golang.org/protobuf/encoding/protojson"
)

const (
  batchCheckItemErrorMessage = "::AUTHZ-CHECK-ERROR::"
  writeErrorMessage = "::AUTHZ-WRITE-ERROR::"
)

var (
@@ -250,19 +248,7 @@ func (server *Server) Write(ctx context.Context, additions []*openfgav1.TupleKey
    }(),
  })

  if err != nil {
    openfgaError := new(openfgaerrors.InternalError)
    ok := errors.As(err, openfgaError)
    if ok {
      server.settings.Logger().ErrorContext(ctx, writeErrorMessage, errors.Attr(openfgaError.Unwrap()))
      return errors.New(errors.TypeTooManyRequests, errors.CodeTooManyRequests, openfgaError.Error())
    }

    server.settings.Logger().ErrorContext(ctx, writeErrorMessage, errors.Attr(err))
    return err
  }

  return nil
  return err
}

func (server *Server) ListObjects(ctx context.Context, subject string, relation authtypes.Relation, typeable authtypes.Typeable) ([]*authtypes.Object, error) {

@@ -18,7 +18,6 @@ var (
  CodeUnknown = Code{"unknown"}
  CodeFatal = Code{"fatal"}
  CodeLicenseUnavailable = Code{"license_unavailable"}
  CodeTooManyRequests = Code{"too_many_requests"}
)

var (

@@ -12,9 +12,8 @@ var (
  TypeCanceled = typ{"canceled"}
  TypeTimeout = typ{"timeout"}
  TypeUnexpected = typ{"unexpected"} // Generic mismatch of expectations
  TypeFatal = typ{"fatal"} // Unrecoverable failure (e.g. panic)
  TypeFatal = typ{"fatal"} // Unrecoverable failure (e.g. panic)
  TypeLicenseUnavailable = typ{"license-unavailable"}
  TypeTooManyRequests = typ{"too-many-requests"}
)

// Defines custom error types.

@@ -77,8 +77,6 @@ func ErrorTypeFromStatusCode(statusCode int) string {
    return errors.TypeTimeout.String()
  case http.StatusUnavailableForLegalReasons:
    return errors.TypeLicenseUnavailable.String()
  case http.StatusTooManyRequests:
    return errors.TypeTooManyRequests.String()
  default:
    return errors.TypeInternal.String()
  }
@@ -110,8 +108,6 @@ func Error(rw http.ResponseWriter, cause error) {
    httpCode = http.StatusInternalServerError
  case errors.TypeLicenseUnavailable:
    httpCode = http.StatusUnavailableForLegalReasons
  case errors.TypeTooManyRequests:
    httpCode = http.StatusTooManyRequests
  }

  body, err := json.Marshal(&ErrorResponse{Status: StatusError.s, Error: errors.AsJSON(cause)})

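
The two hunks above keep the status-code mapping symmetric in both directions: 429 maps to the too-many-requests error type and back, alongside the existing 451 / license-unavailable pair. A TypeScript sketch of the same idea (the type strings mirror the Go constants; the helper name is illustrative):

const statusToErrorType: Record<number, string> = {
  451: 'license-unavailable',
  429: 'too-many-requests',
};

function errorTypeFromStatusCode(statusCode: number): string {
  // Anything unmapped falls back to a generic internal error type,
  // matching the default branch in the Go switch.
  return statusToErrorType[statusCode] ?? 'internal';
}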
@@ -10,42 +10,37 @@ import (
)

type Module interface {
  GetConnectionCredentials(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType) (*citypes.Credentials, error)

  CreateAccount(ctx context.Context, account *citypes.Account) error

  // GetAccount returns cloud integration account
  GetAccount(ctx context.Context, orgID, accountID valuer.UUID, provider citypes.CloudProviderType) (*citypes.Account, error)
  GetAccount(ctx context.Context, orgID, accountID valuer.UUID) (*citypes.Account, error)

  // ListAccounts lists accounts where agent is connected
  ListAccounts(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType) ([]*citypes.Account, error)
  ListAccounts(ctx context.Context, orgID valuer.UUID) ([]*citypes.Account, error)

  // UpdateAccount updates the cloud integration account for a specific organization.
  UpdateAccount(ctx context.Context, account *citypes.Account) error

  // DisconnectAccount soft deletes/removes a cloud integration account.
  DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID, provider citypes.CloudProviderType) error
  DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID) error

  // GetConnectionArtifact returns cloud provider specific connection information,
  // client side handles how this information is shown
  GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.GetConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)
  GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.ConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)

  // ListServicesMetadata returns the list of supported services' metadata for a cloud provider with optional filtering for a specific integration
  // This just returns a summary of the service and not the whole service definition.
  ListServicesMetadata(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType, integrationID *valuer.UUID) ([]*citypes.ServiceMetadata, error)
  // ListServicesMetadata returns the list of services metadata for a cloud provider attached with the integrationID.
  // This just returns a summary of the service and not the whole service definition
  ListServicesMetadata(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID) ([]*citypes.ServiceMetadata, error)

  // GetService returns service definition details for a serviceID. This optionally returns the service config
  // for integrationID if provided.
  GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID citypes.ServiceID, provider citypes.CloudProviderType) (*citypes.Service, error)

  // CreateService creates a new service for a cloud integration account.
  CreateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService, provider citypes.CloudProviderType) error
  // GetService returns service definition details for a serviceID. This returns config and
  // other details required to show in service details page on web client.
  GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID string) (*citypes.Service, error)

  // UpdateService updates cloud integration service
  UpdateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService, provider citypes.CloudProviderType) error
  UpdateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService) error

  // AgentCheckIn is called by agent to send heartbeat and get latest config in response.
  AgentCheckIn(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType, req *citypes.AgentCheckInRequest) (*citypes.AgentCheckInResponse, error)
  // AgentCheckIn is called by agent to heartbeat and get latest config in response.
  AgentCheckIn(ctx context.Context, orgID valuer.UUID, req *citypes.AgentCheckInRequest) (*citypes.AgentCheckInResponse, error)

  // GetDashboardByID returns dashboard JSON for a given dashboard id.
  // this only returns the dashboard when the service (embedded in dashboard id) is enabled
@@ -57,22 +52,7 @@ type Module interface {
  ListDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
}

type CloudProviderModule interface {
  GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.GetConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)

  // ListServiceDefinitions returns all service definitions for this cloud provider.
  ListServiceDefinitions(ctx context.Context) ([]*citypes.ServiceDefinition, error)

  // GetServiceDefinition returns the service definition for the given service ID.
  GetServiceDefinition(ctx context.Context, serviceID citypes.ServiceID) (*citypes.ServiceDefinition, error)

  // BuildIntegrationConfig compiles the provider-specific integration config from the account
  // and list of configured services. This is the config returned to the agent on check-in.
  BuildIntegrationConfig(ctx context.Context, account *citypes.Account, services []*citypes.StorableCloudIntegrationService) (*citypes.ProviderIntegrationConfig, error)
}

type Handler interface {
  GetConnectionCredentials(http.ResponseWriter, *http.Request)
  CreateAccount(http.ResponseWriter, *http.Request)
  ListAccounts(http.ResponseWriter, *http.Request)
  GetAccount(http.ResponseWriter, *http.Request)

@@ -447,9 +447,9 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/ApplicationELB"
          "Namespace": "AWS/ApplicationELB"
        }
      ]
    }

@@ -171,14 +171,14 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/ApiGateway"
          "Namespace": "AWS/ApiGateway"
        }
      ]
    },
    "logs": {
      "subscriptions": [
      "cloudwatchLogsSubscriptions": [
        {
          "logGroupNamePrefix": "API-Gateway",
          "filterPattern": ""

@@ -374,9 +374,9 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/DynamoDB"
          "Namespace": "AWS/DynamoDB"
        }
      ]
    }

@@ -495,12 +495,12 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/EC2"
          "Namespace": "AWS/EC2"
        },
        {
          "namespace": "CWAgent"
          "Namespace": "CWAgent"
        }
      ]
    }

@@ -823,17 +823,17 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/ECS"
          "Namespace": "AWS/ECS"
        },
        {
          "namespace": "ECS/ContainerInsights"
          "Namespace": "ECS/ContainerInsights"
        }
      ]
    },
    "logs": {
      "subscriptions": [
      "cloudwatchLogsSubscriptions": [
        {
          "logGroupNamePrefix": "/ecs",
          "filterPattern": ""

@@ -2702,17 +2702,17 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/EKS"
          "Namespace": "AWS/EKS"
        },
        {
          "namespace": "ContainerInsights"
          "Namespace": "ContainerInsights"
        }
      ]
    },
    "logs": {
      "subscriptions": [
      "cloudwatchLogsSubscriptions": [
        {
          "logGroupNamePrefix": "/aws/containerinsights",
          "filterPattern": ""

@@ -1934,9 +1934,9 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/ElastiCache"
          "Namespace": "AWS/ElastiCache"
        }
      ]
    }

@@ -271,14 +271,14 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/Lambda"
          "Namespace": "AWS/Lambda"
        }
      ]
    },
    "logs": {
      "subscriptions": [
      "cloudwatchLogsSubscriptions": [
        {
          "logGroupNamePrefix": "/aws/lambda",
          "filterPattern": ""

@@ -1070,9 +1070,9 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/Kafka"
          "Namespace": "AWS/Kafka"
        }
      ]
    }

@@ -775,14 +775,14 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/RDS"
          "Namespace": "AWS/RDS"
        }
      ]
    },
    "logs": {
      "subscriptions": [
      "cloudwatchLogsSubscriptions": [
        {
          "logGroupNamePrefix": "/aws/rds",
          "filterPattern": ""

@@ -39,7 +39,7 @@
"telemetryCollectionStrategy": {
  "aws": {
    "logs": {
      "subscriptions": [
      "cloudwatchLogsSubscriptions": [
        {
          "logGroupNamePrefix": "x/signoz/forwarder",
          "filterPattern": ""

@@ -110,9 +110,9 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/SNS"
          "Namespace": "AWS/SNS"
        }
      ]
    }

@@ -230,9 +230,9 @@
"telemetryCollectionStrategy": {
  "aws": {
    "metrics": {
      "streamFilters": [
      "cloudwatchMetricStreamFilters": [
        {
          "namespace": "AWS/SQS"
          "Namespace": "AWS/SQS"
        }
      ]
    }

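
The renames applied across these service definitions make the JSON self-describing: `cloudwatchMetricStreamFilters` and `cloudwatchLogsSubscriptions` instead of the generic `streamFilters`/`subscriptions`, with `Namespace` matching the capitalized key AWS itself uses in metric-stream filter payloads. A TypeScript sketch of the resulting shape (field optionality is an assumption based on the hunks, where some services omit logs or metrics):

interface AWSTelemetryCollectionStrategy {
  metrics?: {
    cloudwatchMetricStreamFilters: { Namespace: string }[];
  };
  logs?: {
    cloudwatchLogsSubscriptions: {
      logGroupNamePrefix: string;
      filterPattern: string;
    }[];
  };
}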
@@ -12,10 +12,6 @@ func NewHandler() cloudintegration.Handler {
  return &handler{}
}

func (handler *handler) GetConnectionCredentials(http.ResponseWriter, *http.Request) {
  panic("unimplemented")
}

func (handler *handler) CreateAccount(writer http.ResponseWriter, request *http.Request) {
  // TODO implement me
  panic("implement me")

@@ -34,25 +34,6 @@ func (store *store) GetAccountByID(ctx context.Context, orgID, id valuer.UUID, p
  return account, nil
}

func (store *store) GetConnectedAccount(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, providerAccountID string) (*cloudintegrationtypes.StorableCloudIntegration, error) {
  account := new(cloudintegrationtypes.StorableCloudIntegration)
  err := store.
    store.
    BunDBCtx(ctx).
    NewSelect().
    Model(account).
    Where("org_id = ?", orgID).
    Where("provider = ?", provider).
    Where("account_id = ?", providerAccountID).
    Where("last_agent_report IS NOT NULL").
    Where("removed_at IS NULL").
    Scan(ctx)
  if err != nil {
    return nil, store.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "connected account with provider account id %s not found", providerAccountID)
  }
  return account, nil
}

func (store *store) ListConnectedAccounts(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) ([]*cloudintegrationtypes.StorableCloudIntegration, error) {
  var accounts []*cloudintegrationtypes.StorableCloudIntegration
  err := store.
@@ -115,6 +96,25 @@ func (store *store) RemoveAccount(ctx context.Context, orgID, id valuer.UUID, pr
  return err
}

func (store *store) GetConnectedAccount(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, providerAccountID string) (*cloudintegrationtypes.StorableCloudIntegration, error) {
  account := new(cloudintegrationtypes.StorableCloudIntegration)
  err := store.
    store.
    BunDBCtx(ctx).
    NewSelect().
    Model(account).
    Where("org_id = ?", orgID).
    Where("provider = ?", provider).
    Where("account_id = ?", providerAccountID).
    Where("last_agent_report IS NOT NULL").
    Where("removed_at IS NULL").
    Scan(ctx)
  if err != nil {
    return nil, store.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "connected account with provider account id %s not found", providerAccountID)
  }
  return account, nil
}

func (store *store) GetServiceByServiceID(ctx context.Context, cloudIntegrationID valuer.UUID, serviceID cloudintegrationtypes.ServiceID) (*cloudintegrationtypes.StorableCloudIntegrationService, error) {
  service := new(cloudintegrationtypes.StorableCloudIntegrationService)
  err := store.
@@ -172,9 +172,3 @@ func (store *store) UpdateService(ctx context.Context, service *cloudintegration
    Exec(ctx)
  return err
}

func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
  return store.store.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
    return cb(ctx)
  })
}

33
pkg/modules/inframonitoring/config.go
Normal file
@@ -0,0 +1,33 @@
package inframonitoring

import (
  "github.com/SigNoz/signoz/pkg/errors"
  "github.com/SigNoz/signoz/pkg/factory"
)

type Config struct {
  TelemetryStore TelemetryStoreConfig `mapstructure:"telemetrystore"`
}

type TelemetryStoreConfig struct {
  Threads int `mapstructure:"threads"`
}

func NewConfigFactory() factory.ConfigFactory {
  return factory.NewConfigFactory(factory.MustNewName("inframonitoring"), newConfig)
}

func newConfig() factory.Config {
  return Config{
    TelemetryStore: TelemetryStoreConfig{
      Threads: 8,
    },
  }
}

func (c Config) Validate() error {
  if c.TelemetryStore.Threads <= 0 {
    return errors.NewInvalidInputf(errors.CodeInvalidInput, "inframonitoring.telemetrystore.threads must be positive, got %d", c.TelemetryStore.Threads)
  }
  return nil
}

257
pkg/modules/inframonitoring/implinframonitoring/commonmethods.go
Normal file
257
pkg/modules/inframonitoring/implinframonitoring/commonmethods.go
Normal file
@@ -0,0 +1,257 @@
|
||||
package implinframonitoring

import (
	"context"
	"fmt"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/huandu/go-sqlbuilder"
)

const (
	ResponseTypeList        = "list"
	ResponseTypeGroupedList = "grouped_list"
)

func (m *module) buildFilterClause(ctx context.Context, filter *qbtypes.Filter, startMillis, endMillis int64) (*sqlbuilder.WhereClause, error) {
	expression := ""
	if filter != nil {
		expression = strings.TrimSpace(filter.Expression)
	}
	if expression == "" {
		return sqlbuilder.NewWhereClause(), nil
	}

	whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(expression)
	for idx := range whereClauseSelectors {
		whereClauseSelectors[idx].Signal = telemetrytypes.SignalMetrics
		whereClauseSelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
	}

	keys, _, err := m.telemetryMetadataStore.GetKeysMulti(ctx, whereClauseSelectors)
	if err != nil {
		return nil, err
	}

	opts := querybuilder.FilterExprVisitorOpts{
		Context:          ctx,
		Logger:           m.logger,
		FieldMapper:      m.fieldMapper,
		ConditionBuilder: m.condBuilder,
		FullTextColumn:   &telemetrytypes.TelemetryFieldKey{Name: "metric_name", FieldContext: telemetrytypes.FieldContextMetric},
		FieldKeys:        keys,
		StartNs:          querybuilder.ToNanoSecs(uint64(startMillis)),
		EndNs:            querybuilder.ToNanoSecs(uint64(endMillis)),
	}

	whereClause, err := querybuilder.PrepareWhereClause(expression, opts)
	if err != nil {
		return nil, err
	}

	if whereClause == nil || whereClause.WhereClause == nil {
		return sqlbuilder.NewWhereClause(), nil
	}

	return whereClause.WhereClause, nil
}

// getMetricsExistenceAndEarliestTime checks whether any of the given metric names
// have been reported, and returns the total count and the earliest first-reported timestamp.
// When count is 0, minFirstReportedUnixMilli is 0.
func (m *module) getMetricsExistenceAndEarliestTime(ctx context.Context, metricNames []string) (uint64, uint64, error) {
	if len(metricNames) == 0 {
		return 0, 0, nil
	}

	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("count(*) AS cnt", "min(first_reported_unix_milli) AS min_first_reported")
	sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
	sb.Where(sb.In("metric_name", sqlbuilder.List(metricNames)))

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	var count, minFirstReported uint64
	err := m.telemetryStore.ClickhouseDB().QueryRow(ctx, query, args...).Scan(&count, &minFirstReported)
	if err != nil {
		return 0, 0, err
	}
	return count, minFirstReported, nil
}
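
// Illustrative sketch (not part of the original change): assuming DBName resolves
// to signoz_metrics and AttributesMetadataTableName to distributed_metadata (the
// table named in the getActiveHosts doc comment elsewhere in this change), the
// builder above emits roughly the following, with the metric names bound as ?
// parameters:
//
//	SELECT count(*) AS cnt, min(first_reported_unix_milli) AS min_first_reported
//	FROM signoz_metrics.distributed_metadata
//	WHERE metric_name IN (?, ?, ...)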

// getMetadata fetches the latest values of additionalCols for each unique combination of groupBy keys,
// within the given time range and metric names. It uses argMax(tuple(...), unix_milli) to ensure
// we always pick attribute values from the latest timestamp for each group.
//
// The returned map has a composite key of groupBy column values joined by "\x00" (null byte),
// mapping to a flat map of col_name -> col_value (includes both groupBy and additional cols).
func (m *module) getMetadata(
	ctx context.Context,
	metricNames []string,
	groupBy []qbtypes.GroupByKey,
	additionalCols []string,
	filter *qbtypes.Filter,
	startMs, endMs int64,
) (map[string]map[string]string, error) {
	if len(metricNames) == 0 {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "metricNames must not be empty")
	}
	if len(groupBy) == 0 {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "groupBy must not be empty")
	}

	// Pick the optimal timeseries table based on time range; also get adjusted start.
	adjustedStart, adjustedEnd, distributedTableName, _ := telemetrymetrics.WhichTSTableToUse(
		uint64(startMs), uint64(endMs), nil,
	)

	// Build a fingerprint subquery against the samples table using the original
	// (non-adjusted) time range. The time_series tables are ReplacingMergeTrees
	// with bucketed granularity, so WhichTSTableToUse widens the window — this
	// subquery restricts to fingerprints actually active in the requested range.
	samplesTableName := telemetrymetrics.WhichSamplesTableToUse(
		uint64(startMs), uint64(endMs),
		metrictypes.UnspecifiedType,
		metrictypes.TimeAggregationUnspecified,
		nil,
	)
	fpSB := sqlbuilder.NewSelectBuilder()
	fpSB.Select("DISTINCT fingerprint")
	fpSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, samplesTableName))
	fpSB.Where(
		fpSB.In("metric_name", sqlbuilder.List(metricNames)),
		fpSB.GE("unix_milli", startMs),
		fpSB.L("unix_milli", endMs),
	)

	// Flatten groupBy keys to string names for SQL expressions and result scanning.
	groupByCols := make([]string, len(groupBy))
	for i, key := range groupBy {
		groupByCols[i] = key.Name
	}
	allCols := append(groupByCols, additionalCols...)

	// --- Build inner query ---
	// Inner SELECT columns: JSONExtractString for each groupBy col + argMax(tuple(...)) for additional cols
	innerSelectCols := make([]string, 0, len(groupByCols)+1)
	for _, col := range groupByCols {
		innerSelectCols = append(innerSelectCols,
			fmt.Sprintf("JSONExtractString(labels, '%s') AS `%s`", col, col),
		)
	}

	// Build the argMax(tuple(...), unix_milli) expression for all additional cols
	if len(additionalCols) > 0 {
		tupleArgs := make([]string, 0, len(additionalCols))
		for _, col := range additionalCols {
			tupleArgs = append(tupleArgs, fmt.Sprintf("JSONExtractString(labels, '%s')", col))
		}
		innerSelectCols = append(innerSelectCols,
			fmt.Sprintf("argMax(tuple(%s), unix_milli) AS latest_attrs", strings.Join(tupleArgs, ", ")),
		)
	}

	innerSB := sqlbuilder.NewSelectBuilder()
	innerSB.Select(innerSelectCols...)
	innerSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTableName))
	innerSB.Where(
		innerSB.In("metric_name", sqlbuilder.List(metricNames)),
		innerSB.GE("unix_milli", adjustedStart),
		innerSB.L("unix_milli", adjustedEnd),
		fmt.Sprintf("fingerprint GLOBAL IN (%s)", innerSB.Var(fpSB)), // TODO(nikhilmantri0902): check if this can be modified to be used with local table.
	)

	// Apply optional filter expression
	if filter != nil && strings.TrimSpace(filter.Expression) != "" {
		filterClause, err := m.buildFilterClause(ctx, filter, startMs, endMs)
		if err != nil {
			return nil, err
		}
		if filterClause != nil {
			innerSB.AddWhereClause(sqlbuilder.CopyWhereClause(filterClause))
		}
	}

	groupByAliases := make([]string, 0, len(groupByCols))
	for _, col := range groupByCols {
		groupByAliases = append(groupByAliases, fmt.Sprintf("`%s`", col))
	}
	innerSB.GroupBy(groupByAliases...)

	innerQuery, innerArgs := innerSB.BuildWithFlavor(sqlbuilder.ClickHouse)

	// --- Build outer query ---
	// Outer SELECT columns: groupBy cols directly + tupleElement(latest_attrs, N) for each additionalCol
	outerSelectCols := make([]string, 0, len(allCols))
	for _, col := range groupByCols {
		outerSelectCols = append(outerSelectCols, fmt.Sprintf("`%s`", col))
	}
	for i, col := range additionalCols {
		outerSelectCols = append(outerSelectCols,
			fmt.Sprintf("tupleElement(latest_attrs, %d) AS `%s`", i+1, col),
		)
	}

	outerSB := sqlbuilder.NewSelectBuilder()
	outerSB.Select(outerSelectCols...)
	outerSB.From(fmt.Sprintf("(%s)", innerQuery))

	outerQuery, _ := outerSB.BuildWithFlavor(sqlbuilder.ClickHouse)
	// All ? params are in innerArgs; outer query introduces none of its own.

	rows, err := m.telemetryStore.ClickhouseDB().Query(ctx, outerQuery, innerArgs...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	result := make(map[string]map[string]string)

	for rows.Next() {
		row := make([]string, len(allCols))
		scanPtrs := make([]any, len(row))
		for i := range row {
			scanPtrs[i] = &row[i]
		}

		if err := rows.Scan(scanPtrs...); err != nil {
			return nil, err
		}

		compositeKey := compositeKeyFromList(row[:len(groupByCols)])

		attrMap := make(map[string]string, len(allCols))
		for i, col := range allCols {
			attrMap[col] = row[i]
		}
		result[compositeKey] = attrMap
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return result, nil
}
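
// Illustrative sketch (not part of the original change): for groupBy = ["host.name"]
// and additionalCols = ["os.type"], the inner/outer pair assembled above expands to
// roughly the following (the actual time_series and samples table names come from
// WhichTSTableToUse and WhichSamplesTableToUse):
//
//	SELECT `host.name`, tupleElement(latest_attrs, 1) AS `os.type`
//	FROM (
//	    SELECT
//	        JSONExtractString(labels, 'host.name') AS `host.name`,
//	        argMax(tuple(JSONExtractString(labels, 'os.type')), unix_milli) AS latest_attrs
//	    FROM signoz_metrics.<time_series_table>
//	    WHERE metric_name IN (...)
//	      AND unix_milli >= ? AND unix_milli < ?
//	      AND fingerprint GLOBAL IN (SELECT DISTINCT fingerprint FROM <samples_table> WHERE ...)
//	    GROUP BY `host.name`
//	)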

func (m *module) validateOrderBy(orderBy *qbtypes.OrderBy, orderByToQueryNamesMap map[string][]string) error {
	if orderBy == nil {
		return nil
	}

	if _, exists := orderByToQueryNamesMap[orderBy.Key.Name]; !exists {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by key: %s", orderBy.Key.Name)
	}

	if orderBy.Direction != qbtypes.OrderDirectionAsc && orderBy.Direction != qbtypes.OrderDirectionDesc {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid order by direction: %s", orderBy.Direction)
	}

	return nil
}

72 pkg/modules/inframonitoring/implinframonitoring/handler.go Normal file
@@ -0,0 +1,72 @@
package implinframonitoring

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/http/binding"
	"github.com/SigNoz/signoz/pkg/http/render"
	"github.com/SigNoz/signoz/pkg/modules/inframonitoring"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type handler struct {
	module inframonitoring.Module
}

// NewHandler returns an inframonitoring.Handler implementation.
func NewHandler(m inframonitoring.Module) inframonitoring.Handler {
	return &handler{
		module: m,
	}
}

func (h *handler) HostsList(rw http.ResponseWriter, req *http.Request) {
	claims, err := authtypes.ClaimsFromContext(req.Context())
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID := valuer.MustNewUUID(claims.OrgID)

	var parsedReq inframonitoringtypes.HostsListRequest
	if err := binding.JSON.BindBody(req.Body, &parsedReq); err != nil {
		render.Error(rw, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to parse request body"))
		return
	}

	result, err := h.module.HostsList(req.Context(), orgID, &parsedReq)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusOK, result)
}

func (h *handler) PodsList(rw http.ResponseWriter, req *http.Request) {
	claims, err := authtypes.ClaimsFromContext(req.Context())
	if err != nil {
		render.Error(rw, err)
		return
	}

	orgID := valuer.MustNewUUID(claims.OrgID)

	var parsedReq inframonitoringtypes.PodsListRequest
	if err := binding.JSON.BindBody(req.Body, &parsedReq); err != nil {
		render.Error(rw, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to parse request body"))
		return
	}

	result, err := h.module.PodsList(req.Context(), orgID, &parsedReq)
	if err != nil {
		render.Error(rw, err)
		return
	}

	render.Success(rw, http.StatusOK, result)
}
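
// Illustrative sketch (not part of the original change): both handlers bind the JSON
// body into the corresponding *ListRequest. Assuming conventional lowerCamel JSON tags
// on inframonitoringtypes.HostsListRequest (the field names here are an assumption,
// not confirmed by this diff), a request body could look like:
//
//	{
//	  "start": 1700000000000,
//	  "end": 1700003600000,
//	  "filter": {"expression": "os.type = 'linux'"},
//	  "orderBy": {"key": {"name": "cpu"}, "direction": "desc"},
//	  "offset": 0,
//	  "limit": 10
//	}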

286 pkg/modules/inframonitoring/implinframonitoring/helpers.go Normal file
@@ -0,0 +1,286 @@
package implinframonitoring

import (
	"fmt"
	"sort"
	"strings"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

type rankedGroup struct {
	labels map[string]string
	value  float64
}

func isKeyInGroupByAttrs(groupByAttrs []qbtypes.GroupByKey, key string) bool {
	for _, groupBy := range groupByAttrs {
		if groupBy.Name == key {
			return true
		}
	}
	return false
}

func mergeFilterExpressions(queryFilterExpr, reqFilterExpr string) string {
	queryFilterExpr = strings.TrimSpace(queryFilterExpr)
	reqFilterExpr = strings.TrimSpace(reqFilterExpr)
	if queryFilterExpr == "" {
		return reqFilterExpr
	}
	if reqFilterExpr == "" {
		return queryFilterExpr
	}
	return fmt.Sprintf("(%s) AND (%s)", queryFilterExpr, reqFilterExpr)
}

// compositeKeyFromList builds a composite key by joining the given parts
// with a null byte separator. This is the canonical way to construct
// composite keys for group identification across the infra monitoring module.
func compositeKeyFromList(parts []string) string {
	return strings.Join(parts, "\x00")
}

// compositeKeyFromLabels builds a composite key from a label map by extracting
// the value for each groupBy key in order and joining them via compositeKeyFromList.
func compositeKeyFromLabels(labels map[string]string, groupBy []qbtypes.GroupByKey) string {
	parts := make([]string, len(groupBy))
	for i, key := range groupBy {
		parts[i] = labels[key.Name]
	}
	return compositeKeyFromList(parts)
}

// parseAndSortGroups extracts group label maps from a ScalarData response and
// sorts them by the ranking query's aggregation value.
func parseAndSortGroups(
	resp *qbtypes.QueryRangeResponse,
	rankingQueryName string,
	groupBy []qbtypes.GroupByKey,
	direction qbtypes.OrderDirection,
) []rankedGroup {
	if resp == nil || len(resp.Data.Results) == 0 {
		return nil
	}

	// Find the ScalarData that contains the ranking column.
	var sd *qbtypes.ScalarData
	for _, r := range resp.Data.Results {
		candidate, ok := r.(*qbtypes.ScalarData)
		if !ok || candidate == nil {
			continue
		}
		for _, col := range candidate.Columns {
			if col.Type == qbtypes.ColumnTypeAggregation && col.QueryName == rankingQueryName {
				sd = candidate
				break
			}
		}
		if sd != nil {
			break
		}
	}
	if sd == nil || len(sd.Data) == 0 {
		return nil
	}

	groupColIndices := make(map[string]int)
	rankingColIdx := -1
	for i, col := range sd.Columns {
		if col.Type == qbtypes.ColumnTypeGroup {
			groupColIndices[col.Name] = i
		}
		if col.Type == qbtypes.ColumnTypeAggregation && col.QueryName == rankingQueryName {
			rankingColIdx = i
		}
	}
	if rankingColIdx == -1 {
		return nil
	}

	groups := make([]rankedGroup, 0, len(sd.Data))
	for _, row := range sd.Data {
		labels := make(map[string]string, len(groupBy))
		for _, key := range groupBy {
			if idx, ok := groupColIndices[key.Name]; ok && idx < len(row) {
				labels[key.Name] = fmt.Sprintf("%v", row[idx])
			}
		}
		var value float64
		if rankingColIdx < len(row) {
			if v, ok := row[rankingColIdx].(float64); ok {
				value = v
			}
		}
		groups = append(groups, rankedGroup{labels: labels, value: value})
	}

	sort.Slice(groups, func(i, j int) bool {
		if direction == qbtypes.OrderDirectionAsc {
			return groups[i].value < groups[j].value
		}
		return groups[i].value > groups[j].value
	})

	return groups
}

// paginateWithBackfill returns the page of groups for [offset, offset+limit).
// The virtual sorted list is: metric-ranked groups first, then metadata-only
// groups (those in metadataMap but not in metric results) sorted alphabetically.
func paginateWithBackfill(
	metricGroups []rankedGroup,
	metadataMap map[string]map[string]string,
	groupBy []qbtypes.GroupByKey,
	offset, limit int,
) []map[string]string {
	metricKeySet := make(map[string]bool, len(metricGroups))
	for _, g := range metricGroups {
		metricKeySet[compositeKeyFromLabels(g.labels, groupBy)] = true
	}

	metadataOnlyKeys := make([]string, 0)
	for compositeKey := range metadataMap {
		if !metricKeySet[compositeKey] {
			metadataOnlyKeys = append(metadataOnlyKeys, compositeKey)
		}
	}
	sort.Strings(metadataOnlyKeys)

	totalMetric := len(metricGroups)
	totalAll := totalMetric + len(metadataOnlyKeys)

	end := offset + limit
	if end > totalAll {
		end = totalAll
	}
	if offset >= totalAll {
		return nil
	}

	pageGroups := make([]map[string]string, 0, end-offset)
	for i := offset; i < end; i++ {
		if i < totalMetric {
			pageGroups = append(pageGroups, metricGroups[i].labels)
		} else {
			compositeKey := metadataOnlyKeys[i-totalMetric]
			attrs := metadataMap[compositeKey]
			labels := make(map[string]string, len(groupBy))
			for _, key := range groupBy {
				labels[key.Name] = attrs[key.Name]
			}
			pageGroups = append(pageGroups, labels)
		}
	}
	return pageGroups
}
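
// Worked example (illustrative, not part of the original change): with metricGroups
// ranked [h3, h1] and metadataMap containing h1..h4, the virtual list is
// [h3, h1, h2, h4]: the metric-ranked groups first, then the metadata-only keys
// h2 and h4 in alphabetical order. A call with offset=2, limit=2 therefore returns
// the label maps for h2 and h4, reconstructed from metadataMap.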

// buildFullQueryRequest creates a QueryRangeRequest for all metrics,
// restricted to the given page of groups via an IN filter.
// Accepts primitive fields so it can be reused across different v2 APIs
// (hosts, pods, etc.).
func buildFullQueryRequest(
	start int64,
	end int64,
	filterExpr string,
	groupBy []qbtypes.GroupByKey,
	pageGroups []map[string]string,
	tableListQuery *qbtypes.QueryRangeRequest,
) *qbtypes.QueryRangeRequest {
	groupValues := make(map[string][]string)
	for _, labels := range pageGroups {
		for k, v := range labels {
			groupValues[k] = append(groupValues[k], v)
		}
	}

	inClauses := make([]string, 0, len(groupValues))
	for key, values := range groupValues {
		quoted := make([]string, len(values))
		for i, v := range values {
			quoted[i] = fmt.Sprintf("'%s'", v)
		}
		inClauses = append(inClauses, fmt.Sprintf("%s IN (%s)", key, strings.Join(quoted, ", ")))
	}
	inFilterExpr := strings.Join(inClauses, " AND ")

	fullReq := &qbtypes.QueryRangeRequest{
		Start:       uint64(start),
		End:         uint64(end),
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: make([]qbtypes.QueryEnvelope, 0, len(tableListQuery.CompositeQuery.Queries)),
		},
	}

	for _, envelope := range tableListQuery.CompositeQuery.Queries {
		copied := envelope
		if copied.Type == qbtypes.QueryTypeBuilder {
			existingExpr := ""
			if f := copied.GetFilter(); f != nil {
				existingExpr = f.Expression
			}
			merged := mergeFilterExpressions(existingExpr, filterExpr)
			merged = mergeFilterExpressions(merged, inFilterExpr)
			copied.SetFilter(&qbtypes.Filter{Expression: merged})
			copied.SetGroupBy(groupBy)
		}
		fullReq.CompositeQuery.Queries = append(fullReq.CompositeQuery.Queries, copied)
	}

	return fullReq
}
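
// Illustrative sketch (not part of the original change): for a page containing hosts
// web-1 and web-2 grouped by host.name, the IN filter built above is
//
//	host.name IN ('web-1', 'web-2')
//
// and each builder query ends up with its per-query filter, the request filter, and
// this IN clause ANDed together, e.g.
//
//	((state != 'idle') AND (os.type = 'linux')) AND (host.name IN ('web-1', 'web-2'))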

// parseFullQueryResponse extracts per-group metric values from the full
// composite query response. Returns compositeKey -> (queryName -> value).
// Each enabled query/formula produces its own ScalarData entry in Results,
// so we iterate over all of them and merge metrics per composite key.
func parseFullQueryResponse(
	resp *qbtypes.QueryRangeResponse,
	groupBy []qbtypes.GroupByKey,
) map[string]map[string]float64 {
	result := make(map[string]map[string]float64)
	if resp == nil || len(resp.Data.Results) == 0 {
		return result
	}

	for _, r := range resp.Data.Results {
		sd, ok := r.(*qbtypes.ScalarData)
		if !ok || sd == nil {
			continue
		}

		groupColIndices := make(map[string]int)
		aggCols := make(map[int]string) // col index -> query name
		for i, col := range sd.Columns {
			if col.Type == qbtypes.ColumnTypeGroup {
				groupColIndices[col.Name] = i
			}
			if col.Type == qbtypes.ColumnTypeAggregation {
				aggCols[i] = col.QueryName
			}
		}

		for _, row := range sd.Data {
			labels := make(map[string]string, len(groupBy))
			for _, key := range groupBy {
				if idx, ok := groupColIndices[key.Name]; ok && idx < len(row) {
					labels[key.Name] = fmt.Sprintf("%v", row[idx])
				}
			}
			compositeKey := compositeKeyFromLabels(labels, groupBy)

			if result[compositeKey] == nil {
				result[compositeKey] = make(map[string]float64)
			}
			for idx, queryName := range aggCols {
				if idx < len(row) {
					if v, ok := row[idx].(float64); ok {
						result[compositeKey][queryName] = v
					}
				}
			}
		}
	}
	return result
}
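
// Illustrative sketch (not part of the original change): for groupBy = ["host.name"],
// the returned map has one entry per group keyed by the composite key, e.g.
//
//	{"web-1": {"F1": 0.42, "F2": 0.63, "G": 1.7}, "web-2": {"F1": 0.11, ...}}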

283 pkg/modules/inframonitoring/implinframonitoring/helpers_test.go Normal file
@@ -0,0 +1,283 @@
package implinframonitoring

import (
	"testing"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

func groupByKey(name string) qbtypes.GroupByKey {
	return qbtypes.GroupByKey{
		TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: name},
	}
}

func TestIsKeyInGroupByAttrs(t *testing.T) {
	tests := []struct {
		name          string
		groupByAttrs  []qbtypes.GroupByKey
		key           string
		expectedFound bool
	}{
		{
			name:          "key present in single-element list",
			groupByAttrs:  []qbtypes.GroupByKey{groupByKey("host.name")},
			key:           "host.name",
			expectedFound: true,
		},
		{
			name: "key present in multi-element list",
			groupByAttrs: []qbtypes.GroupByKey{
				groupByKey("host.name"),
				groupByKey("os.type"),
				groupByKey("k8s.cluster.name"),
			},
			key:           "os.type",
			expectedFound: true,
		},
		{
			name: "key at last position",
			groupByAttrs: []qbtypes.GroupByKey{
				groupByKey("host.name"),
				groupByKey("os.type"),
			},
			key:           "os.type",
			expectedFound: true,
		},
		{
			name:          "key not in list",
			groupByAttrs:  []qbtypes.GroupByKey{groupByKey("host.name")},
			key:           "os.type",
			expectedFound: false,
		},
		{
			name:          "empty group by list",
			groupByAttrs:  []qbtypes.GroupByKey{},
			key:           "host.name",
			expectedFound: false,
		},
		{
			name:          "nil group by list",
			groupByAttrs:  nil,
			key:           "host.name",
			expectedFound: false,
		},
		{
			name:          "empty key string",
			groupByAttrs:  []qbtypes.GroupByKey{groupByKey("host.name")},
			key:           "",
			expectedFound: false,
		},
		{
			name:          "empty key matches empty-named group by key",
			groupByAttrs:  []qbtypes.GroupByKey{groupByKey("")},
			key:           "",
			expectedFound: true,
		},
		{
			name: "partial match does not count",
			groupByAttrs: []qbtypes.GroupByKey{
				groupByKey("host"),
			},
			key:           "host.name",
			expectedFound: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := isKeyInGroupByAttrs(tt.groupByAttrs, tt.key)
			if got != tt.expectedFound {
				t.Errorf("isKeyInGroupByAttrs(%v, %q) = %v, want %v",
					tt.groupByAttrs, tt.key, got, tt.expectedFound)
			}
		})
	}
}

func TestMergeFilterExpressions(t *testing.T) {
	tests := []struct {
		name            string
		queryFilterExpr string
		reqFilterExpr   string
		expected        string
	}{
		{
			name:            "both non-empty",
			queryFilterExpr: "cpu > 50",
			reqFilterExpr:   "host.name = 'web-1'",
			expected:        "(cpu > 50) AND (host.name = 'web-1')",
		},
		{
			name:            "query empty, req non-empty",
			queryFilterExpr: "",
			reqFilterExpr:   "host.name = 'web-1'",
			expected:        "host.name = 'web-1'",
		},
		{
			name:            "query non-empty, req empty",
			queryFilterExpr: "cpu > 50",
			reqFilterExpr:   "",
			expected:        "cpu > 50",
		},
		{
			name:            "both empty",
			queryFilterExpr: "",
			reqFilterExpr:   "",
			expected:        "",
		},
		{
			name:            "whitespace-only query treated as empty",
			queryFilterExpr: " ",
			reqFilterExpr:   "host.name = 'web-1'",
			expected:        "host.name = 'web-1'",
		},
		{
			name:            "whitespace-only req treated as empty",
			queryFilterExpr: "cpu > 50",
			reqFilterExpr:   " ",
			expected:        "cpu > 50",
		},
		{
			name:            "both whitespace-only",
			queryFilterExpr: " ",
			reqFilterExpr:   " ",
			expected:        "",
		},
		{
			name:            "leading/trailing whitespace trimmed before merge",
			queryFilterExpr: " cpu > 50 ",
			reqFilterExpr:   " mem < 80 ",
			expected:        "(cpu > 50) AND (mem < 80)",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := mergeFilterExpressions(tt.queryFilterExpr, tt.reqFilterExpr)
			if got != tt.expected {
				t.Errorf("mergeFilterExpressions(%q, %q) = %q, want %q",
					tt.queryFilterExpr, tt.reqFilterExpr, got, tt.expected)
			}
		})
	}
}

func TestCompositeKeyFromList(t *testing.T) {
	tests := []struct {
		name     string
		parts    []string
		expected string
	}{
		{
			name:     "single part",
			parts:    []string{"web-1"},
			expected: "web-1",
		},
		{
			name:     "multiple parts joined with null separator",
			parts:    []string{"web-1", "linux", "us-east"},
			expected: "web-1\x00linux\x00us-east",
		},
		{
			name:     "empty slice returns empty string",
			parts:    []string{},
			expected: "",
		},
		{
			name:     "nil slice returns empty string",
			parts:    nil,
			expected: "",
		},
		{
			name:     "parts with empty strings",
			parts:    []string{"web-1", "", "us-east"},
			expected: "web-1\x00\x00us-east",
		},
		{
			name:     "all empty strings",
			parts:    []string{"", ""},
			expected: "\x00",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compositeKeyFromList(tt.parts)
			if got != tt.expected {
				t.Errorf("compositeKeyFromList(%v) = %q, want %q",
					tt.parts, got, tt.expected)
			}
		})
	}
}

func TestCompositeKeyFromLabels(t *testing.T) {
	tests := []struct {
		name     string
		labels   map[string]string
		groupBy  []qbtypes.GroupByKey
		expected string
	}{
		{
			name:     "single group-by key",
			labels:   map[string]string{"host.name": "web-1"},
			groupBy:  []qbtypes.GroupByKey{groupByKey("host.name")},
			expected: "web-1",
		},
		{
			name: "multiple group-by keys joined with null separator",
			labels: map[string]string{
				"host.name": "web-1",
				"os.type":   "linux",
			},
			groupBy:  []qbtypes.GroupByKey{groupByKey("host.name"), groupByKey("os.type")},
			expected: "web-1\x00linux",
		},
		{
			name:     "missing label yields empty segment",
			labels:   map[string]string{"host.name": "web-1"},
			groupBy:  []qbtypes.GroupByKey{groupByKey("host.name"), groupByKey("os.type")},
			expected: "web-1\x00",
		},
		{
			name:     "empty labels map",
			labels:   map[string]string{},
			groupBy:  []qbtypes.GroupByKey{groupByKey("host.name")},
			expected: "",
		},
		{
			name:     "empty group-by slice",
			labels:   map[string]string{"host.name": "web-1"},
			groupBy:  []qbtypes.GroupByKey{},
			expected: "",
		},
		{
			name:     "nil labels map",
			labels:   nil,
			groupBy:  []qbtypes.GroupByKey{groupByKey("host.name")},
			expected: "",
		},
		{
			name: "order matches group-by order, not map iteration order",
			labels: map[string]string{
				"z": "last",
				"a": "first",
				"m": "middle",
			},
			groupBy:  []qbtypes.GroupByKey{groupByKey("a"), groupByKey("m"), groupByKey("z")},
			expected: "first\x00middle\x00last",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := compositeKeyFromLabels(tt.labels, tt.groupBy)
			if got != tt.expected {
				t.Errorf("compositeKeyFromLabels(%v, %v) = %q, want %q",
					tt.labels, tt.groupBy, got, tt.expected)
			}
		})
	}
}

492 pkg/modules/inframonitoring/implinframonitoring/hosts.go Normal file
@@ -0,0 +1,492 @@
package implinframonitoring

import (
	"context"
	"fmt"
	"slices"
	"strings"
	"time"

	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
	"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/huandu/go-sqlbuilder"
)

var (
	hostNameAttrKey = "host.name"
)

// Helper group-by key used across all queries.
var hostNameGroupByKey = qbtypes.GroupByKey{
	TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
		Name:          hostNameAttrKey,
		FieldContext:  telemetrytypes.FieldContextResource,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
}

var hostsTableMetricNamesList = []string{
	"system.cpu.time",
	"system.memory.usage",
	"system.cpu.load_average.15m",
	"system.filesystem.usage",
}

var hostAttrKeysForMetadata = []string{
	"os.type",
}

// orderByToHostsQueryNames maps the orderBy column to the query/formula names
// from HostsTableListQuery used for ranking host groups.
var orderByToHostsQueryNames = map[string][]string{
	"cpu":        {"A", "B", "F1"},
	"memory":     {"C", "D", "F2"},
	"wait":       {"E", "F", "F3"},
	"disk_usage": {"H", "I", "F4"},
	"load15":     {"G"},
}
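
// Illustrative note (not part of the original change): ordering by "cpu" pulls in
// builder queries A and B plus formula F1; the last name in each slice (here F1 = A/B)
// is the column the groups are actually ranked by (see getTopHostGroups below).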

func (m *module) newHostsTableListQuery() *qbtypes.QueryRangeRequest {
	return &qbtypes.QueryRangeRequest{
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: []qbtypes.QueryEnvelope{
				// Query A: CPU usage (non-idle)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "A",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.cpu.time",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationRate,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						Filter: &qbtypes.Filter{
							Expression: "state != 'idle'",
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Query B: CPU usage (all states)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "B",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.cpu.time",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationRate,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Formula F1: CPU Usage (%)
				{
					Type: qbtypes.QueryTypeFormula,
					Spec: qbtypes.QueryBuilderFormula{
						Name:       "F1",
						Expression: "A/B",
						Legend:     "CPU Usage (%)",
						Disabled:   false,
					},
				},
				// Query C: Memory usage (state = used)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "C",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.memory.usage",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						Filter: &qbtypes.Filter{
							Expression: "state = 'used'",
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Query D: Memory usage (all states)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "D",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.memory.usage",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Formula F2: Memory Usage (%)
				{
					Type: qbtypes.QueryTypeFormula,
					Spec: qbtypes.QueryBuilderFormula{
						Name:       "F2",
						Expression: "C/D",
						Legend:     "Memory Usage (%)",
						Disabled:   false,
					},
				},
				// Query E: CPU wait time (state = wait)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "E",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.cpu.time",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationRate,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						Filter: &qbtypes.Filter{
							Expression: "state = 'wait'",
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Query F: CPU time (all states)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "F",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.cpu.time",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationRate,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Formula F3: CPU Wait Time (%)
				{
					Type: qbtypes.QueryTypeFormula,
					Spec: qbtypes.QueryBuilderFormula{
						Name:       "F3",
						Expression: "E/F",
						Legend:     "CPU Wait Time (%)",
						Disabled:   false,
					},
				},
				// Query G: Load15
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "G",
						Signal: telemetrytypes.SignalMetrics,
						Legend: "CPU Load Average (15m)",
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.cpu.load_average.15m",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: false,
					},
				},
				// Query H: Filesystem usage (state = used)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "H",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.filesystem.usage",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						Filter: &qbtypes.Filter{
							Expression: "state = 'used'",
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Query I: Filesystem usage (all states)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "I",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "system.filesystem.usage",
								Temporality:      metrictypes.Cumulative,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{hostNameGroupByKey},
						Disabled: true,
					},
				},
				// Formula F4: Disk Usage (%)
				{
					Type: qbtypes.QueryTypeFormula,
					Spec: qbtypes.QueryBuilderFormula{
						Name:       "F4",
						Expression: "H/I",
						Legend:     "Disk Usage (%)",
						Disabled:   false,
					},
				},
			},
		},
	}
}

// getTopHostGroups runs a ranking query for the ordering metric, sorts the
// results, paginates, and backfills from metadataMap when the page extends
// past the metric-ranked groups.
func (m *module) getTopHostGroups(
	ctx context.Context,
	orgID valuer.UUID,
	req *inframonitoringtypes.HostsListRequest,
	metadataMap map[string]map[string]string,
) ([]map[string]string, error) {
	orderByKey := req.OrderBy.Key.Name
	queryNamesForOrderBy := orderByToHostsQueryNames[orderByKey]
	// The last entry is the formula/query whose value we sort by.
	rankingQueryName := queryNamesForOrderBy[len(queryNamesForOrderBy)-1]

	topReq := &qbtypes.QueryRangeRequest{
		Start:       uint64(req.Start),
		End:         uint64(req.End),
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: make([]qbtypes.QueryEnvelope, 0, len(queryNamesForOrderBy)),
		},
	}

	for _, envelope := range m.newHostsTableListQuery().CompositeQuery.Queries {
		if !slices.Contains(queryNamesForOrderBy, envelope.GetQueryName()) {
			continue
		}
		copied := envelope
		if copied.Type == qbtypes.QueryTypeBuilder {
			existingExpr := ""
			if f := copied.GetFilter(); f != nil {
				existingExpr = f.Expression
			}
			reqFilterExpr := ""
			if req.Filter != nil {
				reqFilterExpr = req.Filter.Expression
			}
			merged := mergeFilterExpressions(existingExpr, reqFilterExpr)
			copied.SetFilter(&qbtypes.Filter{Expression: merged})
			copied.SetGroupBy(req.GroupBy)
		}
		topReq.CompositeQuery.Queries = append(topReq.CompositeQuery.Queries, copied)
	}

	resp, err := m.querier.QueryRange(ctx, orgID, topReq)
	if err != nil {
		return nil, err
	}

	allMetricGroups := parseAndSortGroups(resp, rankingQueryName, req.GroupBy, req.OrderBy.Direction)
	return paginateWithBackfill(allMetricGroups, metadataMap, req.GroupBy, req.Offset, req.Limit), nil
}

// applyHostsActiveStatusFilter modifies req.Filter.Expression to include an IN/NOT IN
// clause based on FilterByStatus and the set of active hosts.
// Returns true if the caller should short-circuit with an empty result (ACTIVE
// requested but no hosts are active).
func (m *module) applyHostsActiveStatusFilter(req *inframonitoringtypes.HostsListRequest, activeHostsMap map[string]bool) (shouldShortCircuit bool) {
	if req.FilterByStatus != inframonitoringtypes.HostStatusActive && req.FilterByStatus != inframonitoringtypes.HostStatusInactive {
		return false
	}

	activeHosts := make([]string, 0, len(activeHostsMap))
	for host := range activeHostsMap {
		activeHosts = append(activeHosts, fmt.Sprintf("'%s'", host))
	}

	if len(activeHosts) == 0 {
		return req.FilterByStatus == inframonitoringtypes.HostStatusActive
	}

	op := "IN"
	if req.FilterByStatus == inframonitoringtypes.HostStatusInactive {
		op = "NOT IN"
	}
	if req.Filter == nil {
		req.Filter = &qbtypes.Filter{}
	}
	statusClause := fmt.Sprintf("%s %s (%s)", hostNameAttrKey, op, strings.Join(activeHosts, ", "))
	req.Filter.Expression = mergeFilterExpressions(req.Filter.Expression, statusClause)
	return false
}
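
// Illustrative sketch (not part of the original change): with FilterByStatus set to
// active and active hosts web-1 and web-2, an existing filter "os.type = 'linux'"
// becomes
//
//	(os.type = 'linux') AND (host.name IN ('web-1', 'web-2'))
//
// while the inactive status would produce the same clause with NOT IN.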

func (m *module) getHostsTableMetadata(ctx context.Context, req *inframonitoringtypes.HostsListRequest) (map[string]map[string]string, error) {
	var nonGroupByAttrs []string
	for _, key := range hostAttrKeysForMetadata {
		if !isKeyInGroupByAttrs(req.GroupBy, key) {
			nonGroupByAttrs = append(nonGroupByAttrs, key)
		}
	}
	metadataMap, err := m.getMetadata(ctx, hostsTableMetricNamesList, req.GroupBy, nonGroupByAttrs, req.Filter, req.Start, req.End)
	if err != nil {
		return nil, err
	}
	return metadataMap, nil
}

// buildHostRecords constructs the final list of HostRecords for a page.
// Groups that had no metric data get default values of -1.
func (m *module) buildHostRecords(
	resp *qbtypes.QueryRangeResponse,
	pageGroups []map[string]string,
	groupBy []qbtypes.GroupByKey,
	metadataMap map[string]map[string]string,
	activeHostsMap map[string]bool,
) []inframonitoringtypes.HostRecord {
	metricsMap := parseFullQueryResponse(resp, groupBy)

	records := make([]inframonitoringtypes.HostRecord, 0, len(pageGroups))
	for _, labels := range pageGroups {
		compositeKey := compositeKeyFromLabels(labels, groupBy)
		hostName := labels[hostNameAttrKey]

		var activeStatus string
		if hostName != "" {
			if activeHostsMap[hostName] {
				activeStatus = inframonitoringtypes.HostStatusActive.StringValue()
			} else {
				activeStatus = inframonitoringtypes.HostStatusInactive.StringValue()
			}
		}

		record := inframonitoringtypes.HostRecord{
			HostName:  hostName,
			Status:    activeStatus,
			CPU:       -1,
			Memory:    -1,
			Wait:      -1,
			Load15:    -1,
			DiskUsage: -1,
			Meta:      map[string]interface{}{},
		}

		if metrics, ok := metricsMap[compositeKey]; ok {
			if v, exists := metrics["F1"]; exists {
				record.CPU = v
			}
			if v, exists := metrics["F2"]; exists {
				record.Memory = v
			}
			if v, exists := metrics["F3"]; exists {
				record.Wait = v
			}
			if v, exists := metrics["F4"]; exists {
				record.DiskUsage = v
			}
			if v, exists := metrics["G"]; exists {
				record.Load15 = v
			}
		}

		if attrs, ok := metadataMap[compositeKey]; ok {
			for k, v := range attrs {
				record.Meta[k] = v
			}
		}

		records = append(records, record)
	}
	return records
}

// getActiveHosts returns the set of host names that have reported metrics recently (since sinceUnixMilli).
// It queries distributed_metadata for hosts where last_reported_unix_milli >= sinceUnixMilli.
// TODO(nikhilmantri0902): This method does not account for custom group-by keys. If the API
// request groups by anything other than host.name, its response is effectively useless:
// with a different group-by we should instead show the count of active/inactive hosts per
// group. We need a way to determine active groups based on the group-by keys in the request.
func (m *module) getActiveHosts(ctx context.Context, metricNames []string, hostNameAttr string) (map[string]bool, error) {
	sinceUnixMilli := time.Now().Add(-10 * time.Minute).UTC().UnixMilli()

	sb := sqlbuilder.NewSelectBuilder()
	sb.Distinct()
	sb.Select("attr_string_value")
	sb.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, telemetrymetrics.AttributesMetadataTableName))
	sb.Where(
		sb.In("metric_name", sqlbuilder.List(metricNames)),
		sb.E("attr_name", hostNameAttr),
		sb.GE("last_reported_unix_milli", sinceUnixMilli),
	)

	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	rows, err := m.telemetryStore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	activeHosts := make(map[string]bool)
	for rows.Next() {
		var hostName string
		if err := rows.Scan(&hostName); err != nil {
			return nil, err
		}
		if hostName != "" {
			activeHosts[hostName] = true
		}
	}

	if err := rows.Err(); err != nil {
		return nil, err
	}

	return activeHosts, nil
}
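
// Illustrative sketch (not part of the original change): assuming DBName resolves to
// signoz_metrics, the builder above emits roughly
//
//	SELECT DISTINCT attr_string_value
//	FROM signoz_metrics.distributed_metadata
//	WHERE metric_name IN (?, ...) AND attr_name = ? AND last_reported_unix_milli >= ?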

229 pkg/modules/inframonitoring/implinframonitoring/module.go Normal file
@@ -0,0 +1,229 @@
package implinframonitoring

import (
	"context"
	"log/slog"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/inframonitoring"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
	"github.com/SigNoz/signoz/pkg/telemetrystore"
	"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type module struct {
	telemetryStore         telemetrystore.TelemetryStore
	telemetryMetadataStore telemetrytypes.MetadataStore
	querier                querier.Querier
	fieldMapper            qbtypes.FieldMapper
	condBuilder            qbtypes.ConditionBuilder
	logger                 *slog.Logger
	config                 inframonitoring.Config
}

// NewModule constructs the inframonitoring module with the provided dependencies.
func NewModule(
	telemetryStore telemetrystore.TelemetryStore,
	telemetryMetadataStore telemetrytypes.MetadataStore,
	querier querier.Querier,
	providerSettings factory.ProviderSettings,
	cfg inframonitoring.Config,
) inframonitoring.Module {
	fieldMapper := telemetrymetrics.NewFieldMapper()
	condBuilder := telemetrymetrics.NewConditionBuilder(fieldMapper)
	return &module{
		telemetryStore:         telemetryStore,
		telemetryMetadataStore: telemetryMetadataStore,
		querier:                querier,
		fieldMapper:            fieldMapper,
		condBuilder:            condBuilder,
		logger:                 providerSettings.Logger,
		config:                 cfg,
	}
}

func (m *module) HostsList(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.HostsListRequest) (*inframonitoringtypes.HostsListResponse, error) {
	if err := req.Validate(); err != nil {
		return nil, err
	}

	resp := &inframonitoringtypes.HostsListResponse{}

	// Default to ordering by CPU.
	if req.OrderBy == nil {
		req.OrderBy = &qbtypes.OrderBy{
			Key: qbtypes.OrderByKey{
				TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
					Name: "cpu",
				},
			},
			Direction: qbtypes.OrderDirectionDesc,
		}
	}

	if err := m.validateOrderBy(req.OrderBy, orderByToHostsQueryNames); err != nil {
		return nil, err
	}

	// Default to grouping by host name.
	if len(req.GroupBy) == 0 {
		req.GroupBy = []qbtypes.GroupByKey{hostNameGroupByKey}
		resp.Type = ResponseTypeList
	} else {
		resp.Type = ResponseTypeGroupedList
	}

	// 1. Check if any host metrics exist and get the earliest retention time.
	//    If no host metrics exist, return early — the UI shows the onboarding guide.
	// 2. If metrics exist but req.End is before the earliest reported time, convey the retention boundary.
	count, minFirstReportedUnixMilli, err := m.getMetricsExistenceAndEarliestTime(ctx, hostsTableMetricNamesList)
	if err != nil {
		return nil, err
	}
	if count == 0 {
		resp.SentAnyMetricsData = false
		resp.Records = []inframonitoringtypes.HostRecord{}
		resp.Total = 0
		return resp, nil
	}
	resp.SentAnyMetricsData = true
	if req.End < int64(minFirstReportedUnixMilli) {
		resp.EndTimeBeforeRetention = true
		resp.Records = []inframonitoringtypes.HostRecord{}
		resp.Total = 0
		return resp, nil
	}

	// Determine active hosts: those with metrics reported in the last 10 minutes.
	activeHostsMap, err := m.getActiveHosts(ctx, hostsTableMetricNamesList, hostNameAttrKey)
	if err != nil {
		return nil, err
	}

	// applyHostsActiveStatusFilter mutates req.Filter, ANDing in the active-hosts
	// clause, when req.FilterByStatus is set; a true return means we short-circuit
	// with an empty result.
	if m.applyHostsActiveStatusFilter(req, activeHostsMap) {
		resp.Records = []inframonitoringtypes.HostRecord{}
		resp.Total = 0
		return resp, nil
	}

	metadataMap, err := m.getHostsTableMetadata(ctx, req)
	if err != nil {
		return nil, err
	}
	if metadataMap == nil {
		metadataMap = make(map[string]map[string]string)
	}

	resp.Total = len(metadataMap)

	pageGroups, err := m.getTopHostGroups(ctx, orgID, req, metadataMap)
	if err != nil {
		return nil, err
	}

	if len(pageGroups) == 0 {
		resp.Records = []inframonitoringtypes.HostRecord{}
		return resp, nil
	}

	hostsFilterExpr := ""
	if req.Filter != nil {
		hostsFilterExpr = req.Filter.Expression
	}
	fullQueryReq := buildFullQueryRequest(req.Start, req.End, hostsFilterExpr, req.GroupBy, pageGroups, m.newHostsTableListQuery())
	queryResp, err := m.querier.QueryRange(ctx, orgID, fullQueryReq)
	if err != nil {
		return nil, err
	}

	resp.Records = m.buildHostRecords(queryResp, pageGroups, req.GroupBy, metadataMap, activeHostsMap)

	return resp, nil
}

func (m *module) PodsList(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PodsListRequest) (*inframonitoringtypes.PodsListResponse, error) {
	if err := req.Validate(); err != nil {
		return nil, err
	}

	resp := &inframonitoringtypes.PodsListResponse{}

	if req.OrderBy == nil {
		req.OrderBy = &qbtypes.OrderBy{
			Key: qbtypes.OrderByKey{
				TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
					Name: "cpu",
				},
			},
			Direction: qbtypes.OrderDirectionDesc,
		}
	}

	if err := m.validateOrderBy(req.OrderBy, orderByToPodsQueryNames); err != nil {
		return nil, err
	}

	if len(req.GroupBy) == 0 {
		req.GroupBy = []qbtypes.GroupByKey{podUIDGroupByKey}
		resp.Type = ResponseTypeList
	} else {
		resp.Type = ResponseTypeGroupedList
	}

	count, minFirstReportedUnixMilli, err := m.getMetricsExistenceAndEarliestTime(ctx, podsTableMetricNamesList)
	if err != nil {
		return nil, err
	}
	if count == 0 {
		resp.SentAnyMetricsData = false
		resp.Records = []inframonitoringtypes.PodRecord{}
		resp.Total = 0
		return resp, nil
	}
	resp.SentAnyMetricsData = true
	if req.End < int64(minFirstReportedUnixMilli) {
		resp.EndTimeBeforeRetention = true
		resp.Records = []inframonitoringtypes.PodRecord{}
		resp.Total = 0
		return resp, nil
	}

	metadataMap, err := m.getPodsTableMetadata(ctx, req)
	if err != nil {
		return nil, err
	}
	if metadataMap == nil {
		metadataMap = make(map[string]map[string]string)
	}

	resp.Total = len(metadataMap)

	pageGroups, err := m.getTopPodGroups(ctx, orgID, req, metadataMap)
	if err != nil {
		return nil, err
	}

	if len(pageGroups) == 0 {
		resp.Records = []inframonitoringtypes.PodRecord{}
		return resp, nil
	}

	filterExpr := ""
	if req.Filter != nil {
		filterExpr = req.Filter.Expression
	}
	fullQueryReq := buildFullQueryRequest(req.Start, req.End, filterExpr, req.GroupBy, pageGroups, m.newPodsTableListQuery())
	queryResp, err := m.querier.QueryRange(ctx, orgID, fullQueryReq)
	if err != nil {
		return nil, err
	}

	resp.Records = m.buildPodRecords(queryResp, pageGroups, req.GroupBy, metadataMap, req.End)

	return resp, nil
}

346 pkg/modules/inframonitoring/implinframonitoring/pods.go Normal file
@@ -0,0 +1,346 @@
package implinframonitoring

import (
	"context"
	"slices"
	"time"

	"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

var (
	podUIDAttrKey       = "k8s.pod.uid"
	podStartTimeAttrKey = "k8s.pod.start_time"
)

var podUIDGroupByKey = qbtypes.GroupByKey{
	TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
		Name:          podUIDAttrKey,
		FieldContext:  telemetrytypes.FieldContextResource,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	},
}

var podsTableMetricNamesList = []string{
	"k8s.pod.cpu.usage",
	"k8s.pod.cpu_request_utilization",
	"k8s.pod.cpu_limit_utilization",
	"k8s.pod.memory.working_set",
	"k8s.pod.memory_request_utilization",
	"k8s.pod.memory_limit_utilization",
	"k8s.pod.phase",
}

var podAttrKeysForMetadata = []string{
	"k8s.pod.uid",
	"k8s.pod.name",
	"k8s.namespace.name",
	"k8s.node.name",
	"k8s.deployment.name",
	"k8s.statefulset.name",
	"k8s.daemonset.name",
	"k8s.job.name",
	"k8s.cronjob.name",
	"k8s.cluster.name",
	"k8s.pod.start_time",
}

var orderByToPodsQueryNames = map[string][]string{
	"cpu":            {"A"},
	"cpu_request":    {"B"},
	"cpu_limit":      {"C"},
	"memory":         {"D"},
	"memory_request": {"E"},
	"memory_limit":   {"F"},
	"phase":          {"G"},
}

func phaseNumberToString(v float64) string {
	switch int(v) {
	case 1:
		return "pending"
	case 2:
		return "running"
	case 3:
		return "succeeded"
	case 4:
		return "failed"
	default:
		return ""
	}
}
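
// Note (illustrative, not part of the original change): this numeric mapping follows
// the OpenTelemetry k8s.pod.phase convention (1=Pending, 2=Running, 3=Succeeded,
// 4=Failed; 5/Unknown and any other value fall through to ""). For example,
// phaseNumberToString(2) == "running".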
|
||||
|
||||
func (m *module) newPodsTableListQuery() *qbtypes.QueryRangeRequest {
	return &qbtypes.QueryRangeRequest{
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: []qbtypes.QueryEnvelope{
				// Query A: CPU usage
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "A",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.cpu.usage",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
				// Query B: CPU request utilization
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "B",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.cpu_request_utilization",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationAvg,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
				// Query C: CPU limit utilization
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "C",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.cpu_limit_utilization",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationAvg,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
				// Query D: Memory working set
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "D",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.memory.working_set",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationSum,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
				// Query E: Memory request utilization
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "E",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.memory_request_utilization",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationAvg,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
				// Query F: Memory limit utilization
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "F",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.memory_limit_utilization",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationAvg,
								SpaceAggregation: metrictypes.SpaceAggregationAvg,
								ReduceTo:         qbtypes.ReduceToAvg,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
				// Query G: Pod phase (latest value per pod)
				{
					Type: qbtypes.QueryTypeBuilder,
					Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
						Name:   "G",
						Signal: telemetrytypes.SignalMetrics,
						Aggregations: []qbtypes.MetricAggregation{
							{
								MetricName:       "k8s.pod.phase",
								Temporality:      metrictypes.Unspecified,
								TimeAggregation:  metrictypes.TimeAggregationLatest,
								SpaceAggregation: metrictypes.SpaceAggregationMax,
								ReduceTo:         qbtypes.ReduceToLast,
							},
						},
						GroupBy:  []qbtypes.GroupByKey{podUIDGroupByKey},
						Disabled: false,
					},
				},
			},
		},
	}
}

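A minimal sketch of how this request template might be executed; the Start/End assignment and the call through m.querier mirror getTopPodGroups below, but the snippet itself is illustrative rather than code from this diff:

	// Sketch: fill in the time window and run the pods table query.
	req := m.newPodsTableListQuery()
	req.Start = uint64(start) // start, end: epoch milliseconds (assumed)
	req.End = uint64(end)
	resp, err := m.querier.QueryRange(ctx, orgID, req)
	if err != nil {
		return nil, err
	}
	// resp now carries one scalar series per pod UID for queries A..G.
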
func (m *module) getTopPodGroups(
	ctx context.Context,
	orgID valuer.UUID,
	req *inframonitoringtypes.PodsListRequest,
	metadataMap map[string]map[string]string,
) ([]map[string]string, error) {
	orderByKey := req.OrderBy.Key.Name
	queryNamesForOrderBy := orderByToPodsQueryNames[orderByKey]
	rankingQueryName := queryNamesForOrderBy[len(queryNamesForOrderBy)-1]

	topReq := &qbtypes.QueryRangeRequest{
		Start:       uint64(req.Start),
		End:         uint64(req.End),
		RequestType: qbtypes.RequestTypeScalar,
		CompositeQuery: qbtypes.CompositeQuery{
			Queries: make([]qbtypes.QueryEnvelope, 0, len(queryNamesForOrderBy)),
		},
	}

	for _, envelope := range m.newPodsTableListQuery().CompositeQuery.Queries {
		if !slices.Contains(queryNamesForOrderBy, envelope.GetQueryName()) {
			continue
		}
		copied := envelope
		if copied.Type == qbtypes.QueryTypeBuilder {
			existingExpr := ""
			if f := copied.GetFilter(); f != nil {
				existingExpr = f.Expression
			}
			reqFilterExpr := ""
			if req.Filter != nil {
				reqFilterExpr = req.Filter.Expression
			}
			merged := mergeFilterExpressions(existingExpr, reqFilterExpr)
			copied.SetFilter(&qbtypes.Filter{Expression: merged})
			copied.SetGroupBy(req.GroupBy)
		}
		topReq.CompositeQuery.Queries = append(topReq.CompositeQuery.Queries, copied)
	}

	resp, err := m.querier.QueryRange(ctx, orgID, topReq)
	if err != nil {
		return nil, err
	}

	allMetricGroups := parseAndSortGroups(resp, rankingQueryName, req.GroupBy, req.OrderBy.Direction)
	return paginateWithBackfill(allMetricGroups, metadataMap, req.GroupBy, req.Offset, req.Limit), nil
}

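mergeFilterExpressions is used above but not shown in this diff; a plausible minimal implementation (an assumption, not the repository's helper) would AND the two expressions whenever both are non-empty:

	// Hypothetical sketch of the helper: combine a query's built-in filter
	// with the request filter using AND semantics.
	func mergeFilterExpressions(a, b string) string {
		switch {
		case a == "":
			return b
		case b == "":
			return a
		default:
			return "(" + a + ") AND (" + b + ")"
		}
	}
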
func (m *module) getPodsTableMetadata(ctx context.Context, req *inframonitoringtypes.PodsListRequest) (map[string]map[string]string, error) {
	var nonGroupByAttrs []string
	for _, key := range podAttrKeysForMetadata {
		if !isKeyInGroupByAttrs(req.GroupBy, key) {
			nonGroupByAttrs = append(nonGroupByAttrs, key)
		}
	}
	return m.getMetadata(ctx, podsTableMetricNamesList, req.GroupBy, nonGroupByAttrs, req.Filter, req.Start, req.End)
}

func (m *module) buildPodRecords(
	resp *qbtypes.QueryRangeResponse,
	pageGroups []map[string]string,
	groupBy []qbtypes.GroupByKey,
	metadataMap map[string]map[string]string,
	reqEnd int64,
) []inframonitoringtypes.PodRecord {
	metricsMap := parseFullQueryResponse(resp, groupBy)

	records := make([]inframonitoringtypes.PodRecord, 0, len(pageGroups))
	for _, labels := range pageGroups {
		compositeKey := compositeKeyFromLabels(labels, groupBy)
		podUID := labels[podUIDAttrKey]

		record := inframonitoringtypes.PodRecord{
			PodUID:           podUID,
			PodCPU:           -1,
			PodCPURequest:    -1,
			PodCPULimit:      -1,
			PodMemory:        -1,
			PodMemoryRequest: -1,
			PodMemoryLimit:   -1,
			PodAge:           -1,
			Meta:             map[string]interface{}{},
		}

		if metrics, ok := metricsMap[compositeKey]; ok {
			if v, exists := metrics["A"]; exists {
				record.PodCPU = v
			}
			if v, exists := metrics["B"]; exists {
				record.PodCPURequest = v
			}
			if v, exists := metrics["C"]; exists {
				record.PodCPULimit = v
			}
			if v, exists := metrics["D"]; exists {
				record.PodMemory = v
			}
			if v, exists := metrics["E"]; exists {
				record.PodMemoryRequest = v
			}
			if v, exists := metrics["F"]; exists {
				record.PodMemoryLimit = v
			}
			if v, exists := metrics["G"]; exists {
				record.PodPhase = phaseNumberToString(v)
			}
		}

		if attrs, ok := metadataMap[compositeKey]; ok {
			if startTimeStr, exists := attrs[podStartTimeAttrKey]; exists && startTimeStr != "" {
				if t, err := time.Parse(time.RFC3339, startTimeStr); err == nil {
					startTimeMs := t.UnixMilli()
					if startTimeMs > 0 {
						record.PodAge = reqEnd - startTimeMs
					}
				}
			}
			for k, v := range attrs {
				record.Meta[k] = v
			}
		}

		records = append(records, record)
	}
	return records
}

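PodAge above is derived by subtracting the pod's RFC3339 k8s.pod.start_time from the query window's end. A standalone illustration of that conversion (the timestamp is made up):

	// Illustrative: pod age in milliseconds at the end of the query window.
	reqEnd := time.Now().UnixMilli()
	if t, err := time.Parse(time.RFC3339, "2024-05-01T10:00:00Z"); err == nil {
		ageMs := reqEnd - t.UnixMilli()
		fmt.Println(ageMs)
	}
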
pkg/modules/inframonitoring/inframonitoring.go (new file, 19 lines)
@@ -0,0 +1,19 @@
+package inframonitoring
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/SigNoz/signoz/pkg/types/inframonitoringtypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
+)
+
+type Handler interface {
+	HostsList(http.ResponseWriter, *http.Request)
+	PodsList(http.ResponseWriter, *http.Request)
+}
+
+type Module interface {
+	HostsList(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.HostsListRequest) (*inframonitoringtypes.HostsListResponse, error)
+	PodsList(ctx context.Context, orgID valuer.UUID, req *inframonitoringtypes.PodsListRequest) (*inframonitoringtypes.PodsListResponse, error)
+}

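A hedged sketch of how a handler might sit on top of the new Module interface; the JSON decoding and wiring here are illustrative, not part of this diff:

	// Illustrative wiring of Module.PodsList behind an HTTP handler.
	func podsListHandler(mod inframonitoring.Module, orgID valuer.UUID) http.HandlerFunc {
		return func(w http.ResponseWriter, r *http.Request) {
			var req inframonitoringtypes.PodsListRequest
			if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}
			resp, err := mod.PodsList(r.Context(), orgID, &req)
			if err != nil {
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
			_ = json.NewEncoder(w).Encode(resp)
		}
	}
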
@@ -32,7 +32,7 @@ func newConfig() factory.Config {
 			Domain: "signozserviceaccount.com",
 		},
 		Analytics: AnalyticsConfig{
-			Enabled: false,
+			Enabled: true,
 		},
 	}
 }

@@ -1,19 +0,0 @@
-package tracedetail
-
-import (
-	"context"
-	"net/http"
-
-	"github.com/SigNoz/signoz/pkg/types/tracedetailtypes"
-	"github.com/SigNoz/signoz/pkg/valuer"
-)
-
-// Handler exposes HTTP handlers for trace detail APIs.
-type Handler interface {
-	GetWaterfall(http.ResponseWriter, *http.Request)
-}
-
-// Module defines the business logic for trace detail operations.
-type Module interface {
-	GetWaterfall(ctx context.Context, orgID valuer.UUID, traceID string, req *tracedetailtypes.WaterfallRequest) (*tracedetailtypes.WaterfallResponse, error)
-}

@@ -40,7 +40,6 @@ type querier struct {
 	promEngine               prometheus.Prometheus
 	traceStmtBuilder         qbtypes.StatementBuilder[qbtypes.TraceAggregation]
 	logStmtBuilder           qbtypes.StatementBuilder[qbtypes.LogAggregation]
-	auditStmtBuilder         qbtypes.StatementBuilder[qbtypes.LogAggregation]
 	metricStmtBuilder        qbtypes.StatementBuilder[qbtypes.MetricAggregation]
 	meterStmtBuilder         qbtypes.StatementBuilder[qbtypes.MetricAggregation]
 	traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder
@@ -57,7 +56,6 @@ func New(
 	promEngine prometheus.Prometheus,
 	traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
 	logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
-	auditStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
 	metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
 	meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
 	traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder,
@@ -71,7 +69,6 @@ func New(
 		promEngine:               promEngine,
 		traceStmtBuilder:         traceStmtBuilder,
 		logStmtBuilder:           logStmtBuilder,
-		auditStmtBuilder:         auditStmtBuilder,
 		metricStmtBuilder:        metricStmtBuilder,
 		meterStmtBuilder:         meterStmtBuilder,
 		traceOperatorStmtBuilder: traceOperatorStmtBuilder,
@@ -364,11 +361,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
 	case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
 		spec.ShiftBy = extractShiftFromBuilderQuery(spec)
 		timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
-		stmtBuilder := q.logStmtBuilder
-		if spec.Source == telemetrytypes.SourceAudit {
-			stmtBuilder = q.auditStmtBuilder
-		}
-		bq := newBuilderQuery(q.logger, q.telemetryStore, stmtBuilder, spec, timeRange, req.RequestType, tmplVars)
+		bq := newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, tmplVars)
 		queries[spec.Name] = bq
 		steps[spec.Name] = spec.StepInterval
 	case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
@@ -390,15 +383,15 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
 				spec.Aggregations[i].Temporality = temp
 			}
 		}
+		if spec.Aggregations[i].Temporality == metrictypes.Unknown {
+			missingMetrics = append(missingMetrics, spec.Aggregations[i].MetricName)
+			continue
+		}
 		if spec.Aggregations[i].MetricName != "" && spec.Aggregations[i].Type == metrictypes.UnspecifiedType {
 			if foundMetricType, ok := metricTypes[spec.Aggregations[i].MetricName]; ok && foundMetricType != metrictypes.UnspecifiedType {
 				spec.Aggregations[i].Type = foundMetricType
 			}
 		}
-		if spec.Aggregations[i].Type == metrictypes.UnspecifiedType {
-			missingMetrics = append(missingMetrics, spec.Aggregations[i].MetricName)
-			continue
-		}
 		presentAggregations = append(presentAggregations, spec.Aggregations[i])
 	}
 	if len(presentAggregations) == 0 {
@@ -557,11 +550,7 @@ func (q *querier) QueryRawStream(ctx context.Context, orgID valuer.UUID, req *qb
 		case <-tick:
 			// timestamp end is not specified here
 			timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: tsStart}, req.RequestType)
-			liveTailStmtBuilder := q.logStmtBuilder
-			if spec.Source == telemetrytypes.SourceAudit {
-				liveTailStmtBuilder = q.auditStmtBuilder
-			}
-			bq := newBuilderQuery(q.logger, q.telemetryStore, liveTailStmtBuilder, spec, timeRange, req.RequestType, map[string]qbtypes.VariableItem{
+			bq := newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, map[string]qbtypes.VariableItem{
 				"id": {
 					Value: updatedLogID,
 				},
@@ -861,11 +850,7 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
 		specCopy := qt.spec.Copy()
 		specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
 		adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
-		shiftStmtBuilder := q.logStmtBuilder
-		if qt.spec.Source == telemetrytypes.SourceAudit {
-			shiftStmtBuilder = q.auditStmtBuilder
-		}
-		return newBuilderQuery(q.logger, q.telemetryStore, shiftStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
+		return newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)

 	case *builderQuery[qbtypes.MetricAggregation]:
 		specCopy := qt.spec.Copy()

@@ -1,147 +0,0 @@
-package querier
-
-import (
-	"context"
-	"testing"
-	"time"
-
-	cmock "github.com/srikanthccv/ClickHouse-go-mock"
-
-	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
-	"github.com/SigNoz/signoz/pkg/telemetrystore"
-	"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
-	"github.com/SigNoz/signoz/pkg/types/metrictypes"
-	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
-	"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
-	"github.com/SigNoz/signoz/pkg/valuer"
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-)
-
-type queryMatcherAny struct{}
-
-func (m *queryMatcherAny) Match(string, string) error { return nil }
-
-// mockMetricStmtBuilder implements qbtypes.StatementBuilder[qbtypes.MetricAggregation]
-// and returns a fixed query string so the mock ClickHouse can match it.
-type mockMetricStmtBuilder struct{}
-
-func (m *mockMetricStmtBuilder) Build(_ context.Context, _, _ uint64, _ qbtypes.RequestType, _ qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], _ map[string]qbtypes.VariableItem) (*qbtypes.Statement, error) {
-	return &qbtypes.Statement{
-		Query: "SELECT ts, value FROM signoz_metrics",
-		Args:  nil,
-	}, nil
-}
-
-func TestQueryRange_MetricTypeMissing(t *testing.T) {
-	// When a metric has UnspecifiedType and is not found in the metadata store,
-	// the querier should return a not-found error, even if the request provides a temporality
-	providerSettings := instrumentationtest.New().ToProviderSettings()
-	metadataStore := telemetrytypestest.NewMockMetadataStore()
-
-	q := New(
-		providerSettings,
-		nil, // telemetryStore
-		metadataStore,
-		nil, // prometheus
-		nil, // traceStmtBuilder
-		nil, // logStmtBuilder
-		nil, // auditStmtBuilder
-		nil, // metricStmtBuilder
-		nil, // meterStmtBuilder
-		nil, // traceOperatorStmtBuilder
-		nil, // bucketCache
-	)
-
-	req := &qbtypes.QueryRangeRequest{
-		Start:       uint64(time.Now().Add(-5 * time.Minute).UnixMilli()),
-		End:         uint64(time.Now().UnixMilli()),
-		RequestType: qbtypes.RequestTypeTimeSeries,
-		CompositeQuery: qbtypes.CompositeQuery{
-			Queries: []qbtypes.QueryEnvelope{{
-				Type: qbtypes.QueryTypeBuilder,
-				Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
-					Name:         "A",
-					StepInterval: qbtypes.Step{Duration: time.Minute},
-					Aggregations: []qbtypes.MetricAggregation{
-						{
-							MetricName:       "unknown_metric",
-							Temporality:      metrictypes.Cumulative,
-							TimeAggregation:  metrictypes.TimeAggregationRate,
-							SpaceAggregation: metrictypes.SpaceAggregationSum,
-						},
-					},
-					Signal: telemetrytypes.SignalMetrics,
-				},
-			}},
-		},
-	}
-
-	_, err := q.QueryRange(context.Background(), valuer.GenerateUUID(), req)
-	require.Error(t, err)
-	assert.Contains(t, err.Error(), "could not find the metric unknown_metric")
-}
-
-func TestQueryRange_MetricTypeFromStore(t *testing.T) {
-	// When a metric has UnspecifiedType but the metadata store returns a valid type,
-	// the metric should not be treated as missing.
-	providerSettings := instrumentationtest.New().ToProviderSettings()
-	metadataStore := telemetrytypestest.NewMockMetadataStore()
-	metadataStore.TypeMap["my_metric"] = metrictypes.SumType
-	metadataStore.TemporalityMap["my_metric"] = metrictypes.Cumulative
-
-	telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
-
-	cols := []cmock.ColumnType{
-		{Name: "ts", Type: "DateTime"},
-		{Name: "value", Type: "Float64"},
-	}
-	rows := cmock.NewRows(cols, [][]any{
-		{time.Now(), float64(42)},
-	})
-	telemetryStore.Mock().
-		ExpectQuery("SELECT any").
-		WillReturnRows(rows)
-
-	q := New(
-		providerSettings,
-		telemetryStore,
-		metadataStore,
-		nil,                      // prometheus
-		nil,                      // traceStmtBuilder
-		nil,                      // logStmtBuilder
-		nil,                      // auditStmtBuilder
-		&mockMetricStmtBuilder{}, // metricStmtBuilder
-		nil,                      // meterStmtBuilder
-		nil,                      // traceOperatorStmtBuilder
-		nil,                      // bucketCache
-	)
-
-	req := &qbtypes.QueryRangeRequest{
-		Start:       uint64(time.Now().Add(-5 * time.Minute).UnixMilli()),
-		End:         uint64(time.Now().UnixMilli()),
-		RequestType: qbtypes.RequestTypeTimeSeries,
-		CompositeQuery: qbtypes.CompositeQuery{
-			Queries: []qbtypes.QueryEnvelope{{
-				Type: qbtypes.QueryTypeBuilder,
-				Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
-					Name:         "A",
-					StepInterval: qbtypes.Step{Duration: time.Minute},
-					Aggregations: []qbtypes.MetricAggregation{
-						{
-							MetricName:       "my_metric",
-							TimeAggregation:  metrictypes.TimeAggregationRate,
-							SpaceAggregation: metrictypes.SpaceAggregationSum,
-						},
-					},
-					Signal: telemetrytypes.SignalMetrics,
-				},
-			}},
-		},
-	}
-
-	resp, err := q.QueryRange(context.Background(), valuer.GenerateUUID(), req)
-	require.NoError(t, err)
-	require.NotNil(t, resp)
-}

@@ -9,7 +9,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/prometheus"
 	"github.com/SigNoz/signoz/pkg/querier"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
-	"github.com/SigNoz/signoz/pkg/telemetryaudit"
 	"github.com/SigNoz/signoz/pkg/telemetrylogs"
 	"github.com/SigNoz/signoz/pkg/telemetrymetadata"
 	"github.com/SigNoz/signoz/pkg/telemetrymeter"
@@ -64,11 +63,6 @@ func newProvider(
 		telemetrylogs.TagAttributesV2TableName,
 		telemetrylogs.LogAttributeKeysTblName,
 		telemetrylogs.LogResourceKeysTblName,
-		telemetryaudit.DBName,
-		telemetryaudit.AuditLogsTableName,
-		telemetryaudit.TagAttributesTableName,
-		telemetryaudit.LogAttributeKeysTblName,
-		telemetryaudit.LogResourceKeysTblName,
 		telemetrymetadata.DBName,
 		telemetrymetadata.AttributesMetadataLocalTableName,
 		telemetrymetadata.ColumnEvolutionMetadataTableName,
@@ -88,13 +82,13 @@ func newProvider(
 		telemetryStore,
 	)

-	// Create trace operator statement builder
+	// ADD: Create trace operator statement builder
 	traceOperatorStmtBuilder := telemetrytraces.NewTraceOperatorStatementBuilder(
 		settings,
 		telemetryMetadataStore,
 		traceFieldMapper,
 		traceConditionBuilder,
-		traceStmtBuilder,
+		traceStmtBuilder, // Pass the regular trace statement builder
 		traceAggExprRewriter,
 	)

@@ -118,26 +112,6 @@ func newProvider(
 		telemetrylogs.GetBodyJSONKey,
 	)

-	// Create audit statement builder
-	auditFieldMapper := telemetryaudit.NewFieldMapper()
-	auditConditionBuilder := telemetryaudit.NewConditionBuilder(auditFieldMapper)
-	auditAggExprRewriter := querybuilder.NewAggExprRewriter(
-		settings,
-		telemetryaudit.DefaultFullTextColumn,
-		auditFieldMapper,
-		auditConditionBuilder,
-		nil,
-	)
-	auditStmtBuilder := telemetryaudit.NewAuditQueryStatementBuilder(
-		settings,
-		telemetryMetadataStore,
-		auditFieldMapper,
-		auditConditionBuilder,
-		auditAggExprRewriter,
-		telemetryaudit.DefaultFullTextColumn,
-		nil,
-	)
-
 	// Create metric statement builder
 	metricFieldMapper := telemetrymetrics.NewFieldMapper()
 	metricConditionBuilder := telemetrymetrics.NewConditionBuilder(metricFieldMapper)
@@ -174,7 +148,6 @@ func newProvider(
 		prometheus,
 		traceStmtBuilder,
 		logStmtBuilder,
-		auditStmtBuilder,
 		metricStmtBuilder,
 		meterStmtBuilder,
 		traceOperatorStmtBuilder,

@@ -110,7 +110,7 @@ func WithEvalDelay(dur valuer.TextDuration) RuleOption {

 func WithLogger(logger *slog.Logger) RuleOption {
 	return func(r *BaseRule) {
-		r.logger = logger.With(slog.String("rule.id", r.id))
+		r.logger = logger
 	}
 }

@@ -248,7 +248,7 @@ func (r *BaseRule) SelectedQuery(ctx context.Context) string {
 	if r.ruleCondition.SelectedQuery != "" {
 		return r.ruleCondition.SelectedQuery
 	}
-	r.logger.WarnContext(ctx, "missing selected query")
+	r.logger.WarnContext(ctx, "missing selected query", slog.String("rule.id", r.ID()))
 	return r.ruleCondition.SelectedQueryName()
 }

@@ -368,7 +368,7 @@ func (r *BaseRule) SendAlerts(ctx context.Context, ts time.Time, resendDelay tim
 			alerts = append(alerts, &anew)
 		}
 	})
-	notifyFunc(ctx, orgID, alerts...)
+	notifyFunc(ctx, orgID, "", alerts...)
 }

 func (r *BaseRule) ForEachActiveAlert(f func(*ruletypes.Alert)) {
@@ -380,13 +380,13 @@ func (r *BaseRule) ForEachActiveAlert(f func(*ruletypes.Alert)) {
 	}
 }

-func (r *BaseRule) RecordRuleStateHistory(ctx context.Context, itemsToAdd []rulestatehistorytypes.RuleStateHistory) error {
+func (r *BaseRule) RecordRuleStateHistory(ctx context.Context, prevState, currentState ruletypes.AlertState, itemsToAdd []rulestatehistorytypes.RuleStateHistory) error {
 	if r.ruleStateHistoryModule == nil {
 		return nil
 	}

 	if err := r.ruleStateHistoryModule.RecordRuleStateHistory(ctx, r.ID(), r.handledRestart, itemsToAdd); err != nil {
-		r.logger.ErrorContext(ctx, "error while recording rule state history", errors.Attr(err), slog.Any("items_to_add", itemsToAdd))
+		r.logger.ErrorContext(ctx, "error while recording rule state history", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("items_to_add", itemsToAdd))
 		return err
 	}
 	r.handledRestart = true
@@ -580,12 +580,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
 			// Check if first_seen + delay has passed
 			if maxFirstSeen+newGroupEvalDelayMs > evalTimeMs {
 				// Still within grace period, skip this series
-				r.logger.InfoContext(
-					ctx, "skipping new series",
-					slog.Int("series.index", i), slog.Int64("series.max_first_seen", maxFirstSeen),
-					slog.Int64("eval.time_ms", evalTimeMs), slog.Int64("eval.delay_ms", newGroupEvalDelayMs),
-					slog.Any("series.labels", series[i].Labels),
-				)
+				r.logger.InfoContext(ctx, "skipping new series", slog.String("rule.id", r.ID()), slog.Int("series.index", i), slog.Int64("series.max_first_seen", maxFirstSeen), slog.Int64("eval.time_ms", evalTimeMs), slog.Int64("eval.delay_ms", newGroupEvalDelayMs), slog.Any("series.labels", series[i].Labels))
 				continue
 			}

@@ -595,11 +590,7 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*

 	skippedCount := len(series) - len(filteredSeries)
 	if skippedCount > 0 {
-		r.logger.InfoContext(
-			ctx, "filtered new series",
-			slog.Int("series.skipped_count", skippedCount), slog.Int("series.total_count", len(series)),
-			slog.Int64("eval.delay_ms", newGroupEvalDelayMs),
-		)
+		r.logger.InfoContext(ctx, "filtered new series", slog.String("rule.id", r.ID()), slog.Int("series.skipped_count", skippedCount), slog.Int("series.total_count", len(series)), slog.Int64("eval.delay_ms", newGroupEvalDelayMs))
 	}

 	return filteredSeries, nil
@@ -620,7 +611,7 @@ func (r *BaseRule) HandleMissingDataAlert(ctx context.Context, ts time.Time, has
 		return nil
 	}

-	r.logger.InfoContext(ctx, "no data found for rule condition")
+	r.logger.InfoContext(ctx, "no data found for rule condition", slog.String("rule.id", r.ID()))
 	lbls := ruletypes.NewBuilder()
 	if !r.lastTimestampWithDatapoints.IsZero() {
 		lbls.Set(ruletypes.LabelLastSeen, r.lastTimestampWithDatapoints.Format(ruletypes.AlertTimeFormat))

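The WithLogger change drops the logger pre-bound with rule.id, which is why each call site in the hunks above re-adds slog.String("rule.id", r.ID()) explicitly. A tiny standalone contrast of the two styles, using only the standard log/slog package:

	// Old style: bind the attribute once; every record carries rule.id.
	bound := slog.Default().With(slog.String("rule.id", "rule-123"))
	bound.Info("missing selected query")

	// New style: the attribute is passed explicitly at each call site.
	slog.Default().Info("missing selected query", slog.String("rule.id", "rule-123"))
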
@@ -438,7 +438,7 @@ func (m *Manager) editTask(_ context.Context, orgID valuer.UUID, rule *ruletypes
 		Logger:      m.opts.Logger,
 		Cache:       m.cache,
 		ManagerOpts: m.opts,
-		NotifyFunc:  m.notifyFunc,
+		NotifyFunc:  m.prepareNotifyFunc(),
 		SQLStore:    m.sqlstore,
 		OrgID:       orgID,
 	})
@@ -651,7 +651,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
 		Logger:      m.opts.Logger,
 		Cache:       m.cache,
 		ManagerOpts: m.opts,
-		NotifyFunc:  m.notifyFunc,
+		NotifyFunc:  m.prepareNotifyFunc(),
 		SQLStore:    m.sqlstore,
 		OrgID:       orgID,
 	})
@@ -752,65 +752,70 @@ func (m *Manager) TriggeredAlerts() []*ruletypes.NamedAlert {
 }

 // NotifyFunc sends notifications about a set of alerts generated by the given expression.
-type NotifyFunc func(ctx context.Context, orgID string, alerts ...*ruletypes.Alert)
+type NotifyFunc func(ctx context.Context, orgID string, expr string, alerts ...*ruletypes.Alert)

-// notifyFunc implements the NotifyFunc for a Notifier.
-func (m *Manager) notifyFunc(ctx context.Context, orgID string, alerts ...*ruletypes.Alert) {
-	var res []*alertmanagertypes.PostableAlert
+// prepareNotifyFunc implements the NotifyFunc for a Notifier.
+func (m *Manager) prepareNotifyFunc() NotifyFunc {
+	return func(ctx context.Context, orgID string, expr string, alerts ...*ruletypes.Alert) {
+		var res []*alertmanagertypes.PostableAlert

-	for _, alert := range alerts {
-		generatorURL := alert.GeneratorURL
+		for _, alert := range alerts {
+			generatorURL := alert.GeneratorURL

-		a := &alertmanagertypes.PostableAlert{
-			Annotations: alert.Annotations.Map(),
-			StartsAt:    strfmt.DateTime(alert.FiredAt),
-			Alert: alertmanagertypes.AlertModel{
-				Labels:       alert.Labels.Map(),
-				GeneratorURL: strfmt.URI(generatorURL),
-			},
-		}
-		if !alert.ResolvedAt.IsZero() {
-			a.EndsAt = strfmt.DateTime(alert.ResolvedAt)
-		} else {
-			a.EndsAt = strfmt.DateTime(alert.ValidUntil)
+			a := &alertmanagertypes.PostableAlert{
+				Annotations: alert.Annotations.Map(),
+				StartsAt:    strfmt.DateTime(alert.FiredAt),
+				Alert: alertmanagertypes.AlertModel{
+					Labels:       alert.Labels.Map(),
+					GeneratorURL: strfmt.URI(generatorURL),
+				},
+			}
+			if !alert.ResolvedAt.IsZero() {
+				a.EndsAt = strfmt.DateTime(alert.ResolvedAt)
+			} else {
+				a.EndsAt = strfmt.DateTime(alert.ValidUntil)
+			}
+
+			res = append(res, a)
 		}

-		res = append(res, a)
-	}
-
-	if len(alerts) > 0 {
-		m.alertmanager.PutAlerts(ctx, orgID, res)
+		if len(alerts) > 0 {
+			m.alertmanager.PutAlerts(ctx, orgID, res)
+		}
 	}
 }

-func (m *Manager) testNotifyFunc(ctx context.Context, orgID string, alerts ...*ruletypes.Alert) {
-	if len(alerts) == 0 {
-		return
-	}
-	ruleID := alerts[0].Labels.Map()[ruletypes.AlertRuleIDLabel]
-	receiverMap := make(map[*alertmanagertypes.PostableAlert][]string)
-	for _, alert := range alerts {
-		generatorURL := alert.GeneratorURL
+func (m *Manager) prepareTestNotifyFunc() NotifyFunc {
+	return func(ctx context.Context, orgID string, expr string, alerts ...*ruletypes.Alert) {
+		if len(alerts) == 0 {
+			return
+		}
+		ruleID := alerts[0].Labels.Map()[ruletypes.AlertRuleIDLabel]
+		receiverMap := make(map[*alertmanagertypes.PostableAlert][]string)
+		for _, alert := range alerts {
+			generatorURL := alert.GeneratorURL

-		a := &alertmanagertypes.PostableAlert{}
-		a.Annotations = alert.Annotations.Map()
-		a.StartsAt = strfmt.DateTime(alert.FiredAt)
-		labelsMap := alert.Labels.Map()
-		labelsMap[ruletypes.TestAlertLabel] = "true"
-		a.Alert = alertmanagertypes.AlertModel{
-			Labels:       labelsMap,
-			GeneratorURL: strfmt.URI(generatorURL),
-		}
-		if !alert.ResolvedAt.IsZero() {
-			a.EndsAt = strfmt.DateTime(alert.ResolvedAt)
-		} else {
-			a.EndsAt = strfmt.DateTime(alert.ValidUntil)
-		}
-		receiverMap[a] = alert.Receivers
-	}
-	err := m.alertmanager.TestAlert(ctx, orgID, ruleID, receiverMap)
-	if err != nil {
-		m.logger.ErrorContext(ctx, "failed to send test notification", errors.Attr(err))
+			a := &alertmanagertypes.PostableAlert{}
+			a.Annotations = alert.Annotations.Map()
+			a.StartsAt = strfmt.DateTime(alert.FiredAt)
+			labelsMap := alert.Labels.Map()
+			labelsMap[ruletypes.TestAlertLabel] = "true"
+			a.Alert = alertmanagertypes.AlertModel{
+				Labels:       labelsMap,
+				GeneratorURL: strfmt.URI(generatorURL),
+			}
+			if !alert.ResolvedAt.IsZero() {
+				a.EndsAt = strfmt.DateTime(alert.ResolvedAt)
+			} else {
+				a.EndsAt = strfmt.DateTime(alert.ValidUntil)
+			}
+			receiverMap[a] = alert.Receivers
+		}
+		err := m.alertmanager.TestAlert(ctx, orgID, ruleID, receiverMap)
+		if err != nil {
+			m.logger.ErrorContext(ctx, "failed to send test notification", errors.Attr(err))
+			return
+		}
 	}
 }

@@ -1036,7 +1041,7 @@ func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleS
 		Logger:      m.opts.Logger,
 		Cache:       m.cache,
 		ManagerOpts: m.opts,
-		NotifyFunc:  m.testNotifyFunc,
+		NotifyFunc:  m.prepareTestNotifyFunc(),
 		SQLStore:    m.sqlstore,
 		OrgID:       orgID,
 	})

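The Manager now hands rules a NotifyFunc produced by a factory method rather than a bound method value, and the callback gains an expr parameter. A reduced sketch of that closure pattern with simplified types (illustrative, not the repository's code):

	// Reduced illustration: prepareNotifyFunc returns the callback instead of
	// being the callback, so construction-time state can be captured in the
	// closure without changing every caller again.
	type Alert struct{ Name string }
	type NotifyFunc func(ctx context.Context, orgID string, expr string, alerts ...*Alert)

	func prepareNotifyFunc() NotifyFunc {
		return func(ctx context.Context, orgID string, expr string, alerts ...*Alert) {
			// convert alerts to postable form and push them to the alertmanager
		}
	}
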
@@ -48,13 +48,14 @@ func NewPromRule(
 		version:    postableRule.Version,
 		prometheus: prometheus,
 	}
+	p.logger = logger

 	query, err := p.getPqlQuery(context.Background())
 	if err != nil {
 		// can not generate a valid prom QL query
 		return nil, err
 	}
-	p.logger.Info("creating new prom rule", slog.String("rule.query", query))
+	logger.Info("creating new prom rule", slog.String("rule.id", id), slog.String("rule.query", query))
 	return &p, nil
 }

@@ -96,7 +97,7 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
 	if err != nil {
 		return nil, err
 	}
-	r.logger.InfoContext(ctx, "evaluating promql query", slog.String("rule.query", q))
+	r.logger.InfoContext(ctx, "evaluating promql query", slog.String("rule.id", r.ID()), slog.String("rule.query", q))
 	res, err := r.RunAlertQuery(ctx, q, start, end, interval)
 	if err != nil {
 		r.SetHealth(ruletypes.HealthBad)
@@ -116,7 +117,7 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
 	filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
 	// In case of error we log the error and continue with the original series
 	if filterErr != nil {
-		r.logger.ErrorContext(ctx, "error filtering new series", errors.Attr(filterErr))
+		r.logger.ErrorContext(ctx, "error filtering new series", slog.String("rule.id", r.ID()), errors.Attr(filterErr))
 	} else {
 		matrixToProcess = filteredSeries
 	}
@@ -128,8 +129,7 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
 	if !r.Condition().ShouldEval(series) {
 		r.logger.InfoContext(
 			ctx, "not enough data points to evaluate series, skipping",
-			slog.Int("series.num_points", len(series.Values)),
-			slog.Int("series.required_points", r.Condition().RequiredNumPoints),
+			"rule.id", r.ID(), "num_points", len(series.Values), "required_points", r.Condition().RequiredNumPoints,
 		)
 		continue
 	}
@@ -173,7 +173,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 	for _, lbl := range result.Metric {
 		l[lbl.Name] = lbl.Value
 	}
-	r.logger.DebugContext(ctx, "alerting for series", slog.Any("series", result))
+	r.logger.DebugContext(ctx, "alerting for series", slog.String("rule.id", r.ID()), slog.Any("series", result))

 	threshold := valueFormatter.Format(result.Target, result.TargetUnit)

@@ -193,7 +193,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 	result, err := tmpl.Expand()
 	if err != nil {
 		result = fmt.Sprintf("<error expanding template: %s>", err)
-		r.logger.WarnContext(ctx, "expanding alert template failed", errors.Attr(err), slog.Any("alert.template_data", tmplData))
+		r.logger.WarnContext(ctx, "expanding alert template failed", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.template_data", tmplData))
 	}
 	return result
 }
@@ -244,7 +244,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		}
 	}

-	r.logger.InfoContext(ctx, "number of alerts found", slog.Int("alert.count", len(alerts)))
+	r.logger.InfoContext(ctx, "number of alerts found", slog.String("rule.id", r.ID()), slog.Int("alert.count", len(alerts)))
 	// alerts[h] is ready, add or update active list now
 	for h, a := range alerts {
 		// Check whether we already have alerting state for the identifying label set.
@@ -271,7 +271,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 	for fp, a := range r.Active {
 		labelsJSON, err := json.Marshal(a.QueryResultLabels)
 		if err != nil {
-			r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err))
+			r.logger.ErrorContext(ctx, "error marshaling labels", slog.String("rule.id", r.ID()), errors.Attr(err))
 		}
 		if _, ok := resultFPs[fp]; !ok {
 			// If the alert was previously firing, keep it around for a given
@@ -325,7 +325,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 			state = ruletypes.StateFiring
 		}
 		a.State = state
-		r.logger.DebugContext(ctx, "converting alert state", slog.Any("alert.state", state))
+		r.logger.DebugContext(ctx, "converting alert state", slog.String("rule.id", r.ID()), slog.Any("alert.state", state))
 		itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
 			RuleID:   r.ID(),
 			RuleName: r.Name(),
@@ -350,7 +350,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		itemsToAdd[idx] = item
 	}

-	_ = r.RecordRuleStateHistory(ctx, itemsToAdd)
+	r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)

 	return len(r.Active), nil
 }

@@ -41,7 +41,11 @@ type Rule interface {
 	SetEvaluationTimestamp(time.Time)
 	GetEvaluationTimestamp() time.Time

-	RecordRuleStateHistory(ctx context.Context, itemsToAdd []rulestatehistorytypes.RuleStateHistory) error
+	RecordRuleStateHistory(
+		ctx context.Context,
+		prevState, currentState ruletypes.AlertState,
+		itemsToAdd []rulestatehistorytypes.RuleStateHistory,
+	) error

 	SendAlerts(
 		ctx context.Context,

@@ -2,7 +2,6 @@ package rules

 import (
 	"context"
-	"runtime/debug"
 	"sort"
 	"sync"
 	"time"
@@ -309,10 +308,7 @@ func (g *RuleTask) Eval(ctx context.Context, ts time.Time) {

 	defer func() {
 		if r := recover(); r != nil {
-			g.logger.ErrorContext(
-				ctx, "panic during rule evaluation", slog.Any("panic", r),
-				slog.String("stack", string(debug.Stack())),
-			)
+			g.logger.ErrorContext(ctx, "panic during threshold rule evaluation", "panic", r)
 		}
 	}()

@@ -17,7 +17,7 @@ import (
 	"github.com/stretchr/testify/require"
 )

-func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.TelemetryStore) (querier.Querier, *telemetrytypestest.MockMetadataStore) {
+func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.TelemetryStore) querier.Querier {
 	providerSettings := instrumentationtest.New().ToProviderSettings()
 	metadataStore := telemetrytypestest.NewMockMetadataStore()

@@ -46,12 +46,11 @@ func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.Teleme
 		nil, // prometheus
 		nil, // traceStmtBuilder
 		nil, // logStmtBuilder
-		nil, // auditStmtBuilder
 		metricStmtBuilder,
 		nil, // meterStmtBuilder
 		nil, // traceOperatorStmtBuilder
 		nil, // bucketCache
-	), metadataStore
+	)
 }

 func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap map[string][]*telemetrytypes.TelemetryFieldKey) querier.Querier {
@@ -92,7 +91,6 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
 		nil, // prometheus
 		nil, // traceStmtBuilder
 		logStmtBuilder, // logStmtBuilder
-		nil, // auditStmtBuilder
 		nil, // metricStmtBuilder
 		nil, // meterStmtBuilder
 		nil, // traceOperatorStmtBuilder
@@ -133,7 +131,6 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM
 		nil, // prometheus
 		traceStmtBuilder, // traceStmtBuilder
 		nil, // logStmtBuilder
-		nil, // auditStmtBuilder
 		nil, // metricStmtBuilder
 		nil, // meterStmtBuilder
 		nil, // traceOperatorStmtBuilder

@@ -40,7 +40,7 @@ func NewThresholdRule(
 	logger *slog.Logger,
 	opts ...RuleOption,
 ) (*ThresholdRule, error) {
-	logger.Info("creating new ThresholdRule", slog.String("rule.id", id))
+	logger.Info("creating new ThresholdRule", "id", id)

 	opts = append(opts, WithLogger(logger))

@@ -76,6 +76,7 @@ func (r *ThresholdRule) prepareQueryRange(ctx context.Context, ts time.Time) (*q
 		slog.Int64("ts", ts.UnixMilli()),
 		slog.Int64("eval_window", r.evalWindow.Milliseconds()),
 		slog.Int64("eval_delay", r.evalDelay.Milliseconds()),
+		slog.String("rule.id", r.ID()),
 	)

 	startTs, endTs := r.Timestamps(ts)
@@ -198,7 +199,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
 		results = append(results, tsData)
 	} else {
 		// NOTE: should not happen but just to ensure we don't miss it if it happens for some reason
-		r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got unexpected type", slog.String("item.type", reflect.TypeOf(item).String()))
+		r.logger.WarnContext(ctx, "expected qbtypes.TimeSeriesData but got unexpected type", slog.String("rule.id", r.ID()), slog.String("item.type", reflect.TypeOf(item).String()))
 	}
 }

@@ -224,7 +225,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
 	var resultVector ruletypes.Vector

 	if queryResult == nil || len(queryResult.Aggregations) == 0 || queryResult.Aggregations[0] == nil {
-		r.logger.WarnContext(ctx, "query result is nil", slog.String("query.name", selectedQuery))
+		r.logger.WarnContext(ctx, "query result is nil", slog.String("rule.id", r.ID()), slog.String("query.name", selectedQuery))
 		return resultVector, nil
 	}

@@ -234,7 +235,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
 	filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
 	// In case of error we log the error and continue with the original series
 	if filterErr != nil {
-		r.logger.ErrorContext(ctx, "error filtering new series", errors.Attr(filterErr))
+		r.logger.ErrorContext(ctx, "error filtering new series", slog.String("rule.id", r.ID()), errors.Attr(filterErr))
 	} else {
 		seriesToProcess = filteredSeries
 	}
@@ -242,11 +243,7 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,

 	for _, series := range seriesToProcess {
 		if !r.Condition().ShouldEval(series) {
-			r.logger.InfoContext(
-				ctx, "not enough data points to evaluate series, skipping",
-				slog.Int("series.num_points", len(series.Values)),
-				slog.Int("series.required_points", r.Condition().RequiredNumPoints),
-			)
+			r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", slog.String("rule.id", r.ID()), slog.Int("series.num_points", len(series.Values)), slog.Int("series.required_points", r.Condition().RequiredNumPoints))
 			continue
 		}
 		resultSeries, err := r.Threshold.Eval(series, r.Unit(), ruletypes.EvalData{
@@ -297,10 +294,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		value := valueFormatter.Format(smpl.V, r.Unit())
 		// todo(aniket): handle different threshold
 		threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
-		r.logger.DebugContext(
-			ctx, "alert template data for rule", slog.String("formatter.name", valueFormatter.Name()),
-			slog.String("alert.value", value), slog.String("alert.threshold", threshold),
-		)
+		r.logger.DebugContext(ctx, "alert template data for rule", slog.String("rule.id", r.ID()), slog.String("formatter.name", valueFormatter.Name()), slog.String("alert.value", value), slog.String("alert.threshold", threshold))

 		tmplData := ruletypes.AlertTemplateData(l, value, threshold)
 		// Inject some convenience variables that are easier to remember for users
@@ -319,7 +313,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 			result, err := tmpl.Expand()
 			if err != nil {
 				result = fmt.Sprintf("<error expanding template: %s>", err)
-				r.logger.ErrorContext(ctx, "expanding alert template failed", errors.Attr(err), slog.Any("alert.template_data", tmplData))
+				r.logger.ErrorContext(ctx, "expanding alert template failed", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.template_data", tmplData))
 			}
 			return result
 		}
@@ -351,13 +345,13 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		case ruletypes.AlertTypeTraces:
 			link := r.prepareLinksToTraces(ctx, ts, smpl.Metric)
 			if link != "" && r.hostFromSource() != "" {
-				r.logger.InfoContext(ctx, "adding traces link to annotations", slog.String("annotation.link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)))
+				r.logger.InfoContext(ctx, "adding traces link to annotations", slog.String("rule.id", r.ID()), slog.String("annotation.link", fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)))
 				annotations = append(annotations, ruletypes.Label{Name: "related_traces", Value: fmt.Sprintf("%s/traces-explorer?%s", r.hostFromSource(), link)})
 			}
 		case ruletypes.AlertTypeLogs:
 			link := r.prepareLinksToLogs(ctx, ts, smpl.Metric)
 			if link != "" && r.hostFromSource() != "" {
-				r.logger.InfoContext(ctx, "adding logs link to annotations", slog.String("annotation.link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)))
+				r.logger.InfoContext(ctx, "adding logs link to annotations", slog.String("rule.id", r.ID()), slog.String("annotation.link", fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)))
 				annotations = append(annotations, ruletypes.Label{Name: "related_logs", Value: fmt.Sprintf("%s/logs/logs-explorer?%s", r.hostFromSource(), link)})
 			}
 		}
@@ -384,7 +378,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		}
 	}

-	r.logger.InfoContext(ctx, "number of alerts found", slog.Int("alert.count", len(alerts)))
+	r.logger.InfoContext(ctx, "number of alerts found", slog.String("rule.id", r.ID()), slog.Int("alert.count", len(alerts)))

 	// alerts[h] is ready, add or update active list now
 	for h, a := range alerts {
@@ -412,7 +406,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 	for fp, a := range r.Active {
 		labelsJSON, err := json.Marshal(a.QueryResultLabels)
 		if err != nil {
-			r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), slog.Any("alert.labels", a.Labels))
+			r.logger.ErrorContext(ctx, "error marshaling labels", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.labels", a.Labels))
 		}
 		if _, ok := resultFPs[fp]; !ok {
 			// If the alert was previously firing, keep it around for a given
@@ -421,7 +415,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 				delete(r.Active, fp)
 			}
 			if a.State != ruletypes.StateInactive {
-				r.logger.DebugContext(ctx, "converting firing alert to inactive")
+				r.logger.DebugContext(ctx, "converting firing alert to inactive", slog.String("rule.id", r.ID()))
 				a.State = ruletypes.StateInactive
 				a.ResolvedAt = ts
 				itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
@@ -439,7 +433,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		}

 		if a.State == ruletypes.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
-			r.logger.DebugContext(ctx, "converting pending alert to firing")
+			r.logger.DebugContext(ctx, "converting pending alert to firing", slog.String("rule.id", r.ID()))
 			a.State = ruletypes.StateFiring
 			a.FiredAt = ts
 			state := ruletypes.StateFiring
@@ -469,7 +463,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 			state = ruletypes.StateFiring
 		}
 		a.State = state
-		r.logger.DebugContext(ctx, "converting alert state", slog.Any("alert.state", state))
+		r.logger.DebugContext(ctx, "converting alert state", slog.String("rule.id", r.ID()), slog.Any("alert.state", state))
 		itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
 			RuleID:   r.ID(),
 			RuleName: r.Name(),
@@ -492,7 +486,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
 		itemsToAdd[idx] = item
 	}

-	_ = r.RecordRuleStateHistory(ctx, itemsToAdd)
+	r.RecordRuleStateHistory(ctx, prevState, currentState, itemsToAdd)

 	r.health = ruletypes.HealthGood
 	r.lastError = err

@@ -511,8 +511,7 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
 	}
 	telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})

-	querier, mockMetadataStore := prepareQuerierForMetrics(t, telemetryStore)
-	mockMetadataStore.TypeMap["signoz_calls_total"] = metrictypes.SumType
+	querier := prepareQuerierForMetrics(t, telemetryStore)

 	cols := []cmock.ColumnType{
 		{Name: "ts", Type: "DateTime"},
@@ -728,8 +727,7 @@ func TestThresholdRuleNoData(t *testing.T) {
 		WithArgs(nil, nil, nil, nil, nil, nil, nil, nil).
 		WillReturnRows(rows)

-	querier, mockMetadataStore := prepareQuerierForMetrics(t, telemetryStore)
-	mockMetadataStore.TypeMap["signoz_calls_total"] = metrictypes.SumType
+	querier := prepareQuerierForMetrics(t, telemetryStore)

 	var target float64 = 0
 	postableRule.RuleCondition.Thresholds = &ruletypes.RuleThresholdData{
@@ -1117,8 +1115,7 @@ func TestMultipleThresholdRule(t *testing.T) {
 		WithArgs(nil, nil, nil, nil, nil, nil, nil, nil).
 		WillReturnRows(rows)

-	querier, mockMetadataStore := prepareQuerierForMetrics(t, telemetryStore)
-	mockMetadataStore.TypeMap["signoz_calls_total"] = metrictypes.SumType
+	querier := prepareQuerierForMetrics(t, telemetryStore)

 	postableRule.RuleCondition.CompareOperator = c.compareOperator
 	postableRule.RuleCondition.MatchType = c.matchType
@@ -1906,8 +1903,7 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
 		WithArgs(nil, nil, nil, nil, nil, nil, nil, nil).
 		WillReturnRows(rows)

-	querier, mockMetadataStore := prepareQuerierForMetrics(t, telemetryStore)
-	mockMetadataStore.TypeMap["signoz_calls_total"] = metrictypes.SumType
+	querier := prepareQuerierForMetrics(t, telemetryStore)

 	rc := postableRule.RuleCondition
 	rc.Target = &c.target

Some files were not shown because too many files have changed in this diff.