mirror of
https://github.com/SigNoz/signoz.git
synced 2026-04-18 01:40:28 +01:00
Compare commits
5 Commits
refactor/t
...
debug-wal
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
501ad64b9e | ||
|
|
0648cd4e18 | ||
|
|
6d1d028d4c | ||
|
|
92660b457d | ||
|
|
8bfadbc197 |
@@ -403,27 +403,65 @@ components:
|
|||||||
required:
|
required:
|
||||||
- regions
|
- regions
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesAWSCollectionStrategy:
|
CloudintegrationtypesAWSCloudWatchLogsSubscription:
|
||||||
properties:
|
properties:
|
||||||
aws_logs:
|
filterPattern:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAWSLogsStrategy'
|
type: string
|
||||||
aws_metrics:
|
logGroupNamePrefix:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAWSMetricsStrategy'
|
type: string
|
||||||
s3_buckets:
|
required:
|
||||||
additionalProperties:
|
- logGroupNamePrefix
|
||||||
items:
|
- filterPattern
|
||||||
type: string
|
type: object
|
||||||
type: array
|
CloudintegrationtypesAWSCloudWatchMetricStreamFilter:
|
||||||
type: object
|
properties:
|
||||||
|
metricNames:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
namespace:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- namespace
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesAWSConnectionArtifact:
|
CloudintegrationtypesAWSConnectionArtifact:
|
||||||
properties:
|
properties:
|
||||||
connectionURL:
|
connectionUrl:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
- connectionURL
|
- connectionUrl
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesAWSConnectionArtifactRequest:
|
CloudintegrationtypesAWSIntegrationConfig:
|
||||||
|
properties:
|
||||||
|
enabledRegions:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
telemetryCollectionStrategy:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSTelemetryCollectionStrategy'
|
||||||
|
required:
|
||||||
|
- enabledRegions
|
||||||
|
- telemetryCollectionStrategy
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesAWSLogsCollectionStrategy:
|
||||||
|
properties:
|
||||||
|
subscriptions:
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSCloudWatchLogsSubscription'
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- subscriptions
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesAWSMetricsCollectionStrategy:
|
||||||
|
properties:
|
||||||
|
streamFilters:
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSCloudWatchMetricStreamFilter'
|
||||||
|
type: array
|
||||||
|
required:
|
||||||
|
- streamFilters
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesAWSPostableAccountConfig:
|
||||||
properties:
|
properties:
|
||||||
deploymentRegion:
|
deploymentRegion:
|
||||||
type: string
|
type: string
|
||||||
@@ -435,46 +473,6 @@ components:
|
|||||||
- deploymentRegion
|
- deploymentRegion
|
||||||
- regions
|
- regions
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesAWSIntegrationConfig:
|
|
||||||
properties:
|
|
||||||
enabledRegions:
|
|
||||||
items:
|
|
||||||
type: string
|
|
||||||
type: array
|
|
||||||
telemetry:
|
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
|
|
||||||
required:
|
|
||||||
- enabledRegions
|
|
||||||
- telemetry
|
|
||||||
type: object
|
|
||||||
CloudintegrationtypesAWSLogsStrategy:
|
|
||||||
properties:
|
|
||||||
cloudwatch_logs_subscriptions:
|
|
||||||
items:
|
|
||||||
properties:
|
|
||||||
filter_pattern:
|
|
||||||
type: string
|
|
||||||
log_group_name_prefix:
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
nullable: true
|
|
||||||
type: array
|
|
||||||
type: object
|
|
||||||
CloudintegrationtypesAWSMetricsStrategy:
|
|
||||||
properties:
|
|
||||||
cloudwatch_metric_stream_filters:
|
|
||||||
items:
|
|
||||||
properties:
|
|
||||||
MetricNames:
|
|
||||||
items:
|
|
||||||
type: string
|
|
||||||
type: array
|
|
||||||
Namespace:
|
|
||||||
type: string
|
|
||||||
type: object
|
|
||||||
nullable: true
|
|
||||||
type: array
|
|
||||||
type: object
|
|
||||||
CloudintegrationtypesAWSServiceConfig:
|
CloudintegrationtypesAWSServiceConfig:
|
||||||
properties:
|
properties:
|
||||||
logs:
|
logs:
|
||||||
@@ -486,7 +484,7 @@ components:
|
|||||||
properties:
|
properties:
|
||||||
enabled:
|
enabled:
|
||||||
type: boolean
|
type: boolean
|
||||||
s3_buckets:
|
s3Buckets:
|
||||||
additionalProperties:
|
additionalProperties:
|
||||||
items:
|
items:
|
||||||
type: string
|
type: string
|
||||||
@@ -498,6 +496,19 @@ components:
|
|||||||
enabled:
|
enabled:
|
||||||
type: boolean
|
type: boolean
|
||||||
type: object
|
type: object
|
||||||
|
CloudintegrationtypesAWSTelemetryCollectionStrategy:
|
||||||
|
properties:
|
||||||
|
logs:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSLogsCollectionStrategy'
|
||||||
|
metrics:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSMetricsCollectionStrategy'
|
||||||
|
s3Buckets:
|
||||||
|
additionalProperties:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
type: object
|
||||||
|
type: object
|
||||||
CloudintegrationtypesAccount:
|
CloudintegrationtypesAccount:
|
||||||
properties:
|
properties:
|
||||||
agentReport:
|
agentReport:
|
||||||
@@ -561,6 +572,26 @@ components:
|
|||||||
nullable: true
|
nullable: true
|
||||||
type: array
|
type: array
|
||||||
type: object
|
type: object
|
||||||
|
CloudintegrationtypesCloudIntegrationService:
|
||||||
|
nullable: true
|
||||||
|
properties:
|
||||||
|
cloudIntegrationId:
|
||||||
|
type: string
|
||||||
|
config:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesServiceConfig'
|
||||||
|
createdAt:
|
||||||
|
format: date-time
|
||||||
|
type: string
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
type:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesServiceID'
|
||||||
|
updatedAt:
|
||||||
|
format: date-time
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- id
|
||||||
|
type: object
|
||||||
CloudintegrationtypesCollectedLogAttribute:
|
CloudintegrationtypesCollectedLogAttribute:
|
||||||
properties:
|
properties:
|
||||||
name:
|
name:
|
||||||
@@ -581,13 +612,6 @@ components:
|
|||||||
unit:
|
unit:
|
||||||
type: string
|
type: string
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesCollectionStrategy:
|
|
||||||
properties:
|
|
||||||
aws:
|
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
|
|
||||||
required:
|
|
||||||
- aws
|
|
||||||
type: object
|
|
||||||
CloudintegrationtypesConnectionArtifact:
|
CloudintegrationtypesConnectionArtifact:
|
||||||
properties:
|
properties:
|
||||||
aws:
|
aws:
|
||||||
@@ -595,12 +619,21 @@ components:
|
|||||||
required:
|
required:
|
||||||
- aws
|
- aws
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesConnectionArtifactRequest:
|
CloudintegrationtypesCredentials:
|
||||||
properties:
|
properties:
|
||||||
aws:
|
ingestionKey:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAWSConnectionArtifactRequest'
|
type: string
|
||||||
|
ingestionUrl:
|
||||||
|
type: string
|
||||||
|
sigNozApiKey:
|
||||||
|
type: string
|
||||||
|
sigNozApiUrl:
|
||||||
|
type: string
|
||||||
required:
|
required:
|
||||||
- aws
|
- sigNozApiUrl
|
||||||
|
- sigNozApiKey
|
||||||
|
- ingestionUrl
|
||||||
|
- ingestionKey
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesDashboard:
|
CloudintegrationtypesDashboard:
|
||||||
properties:
|
properties:
|
||||||
@@ -626,7 +659,7 @@ components:
|
|||||||
nullable: true
|
nullable: true
|
||||||
type: array
|
type: array
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesGettableAccountWithArtifact:
|
CloudintegrationtypesGettableAccountWithConnectionArtifact:
|
||||||
properties:
|
properties:
|
||||||
connectionArtifact:
|
connectionArtifact:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesConnectionArtifact'
|
$ref: '#/components/schemas/CloudintegrationtypesConnectionArtifact'
|
||||||
@@ -645,7 +678,7 @@ components:
|
|||||||
required:
|
required:
|
||||||
- accounts
|
- accounts
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesGettableAgentCheckInResponse:
|
CloudintegrationtypesGettableAgentCheckIn:
|
||||||
properties:
|
properties:
|
||||||
account_id:
|
account_id:
|
||||||
type: string
|
type: string
|
||||||
@@ -694,12 +727,72 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
type: array
|
type: array
|
||||||
telemetry:
|
telemetry:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
|
$ref: '#/components/schemas/CloudintegrationtypesOldAWSCollectionStrategy'
|
||||||
required:
|
required:
|
||||||
- enabled_regions
|
- enabled_regions
|
||||||
- telemetry
|
- telemetry
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesPostableAgentCheckInRequest:
|
CloudintegrationtypesOldAWSCollectionStrategy:
|
||||||
|
properties:
|
||||||
|
aws_logs:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesOldAWSLogsStrategy'
|
||||||
|
aws_metrics:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesOldAWSMetricsStrategy'
|
||||||
|
provider:
|
||||||
|
type: string
|
||||||
|
s3_buckets:
|
||||||
|
additionalProperties:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
type: object
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesOldAWSLogsStrategy:
|
||||||
|
properties:
|
||||||
|
cloudwatch_logs_subscriptions:
|
||||||
|
items:
|
||||||
|
properties:
|
||||||
|
filter_pattern:
|
||||||
|
type: string
|
||||||
|
log_group_name_prefix:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
nullable: true
|
||||||
|
type: array
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesOldAWSMetricsStrategy:
|
||||||
|
properties:
|
||||||
|
cloudwatch_metric_stream_filters:
|
||||||
|
items:
|
||||||
|
properties:
|
||||||
|
MetricNames:
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
type: array
|
||||||
|
Namespace:
|
||||||
|
type: string
|
||||||
|
type: object
|
||||||
|
nullable: true
|
||||||
|
type: array
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesPostableAccount:
|
||||||
|
properties:
|
||||||
|
config:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesPostableAccountConfig'
|
||||||
|
credentials:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesCredentials'
|
||||||
|
required:
|
||||||
|
- config
|
||||||
|
- credentials
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesPostableAccountConfig:
|
||||||
|
properties:
|
||||||
|
aws:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSPostableAccountConfig'
|
||||||
|
required:
|
||||||
|
- aws
|
||||||
|
type: object
|
||||||
|
CloudintegrationtypesPostableAgentCheckIn:
|
||||||
properties:
|
properties:
|
||||||
account_id:
|
account_id:
|
||||||
type: string
|
type: string
|
||||||
@@ -727,6 +820,8 @@ components:
|
|||||||
properties:
|
properties:
|
||||||
assets:
|
assets:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesAssets'
|
$ref: '#/components/schemas/CloudintegrationtypesAssets'
|
||||||
|
cloudIntegrationService:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesCloudIntegrationService'
|
||||||
dataCollected:
|
dataCollected:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesDataCollected'
|
$ref: '#/components/schemas/CloudintegrationtypesDataCollected'
|
||||||
icon:
|
icon:
|
||||||
@@ -735,12 +830,10 @@ components:
|
|||||||
type: string
|
type: string
|
||||||
overview:
|
overview:
|
||||||
type: string
|
type: string
|
||||||
serviceConfig:
|
supportedSignals:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesServiceConfig'
|
|
||||||
supported_signals:
|
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesSupportedSignals'
|
$ref: '#/components/schemas/CloudintegrationtypesSupportedSignals'
|
||||||
telemetryCollectionStrategy:
|
telemetryCollectionStrategy:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesCollectionStrategy'
|
$ref: '#/components/schemas/CloudintegrationtypesTelemetryCollectionStrategy'
|
||||||
title:
|
title:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -749,9 +842,10 @@ components:
|
|||||||
- icon
|
- icon
|
||||||
- overview
|
- overview
|
||||||
- assets
|
- assets
|
||||||
- supported_signals
|
- supportedSignals
|
||||||
- dataCollected
|
- dataCollected
|
||||||
- telemetryCollectionStrategy
|
- telemetryCollectionStrategy
|
||||||
|
- cloudIntegrationService
|
||||||
type: object
|
type: object
|
||||||
CloudintegrationtypesServiceConfig:
|
CloudintegrationtypesServiceConfig:
|
||||||
properties:
|
properties:
|
||||||
@@ -760,6 +854,22 @@ components:
|
|||||||
required:
|
required:
|
||||||
- aws
|
- aws
|
||||||
type: object
|
type: object
|
||||||
|
CloudintegrationtypesServiceID:
|
||||||
|
enum:
|
||||||
|
- alb
|
||||||
|
- api-gateway
|
||||||
|
- dynamodb
|
||||||
|
- ec2
|
||||||
|
- ecs
|
||||||
|
- eks
|
||||||
|
- elasticache
|
||||||
|
- lambda
|
||||||
|
- msk
|
||||||
|
- rds
|
||||||
|
- s3sync
|
||||||
|
- sns
|
||||||
|
- sqs
|
||||||
|
type: string
|
||||||
CloudintegrationtypesServiceMetadata:
|
CloudintegrationtypesServiceMetadata:
|
||||||
properties:
|
properties:
|
||||||
enabled:
|
enabled:
|
||||||
@@ -783,6 +893,13 @@ components:
|
|||||||
metrics:
|
metrics:
|
||||||
type: boolean
|
type: boolean
|
||||||
type: object
|
type: object
|
||||||
|
CloudintegrationtypesTelemetryCollectionStrategy:
|
||||||
|
properties:
|
||||||
|
aws:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesAWSTelemetryCollectionStrategy'
|
||||||
|
required:
|
||||||
|
- aws
|
||||||
|
type: object
|
||||||
CloudintegrationtypesUpdatableAccount:
|
CloudintegrationtypesUpdatableAccount:
|
||||||
properties:
|
properties:
|
||||||
config:
|
config:
|
||||||
@@ -3081,7 +3198,7 @@ paths:
|
|||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckInRequest'
|
$ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckIn'
|
||||||
responses:
|
responses:
|
||||||
"200":
|
"200":
|
||||||
content:
|
content:
|
||||||
@@ -3089,7 +3206,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckInResponse'
|
$ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckIn'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -3190,7 +3307,7 @@ paths:
|
|||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesConnectionArtifactRequest'
|
$ref: '#/components/schemas/CloudintegrationtypesPostableAccount'
|
||||||
responses:
|
responses:
|
||||||
"200":
|
"200":
|
||||||
content:
|
content:
|
||||||
@@ -3198,7 +3315,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesGettableAccountWithArtifact'
|
$ref: '#/components/schemas/CloudintegrationtypesGettableAccountWithConnectionArtifact'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -3394,6 +3511,61 @@ paths:
|
|||||||
summary: Update account
|
summary: Update account
|
||||||
tags:
|
tags:
|
||||||
- cloudintegration
|
- cloudintegration
|
||||||
|
/api/v1/cloud_integrations/{cloud_provider}/accounts/{id}/services/{service_id}:
|
||||||
|
put:
|
||||||
|
deprecated: false
|
||||||
|
description: This endpoint updates a service for the specified cloud provider
|
||||||
|
operationId: UpdateService
|
||||||
|
parameters:
|
||||||
|
- in: path
|
||||||
|
name: cloud_provider
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- in: path
|
||||||
|
name: id
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- in: path
|
||||||
|
name: service_id
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
requestBody:
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesUpdatableService'
|
||||||
|
responses:
|
||||||
|
"204":
|
||||||
|
description: No Content
|
||||||
|
"401":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
|
description: Unauthorized
|
||||||
|
"403":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
|
description: Forbidden
|
||||||
|
"500":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
|
description: Internal Server Error
|
||||||
|
security:
|
||||||
|
- api_key:
|
||||||
|
- ADMIN
|
||||||
|
- tokenizer:
|
||||||
|
- ADMIN
|
||||||
|
summary: Update service
|
||||||
|
tags:
|
||||||
|
- cloudintegration
|
||||||
/api/v1/cloud_integrations/{cloud_provider}/accounts/check_in:
|
/api/v1/cloud_integrations/{cloud_provider}/accounts/check_in:
|
||||||
post:
|
post:
|
||||||
deprecated: false
|
deprecated: false
|
||||||
@@ -3409,7 +3581,7 @@ paths:
|
|||||||
content:
|
content:
|
||||||
application/json:
|
application/json:
|
||||||
schema:
|
schema:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckInRequest'
|
$ref: '#/components/schemas/CloudintegrationtypesPostableAgentCheckIn'
|
||||||
responses:
|
responses:
|
||||||
"200":
|
"200":
|
||||||
content:
|
content:
|
||||||
@@ -3417,7 +3589,7 @@ paths:
|
|||||||
schema:
|
schema:
|
||||||
properties:
|
properties:
|
||||||
data:
|
data:
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckInResponse'
|
$ref: '#/components/schemas/CloudintegrationtypesGettableAgentCheckIn'
|
||||||
status:
|
status:
|
||||||
type: string
|
type: string
|
||||||
required:
|
required:
|
||||||
@@ -3451,6 +3623,59 @@ paths:
|
|||||||
summary: Agent check-in
|
summary: Agent check-in
|
||||||
tags:
|
tags:
|
||||||
- cloudintegration
|
- cloudintegration
|
||||||
|
/api/v1/cloud_integrations/{cloud_provider}/credentials:
|
||||||
|
get:
|
||||||
|
deprecated: false
|
||||||
|
description: This endpoint retrieves the connection credentials required for
|
||||||
|
integration
|
||||||
|
operationId: GetConnectionCredentials
|
||||||
|
parameters:
|
||||||
|
- in: path
|
||||||
|
name: cloud_provider
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
"200":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
properties:
|
||||||
|
data:
|
||||||
|
$ref: '#/components/schemas/CloudintegrationtypesCredentials'
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
required:
|
||||||
|
- status
|
||||||
|
- data
|
||||||
|
type: object
|
||||||
|
description: OK
|
||||||
|
"401":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
|
description: Unauthorized
|
||||||
|
"403":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
|
description: Forbidden
|
||||||
|
"500":
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RenderErrorResponse'
|
||||||
|
description: Internal Server Error
|
||||||
|
security:
|
||||||
|
- api_key:
|
||||||
|
- ADMIN
|
||||||
|
- tokenizer:
|
||||||
|
- ADMIN
|
||||||
|
summary: Get connection credentials
|
||||||
|
tags:
|
||||||
|
- cloudintegration
|
||||||
/api/v1/cloud_integrations/{cloud_provider}/services:
|
/api/v1/cloud_integrations/{cloud_provider}/services:
|
||||||
get:
|
get:
|
||||||
deprecated: false
|
deprecated: false
|
||||||
@@ -3561,55 +3786,6 @@ paths:
|
|||||||
summary: Get service
|
summary: Get service
|
||||||
tags:
|
tags:
|
||||||
- cloudintegration
|
- cloudintegration
|
||||||
put:
|
|
||||||
deprecated: false
|
|
||||||
description: This endpoint updates a service for the specified cloud provider
|
|
||||||
operationId: UpdateService
|
|
||||||
parameters:
|
|
||||||
- in: path
|
|
||||||
name: cloud_provider
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
- in: path
|
|
||||||
name: service_id
|
|
||||||
required: true
|
|
||||||
schema:
|
|
||||||
type: string
|
|
||||||
requestBody:
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/CloudintegrationtypesUpdatableService'
|
|
||||||
responses:
|
|
||||||
"204":
|
|
||||||
description: No Content
|
|
||||||
"401":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Unauthorized
|
|
||||||
"403":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Forbidden
|
|
||||||
"500":
|
|
||||||
content:
|
|
||||||
application/json:
|
|
||||||
schema:
|
|
||||||
$ref: '#/components/schemas/RenderErrorResponse'
|
|
||||||
description: Internal Server Error
|
|
||||||
security:
|
|
||||||
- api_key:
|
|
||||||
- ADMIN
|
|
||||||
- tokenizer:
|
|
||||||
- ADMIN
|
|
||||||
summary: Update service
|
|
||||||
tags:
|
|
||||||
- cloudintegration
|
|
||||||
/api/v1/complete/google:
|
/api/v1/complete/google:
|
||||||
get:
|
get:
|
||||||
deprecated: false
|
deprecated: false
|
||||||
|
|||||||
@@ -24,8 +24,8 @@ import type {
|
|||||||
AgentCheckInDeprecated200,
|
AgentCheckInDeprecated200,
|
||||||
AgentCheckInDeprecatedPathParameters,
|
AgentCheckInDeprecatedPathParameters,
|
||||||
AgentCheckInPathParameters,
|
AgentCheckInPathParameters,
|
||||||
CloudintegrationtypesConnectionArtifactRequestDTO,
|
CloudintegrationtypesPostableAccountDTO,
|
||||||
CloudintegrationtypesPostableAgentCheckInRequestDTO,
|
CloudintegrationtypesPostableAgentCheckInDTO,
|
||||||
CloudintegrationtypesUpdatableAccountDTO,
|
CloudintegrationtypesUpdatableAccountDTO,
|
||||||
CloudintegrationtypesUpdatableServiceDTO,
|
CloudintegrationtypesUpdatableServiceDTO,
|
||||||
CreateAccount200,
|
CreateAccount200,
|
||||||
@@ -33,6 +33,8 @@ import type {
|
|||||||
DisconnectAccountPathParameters,
|
DisconnectAccountPathParameters,
|
||||||
GetAccount200,
|
GetAccount200,
|
||||||
GetAccountPathParameters,
|
GetAccountPathParameters,
|
||||||
|
GetConnectionCredentials200,
|
||||||
|
GetConnectionCredentialsPathParameters,
|
||||||
GetService200,
|
GetService200,
|
||||||
GetServicePathParameters,
|
GetServicePathParameters,
|
||||||
ListAccounts200,
|
ListAccounts200,
|
||||||
@@ -51,14 +53,14 @@ import type {
|
|||||||
*/
|
*/
|
||||||
export const agentCheckInDeprecated = (
|
export const agentCheckInDeprecated = (
|
||||||
{ cloudProvider }: AgentCheckInDeprecatedPathParameters,
|
{ cloudProvider }: AgentCheckInDeprecatedPathParameters,
|
||||||
cloudintegrationtypesPostableAgentCheckInRequestDTO: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>,
|
cloudintegrationtypesPostableAgentCheckInDTO: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>,
|
||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
) => {
|
) => {
|
||||||
return GeneratedAPIInstance<AgentCheckInDeprecated200>({
|
return GeneratedAPIInstance<AgentCheckInDeprecated200>({
|
||||||
url: `/api/v1/cloud-integrations/${cloudProvider}/agent-check-in`,
|
url: `/api/v1/cloud-integrations/${cloudProvider}/agent-check-in`,
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
data: cloudintegrationtypesPostableAgentCheckInRequestDTO,
|
data: cloudintegrationtypesPostableAgentCheckInDTO,
|
||||||
signal,
|
signal,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -72,7 +74,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInDeprecatedPathParameters;
|
pathParams: AgentCheckInDeprecatedPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -81,7 +83,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInDeprecatedPathParameters;
|
pathParams: AgentCheckInDeprecatedPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -98,7 +100,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
|
|||||||
Awaited<ReturnType<typeof agentCheckInDeprecated>>,
|
Awaited<ReturnType<typeof agentCheckInDeprecated>>,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInDeprecatedPathParameters;
|
pathParams: AgentCheckInDeprecatedPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
}
|
}
|
||||||
> = (props) => {
|
> = (props) => {
|
||||||
const { pathParams, data } = props ?? {};
|
const { pathParams, data } = props ?? {};
|
||||||
@@ -112,7 +114,7 @@ export const getAgentCheckInDeprecatedMutationOptions = <
|
|||||||
export type AgentCheckInDeprecatedMutationResult = NonNullable<
|
export type AgentCheckInDeprecatedMutationResult = NonNullable<
|
||||||
Awaited<ReturnType<typeof agentCheckInDeprecated>>
|
Awaited<ReturnType<typeof agentCheckInDeprecated>>
|
||||||
>;
|
>;
|
||||||
export type AgentCheckInDeprecatedMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
export type AgentCheckInDeprecatedMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
export type AgentCheckInDeprecatedMutationError = ErrorType<RenderErrorResponseDTO>;
|
export type AgentCheckInDeprecatedMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -128,7 +130,7 @@ export const useAgentCheckInDeprecated = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInDeprecatedPathParameters;
|
pathParams: AgentCheckInDeprecatedPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -137,7 +139,7 @@ export const useAgentCheckInDeprecated = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInDeprecatedPathParameters;
|
pathParams: AgentCheckInDeprecatedPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -255,14 +257,14 @@ export const invalidateListAccounts = async (
|
|||||||
*/
|
*/
|
||||||
export const createAccount = (
|
export const createAccount = (
|
||||||
{ cloudProvider }: CreateAccountPathParameters,
|
{ cloudProvider }: CreateAccountPathParameters,
|
||||||
cloudintegrationtypesConnectionArtifactRequestDTO: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>,
|
cloudintegrationtypesPostableAccountDTO: BodyType<CloudintegrationtypesPostableAccountDTO>,
|
||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
) => {
|
) => {
|
||||||
return GeneratedAPIInstance<CreateAccount200>({
|
return GeneratedAPIInstance<CreateAccount200>({
|
||||||
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts`,
|
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts`,
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
data: cloudintegrationtypesConnectionArtifactRequestDTO,
|
data: cloudintegrationtypesPostableAccountDTO,
|
||||||
signal,
|
signal,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -276,7 +278,7 @@ export const getCreateAccountMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: CreateAccountPathParameters;
|
pathParams: CreateAccountPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -285,7 +287,7 @@ export const getCreateAccountMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: CreateAccountPathParameters;
|
pathParams: CreateAccountPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -302,7 +304,7 @@ export const getCreateAccountMutationOptions = <
|
|||||||
Awaited<ReturnType<typeof createAccount>>,
|
Awaited<ReturnType<typeof createAccount>>,
|
||||||
{
|
{
|
||||||
pathParams: CreateAccountPathParameters;
|
pathParams: CreateAccountPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
|
||||||
}
|
}
|
||||||
> = (props) => {
|
> = (props) => {
|
||||||
const { pathParams, data } = props ?? {};
|
const { pathParams, data } = props ?? {};
|
||||||
@@ -316,7 +318,7 @@ export const getCreateAccountMutationOptions = <
|
|||||||
export type CreateAccountMutationResult = NonNullable<
|
export type CreateAccountMutationResult = NonNullable<
|
||||||
Awaited<ReturnType<typeof createAccount>>
|
Awaited<ReturnType<typeof createAccount>>
|
||||||
>;
|
>;
|
||||||
export type CreateAccountMutationBody = BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
|
export type CreateAccountMutationBody = BodyType<CloudintegrationtypesPostableAccountDTO>;
|
||||||
export type CreateAccountMutationError = ErrorType<RenderErrorResponseDTO>;
|
export type CreateAccountMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -331,7 +333,7 @@ export const useCreateAccount = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: CreateAccountPathParameters;
|
pathParams: CreateAccountPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -340,7 +342,7 @@ export const useCreateAccount = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: CreateAccountPathParameters;
|
pathParams: CreateAccountPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesConnectionArtifactRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAccountDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -628,20 +630,117 @@ export const useUpdateAccount = <
|
|||||||
|
|
||||||
return useMutation(mutationOptions);
|
return useMutation(mutationOptions);
|
||||||
};
|
};
|
||||||
|
/**
|
||||||
|
* This endpoint updates a service for the specified cloud provider
|
||||||
|
* @summary Update service
|
||||||
|
*/
|
||||||
|
export const updateService = (
|
||||||
|
{ cloudProvider, id, serviceId }: UpdateServicePathParameters,
|
||||||
|
cloudintegrationtypesUpdatableServiceDTO: BodyType<CloudintegrationtypesUpdatableServiceDTO>,
|
||||||
|
) => {
|
||||||
|
return GeneratedAPIInstance<void>({
|
||||||
|
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts/${id}/services/${serviceId}`,
|
||||||
|
method: 'PUT',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
data: cloudintegrationtypesUpdatableServiceDTO,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getUpdateServiceMutationOptions = <
|
||||||
|
TError = ErrorType<RenderErrorResponseDTO>,
|
||||||
|
TContext = unknown
|
||||||
|
>(options?: {
|
||||||
|
mutation?: UseMutationOptions<
|
||||||
|
Awaited<ReturnType<typeof updateService>>,
|
||||||
|
TError,
|
||||||
|
{
|
||||||
|
pathParams: UpdateServicePathParameters;
|
||||||
|
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||||
|
},
|
||||||
|
TContext
|
||||||
|
>;
|
||||||
|
}): UseMutationOptions<
|
||||||
|
Awaited<ReturnType<typeof updateService>>,
|
||||||
|
TError,
|
||||||
|
{
|
||||||
|
pathParams: UpdateServicePathParameters;
|
||||||
|
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||||
|
},
|
||||||
|
TContext
|
||||||
|
> => {
|
||||||
|
const mutationKey = ['updateService'];
|
||||||
|
const { mutation: mutationOptions } = options
|
||||||
|
? options.mutation &&
|
||||||
|
'mutationKey' in options.mutation &&
|
||||||
|
options.mutation.mutationKey
|
||||||
|
? options
|
||||||
|
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||||
|
: { mutation: { mutationKey } };
|
||||||
|
|
||||||
|
const mutationFn: MutationFunction<
|
||||||
|
Awaited<ReturnType<typeof updateService>>,
|
||||||
|
{
|
||||||
|
pathParams: UpdateServicePathParameters;
|
||||||
|
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||||
|
}
|
||||||
|
> = (props) => {
|
||||||
|
const { pathParams, data } = props ?? {};
|
||||||
|
|
||||||
|
return updateService(pathParams, data);
|
||||||
|
};
|
||||||
|
|
||||||
|
return { mutationFn, ...mutationOptions };
|
||||||
|
};
|
||||||
|
|
||||||
|
export type UpdateServiceMutationResult = NonNullable<
|
||||||
|
Awaited<ReturnType<typeof updateService>>
|
||||||
|
>;
|
||||||
|
export type UpdateServiceMutationBody = BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||||
|
export type UpdateServiceMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @summary Update service
|
||||||
|
*/
|
||||||
|
export const useUpdateService = <
|
||||||
|
TError = ErrorType<RenderErrorResponseDTO>,
|
||||||
|
TContext = unknown
|
||||||
|
>(options?: {
|
||||||
|
mutation?: UseMutationOptions<
|
||||||
|
Awaited<ReturnType<typeof updateService>>,
|
||||||
|
TError,
|
||||||
|
{
|
||||||
|
pathParams: UpdateServicePathParameters;
|
||||||
|
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||||
|
},
|
||||||
|
TContext
|
||||||
|
>;
|
||||||
|
}): UseMutationResult<
|
||||||
|
Awaited<ReturnType<typeof updateService>>,
|
||||||
|
TError,
|
||||||
|
{
|
||||||
|
pathParams: UpdateServicePathParameters;
|
||||||
|
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||||
|
},
|
||||||
|
TContext
|
||||||
|
> => {
|
||||||
|
const mutationOptions = getUpdateServiceMutationOptions(options);
|
||||||
|
|
||||||
|
return useMutation(mutationOptions);
|
||||||
|
};
|
||||||
/**
|
/**
|
||||||
* This endpoint is called by the deployed agent to check in
|
* This endpoint is called by the deployed agent to check in
|
||||||
* @summary Agent check-in
|
* @summary Agent check-in
|
||||||
*/
|
*/
|
||||||
export const agentCheckIn = (
|
export const agentCheckIn = (
|
||||||
{ cloudProvider }: AgentCheckInPathParameters,
|
{ cloudProvider }: AgentCheckInPathParameters,
|
||||||
cloudintegrationtypesPostableAgentCheckInRequestDTO: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>,
|
cloudintegrationtypesPostableAgentCheckInDTO: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>,
|
||||||
signal?: AbortSignal,
|
signal?: AbortSignal,
|
||||||
) => {
|
) => {
|
||||||
return GeneratedAPIInstance<AgentCheckIn200>({
|
return GeneratedAPIInstance<AgentCheckIn200>({
|
||||||
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts/check_in`,
|
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts/check_in`,
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: { 'Content-Type': 'application/json' },
|
headers: { 'Content-Type': 'application/json' },
|
||||||
data: cloudintegrationtypesPostableAgentCheckInRequestDTO,
|
data: cloudintegrationtypesPostableAgentCheckInDTO,
|
||||||
signal,
|
signal,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -655,7 +754,7 @@ export const getAgentCheckInMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInPathParameters;
|
pathParams: AgentCheckInPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -664,7 +763,7 @@ export const getAgentCheckInMutationOptions = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInPathParameters;
|
pathParams: AgentCheckInPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -681,7 +780,7 @@ export const getAgentCheckInMutationOptions = <
|
|||||||
Awaited<ReturnType<typeof agentCheckIn>>,
|
Awaited<ReturnType<typeof agentCheckIn>>,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInPathParameters;
|
pathParams: AgentCheckInPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
}
|
}
|
||||||
> = (props) => {
|
> = (props) => {
|
||||||
const { pathParams, data } = props ?? {};
|
const { pathParams, data } = props ?? {};
|
||||||
@@ -695,7 +794,7 @@ export const getAgentCheckInMutationOptions = <
|
|||||||
export type AgentCheckInMutationResult = NonNullable<
|
export type AgentCheckInMutationResult = NonNullable<
|
||||||
Awaited<ReturnType<typeof agentCheckIn>>
|
Awaited<ReturnType<typeof agentCheckIn>>
|
||||||
>;
|
>;
|
||||||
export type AgentCheckInMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
export type AgentCheckInMutationBody = BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
export type AgentCheckInMutationError = ErrorType<RenderErrorResponseDTO>;
|
export type AgentCheckInMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -710,7 +809,7 @@ export const useAgentCheckIn = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInPathParameters;
|
pathParams: AgentCheckInPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
>;
|
>;
|
||||||
@@ -719,7 +818,7 @@ export const useAgentCheckIn = <
|
|||||||
TError,
|
TError,
|
||||||
{
|
{
|
||||||
pathParams: AgentCheckInPathParameters;
|
pathParams: AgentCheckInPathParameters;
|
||||||
data: BodyType<CloudintegrationtypesPostableAgentCheckInRequestDTO>;
|
data: BodyType<CloudintegrationtypesPostableAgentCheckInDTO>;
|
||||||
},
|
},
|
||||||
TContext
|
TContext
|
||||||
> => {
|
> => {
|
||||||
@@ -727,6 +826,114 @@ export const useAgentCheckIn = <
|
|||||||
|
|
||||||
return useMutation(mutationOptions);
|
return useMutation(mutationOptions);
|
||||||
};
|
};
|
||||||
|
/**
|
||||||
|
* This endpoint retrieves the connection credentials required for integration
|
||||||
|
* @summary Get connection credentials
|
||||||
|
*/
|
||||||
|
export const getConnectionCredentials = (
|
||||||
|
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||||
|
signal?: AbortSignal,
|
||||||
|
) => {
|
||||||
|
return GeneratedAPIInstance<GetConnectionCredentials200>({
|
||||||
|
url: `/api/v1/cloud_integrations/${cloudProvider}/credentials`,
|
||||||
|
method: 'GET',
|
||||||
|
signal,
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getGetConnectionCredentialsQueryKey = ({
|
||||||
|
cloudProvider,
|
||||||
|
}: GetConnectionCredentialsPathParameters) => {
|
||||||
|
return [`/api/v1/cloud_integrations/${cloudProvider}/credentials`] as const;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const getGetConnectionCredentialsQueryOptions = <
|
||||||
|
TData = Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||||
|
TError = ErrorType<RenderErrorResponseDTO>
|
||||||
|
>(
|
||||||
|
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||||
|
options?: {
|
||||||
|
query?: UseQueryOptions<
|
||||||
|
Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||||
|
TError,
|
||||||
|
TData
|
||||||
|
>;
|
||||||
|
},
|
||||||
|
) => {
|
||||||
|
const { query: queryOptions } = options ?? {};
|
||||||
|
|
||||||
|
const queryKey =
|
||||||
|
queryOptions?.queryKey ??
|
||||||
|
getGetConnectionCredentialsQueryKey({ cloudProvider });
|
||||||
|
|
||||||
|
const queryFn: QueryFunction<
|
||||||
|
Awaited<ReturnType<typeof getConnectionCredentials>>
|
||||||
|
> = ({ signal }) => getConnectionCredentials({ cloudProvider }, signal);
|
||||||
|
|
||||||
|
return {
|
||||||
|
queryKey,
|
||||||
|
queryFn,
|
||||||
|
enabled: !!cloudProvider,
|
||||||
|
...queryOptions,
|
||||||
|
} as UseQueryOptions<
|
||||||
|
Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||||
|
TError,
|
||||||
|
TData
|
||||||
|
> & { queryKey: QueryKey };
|
||||||
|
};
|
||||||
|
|
||||||
|
export type GetConnectionCredentialsQueryResult = NonNullable<
|
||||||
|
Awaited<ReturnType<typeof getConnectionCredentials>>
|
||||||
|
>;
|
||||||
|
export type GetConnectionCredentialsQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @summary Get connection credentials
|
||||||
|
*/
|
||||||
|
|
||||||
|
export function useGetConnectionCredentials<
|
||||||
|
TData = Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||||
|
TError = ErrorType<RenderErrorResponseDTO>
|
||||||
|
>(
|
||||||
|
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||||
|
options?: {
|
||||||
|
query?: UseQueryOptions<
|
||||||
|
Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||||
|
TError,
|
||||||
|
TData
|
||||||
|
>;
|
||||||
|
},
|
||||||
|
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||||
|
const queryOptions = getGetConnectionCredentialsQueryOptions(
|
||||||
|
{ cloudProvider },
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
|
||||||
|
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||||
|
queryKey: QueryKey;
|
||||||
|
};
|
||||||
|
|
||||||
|
query.queryKey = queryOptions.queryKey;
|
||||||
|
|
||||||
|
return query;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @summary Get connection credentials
|
||||||
|
*/
|
||||||
|
export const invalidateGetConnectionCredentials = async (
|
||||||
|
queryClient: QueryClient,
|
||||||
|
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||||
|
options?: InvalidateOptions,
|
||||||
|
): Promise<QueryClient> => {
|
||||||
|
await queryClient.invalidateQueries(
|
||||||
|
{ queryKey: getGetConnectionCredentialsQueryKey({ cloudProvider }) },
|
||||||
|
options,
|
||||||
|
);
|
||||||
|
|
||||||
|
return queryClient;
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This endpoint lists the services metadata for the specified cloud provider
|
* This endpoint lists the services metadata for the specified cloud provider
|
||||||
* @summary List services metadata
|
* @summary List services metadata
|
||||||
@@ -941,101 +1148,3 @@ export const invalidateGetService = async (
|
|||||||
|
|
||||||
return queryClient;
|
return queryClient;
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
|
||||||
* This endpoint updates a service for the specified cloud provider
|
|
||||||
* @summary Update service
|
|
||||||
*/
|
|
||||||
export const updateService = (
|
|
||||||
{ cloudProvider, serviceId }: UpdateServicePathParameters,
|
|
||||||
cloudintegrationtypesUpdatableServiceDTO: BodyType<CloudintegrationtypesUpdatableServiceDTO>,
|
|
||||||
) => {
|
|
||||||
return GeneratedAPIInstance<void>({
|
|
||||||
url: `/api/v1/cloud_integrations/${cloudProvider}/services/${serviceId}`,
|
|
||||||
method: 'PUT',
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
data: cloudintegrationtypesUpdatableServiceDTO,
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
export const getUpdateServiceMutationOptions = <
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>,
|
|
||||||
TContext = unknown
|
|
||||||
>(options?: {
|
|
||||||
mutation?: UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof updateService>>,
|
|
||||||
TError,
|
|
||||||
{
|
|
||||||
pathParams: UpdateServicePathParameters;
|
|
||||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
|
||||||
},
|
|
||||||
TContext
|
|
||||||
>;
|
|
||||||
}): UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof updateService>>,
|
|
||||||
TError,
|
|
||||||
{
|
|
||||||
pathParams: UpdateServicePathParameters;
|
|
||||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
|
||||||
},
|
|
||||||
TContext
|
|
||||||
> => {
|
|
||||||
const mutationKey = ['updateService'];
|
|
||||||
const { mutation: mutationOptions } = options
|
|
||||||
? options.mutation &&
|
|
||||||
'mutationKey' in options.mutation &&
|
|
||||||
options.mutation.mutationKey
|
|
||||||
? options
|
|
||||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
|
||||||
: { mutation: { mutationKey } };
|
|
||||||
|
|
||||||
const mutationFn: MutationFunction<
|
|
||||||
Awaited<ReturnType<typeof updateService>>,
|
|
||||||
{
|
|
||||||
pathParams: UpdateServicePathParameters;
|
|
||||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
|
||||||
}
|
|
||||||
> = (props) => {
|
|
||||||
const { pathParams, data } = props ?? {};
|
|
||||||
|
|
||||||
return updateService(pathParams, data);
|
|
||||||
};
|
|
||||||
|
|
||||||
return { mutationFn, ...mutationOptions };
|
|
||||||
};
|
|
||||||
|
|
||||||
export type UpdateServiceMutationResult = NonNullable<
|
|
||||||
Awaited<ReturnType<typeof updateService>>
|
|
||||||
>;
|
|
||||||
export type UpdateServiceMutationBody = BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
|
||||||
export type UpdateServiceMutationError = ErrorType<RenderErrorResponseDTO>;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @summary Update service
|
|
||||||
*/
|
|
||||||
export const useUpdateService = <
|
|
||||||
TError = ErrorType<RenderErrorResponseDTO>,
|
|
||||||
TContext = unknown
|
|
||||||
>(options?: {
|
|
||||||
mutation?: UseMutationOptions<
|
|
||||||
Awaited<ReturnType<typeof updateService>>,
|
|
||||||
TError,
|
|
||||||
{
|
|
||||||
pathParams: UpdateServicePathParameters;
|
|
||||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
|
||||||
},
|
|
||||||
TContext
|
|
||||||
>;
|
|
||||||
}): UseMutationResult<
|
|
||||||
Awaited<ReturnType<typeof updateService>>,
|
|
||||||
TError,
|
|
||||||
{
|
|
||||||
pathParams: UpdateServicePathParameters;
|
|
||||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
|
||||||
},
|
|
||||||
TContext
|
|
||||||
> => {
|
|
||||||
const mutationOptions = getUpdateServiceMutationOptions(options);
|
|
||||||
|
|
||||||
return useMutation(mutationOptions);
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -512,27 +512,58 @@ export interface CloudintegrationtypesAWSAccountConfigDTO {
|
|||||||
regions: string[];
|
regions: string[];
|
||||||
}
|
}
|
||||||
|
|
||||||
export type CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets = {
|
export interface CloudintegrationtypesAWSCloudWatchLogsSubscriptionDTO {
|
||||||
[key: string]: string[];
|
|
||||||
};
|
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSCollectionStrategyDTO {
|
|
||||||
aws_logs?: CloudintegrationtypesAWSLogsStrategyDTO;
|
|
||||||
aws_metrics?: CloudintegrationtypesAWSMetricsStrategyDTO;
|
|
||||||
/**
|
/**
|
||||||
* @type object
|
* @type string
|
||||||
*/
|
*/
|
||||||
s3_buckets?: CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets;
|
filterPattern: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
logGroupNamePrefix: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesAWSCloudWatchMetricStreamFilterDTO {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
*/
|
||||||
|
metricNames?: string[];
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
namespace: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSConnectionArtifactDTO {
|
export interface CloudintegrationtypesAWSConnectionArtifactDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
connectionURL: string;
|
connectionUrl: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSConnectionArtifactRequestDTO {
|
export interface CloudintegrationtypesAWSIntegrationConfigDTO {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
*/
|
||||||
|
enabledRegions: string[];
|
||||||
|
telemetryCollectionStrategy: CloudintegrationtypesAWSTelemetryCollectionStrategyDTO;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesAWSLogsCollectionStrategyDTO {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
*/
|
||||||
|
subscriptions: CloudintegrationtypesAWSCloudWatchLogsSubscriptionDTO[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesAWSMetricsCollectionStrategyDTO {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
*/
|
||||||
|
streamFilters: CloudintegrationtypesAWSCloudWatchMetricStreamFilterDTO[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesAWSPostableAccountConfigDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -543,56 +574,6 @@ export interface CloudintegrationtypesAWSConnectionArtifactRequestDTO {
|
|||||||
regions: string[];
|
regions: string[];
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSIntegrationConfigDTO {
|
|
||||||
/**
|
|
||||||
* @type array
|
|
||||||
*/
|
|
||||||
enabledRegions: string[];
|
|
||||||
telemetry: CloudintegrationtypesAWSCollectionStrategyDTO;
|
|
||||||
}
|
|
||||||
|
|
||||||
export type CloudintegrationtypesAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem = {
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
filter_pattern?: string;
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
log_group_name_prefix?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSLogsStrategyDTO {
|
|
||||||
/**
|
|
||||||
* @type array
|
|
||||||
* @nullable true
|
|
||||||
*/
|
|
||||||
cloudwatch_logs_subscriptions?:
|
|
||||||
| CloudintegrationtypesAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem[]
|
|
||||||
| null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export type CloudintegrationtypesAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem = {
|
|
||||||
/**
|
|
||||||
* @type array
|
|
||||||
*/
|
|
||||||
MetricNames?: string[];
|
|
||||||
/**
|
|
||||||
* @type string
|
|
||||||
*/
|
|
||||||
Namespace?: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSMetricsStrategyDTO {
|
|
||||||
/**
|
|
||||||
* @type array
|
|
||||||
* @nullable true
|
|
||||||
*/
|
|
||||||
cloudwatch_metric_stream_filters?:
|
|
||||||
| CloudintegrationtypesAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem[]
|
|
||||||
| null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSServiceConfigDTO {
|
export interface CloudintegrationtypesAWSServiceConfigDTO {
|
||||||
logs?: CloudintegrationtypesAWSServiceLogsConfigDTO;
|
logs?: CloudintegrationtypesAWSServiceLogsConfigDTO;
|
||||||
metrics?: CloudintegrationtypesAWSServiceMetricsConfigDTO;
|
metrics?: CloudintegrationtypesAWSServiceMetricsConfigDTO;
|
||||||
@@ -610,7 +591,7 @@ export interface CloudintegrationtypesAWSServiceLogsConfigDTO {
|
|||||||
/**
|
/**
|
||||||
* @type object
|
* @type object
|
||||||
*/
|
*/
|
||||||
s3_buckets?: CloudintegrationtypesAWSServiceLogsConfigDTOS3Buckets;
|
s3Buckets?: CloudintegrationtypesAWSServiceLogsConfigDTOS3Buckets;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesAWSServiceMetricsConfigDTO {
|
export interface CloudintegrationtypesAWSServiceMetricsConfigDTO {
|
||||||
@@ -620,6 +601,19 @@ export interface CloudintegrationtypesAWSServiceMetricsConfigDTO {
|
|||||||
enabled?: boolean;
|
enabled?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type CloudintegrationtypesAWSTelemetryCollectionStrategyDTOS3Buckets = {
|
||||||
|
[key: string]: string[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesAWSTelemetryCollectionStrategyDTO {
|
||||||
|
logs?: CloudintegrationtypesAWSLogsCollectionStrategyDTO;
|
||||||
|
metrics?: CloudintegrationtypesAWSMetricsCollectionStrategyDTO;
|
||||||
|
/**
|
||||||
|
* @type object
|
||||||
|
*/
|
||||||
|
s3Buckets?: CloudintegrationtypesAWSTelemetryCollectionStrategyDTOS3Buckets;
|
||||||
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesAccountDTO {
|
export interface CloudintegrationtypesAccountDTO {
|
||||||
agentReport: CloudintegrationtypesAgentReportDTO;
|
agentReport: CloudintegrationtypesAgentReportDTO;
|
||||||
config: CloudintegrationtypesAccountConfigDTO;
|
config: CloudintegrationtypesAccountConfigDTO;
|
||||||
@@ -693,6 +687,32 @@ export interface CloudintegrationtypesAssetsDTO {
|
|||||||
dashboards?: CloudintegrationtypesDashboardDTO[] | null;
|
dashboards?: CloudintegrationtypesDashboardDTO[] | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @nullable
|
||||||
|
*/
|
||||||
|
export type CloudintegrationtypesCloudIntegrationServiceDTO = {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
cloudIntegrationId?: string;
|
||||||
|
config?: CloudintegrationtypesServiceConfigDTO;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @format date-time
|
||||||
|
*/
|
||||||
|
createdAt?: Date;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
id: string;
|
||||||
|
type?: CloudintegrationtypesServiceIDDTO;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
* @format date-time
|
||||||
|
*/
|
||||||
|
updatedAt?: Date;
|
||||||
|
} | null;
|
||||||
|
|
||||||
export interface CloudintegrationtypesCollectedLogAttributeDTO {
|
export interface CloudintegrationtypesCollectedLogAttributeDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -727,16 +747,27 @@ export interface CloudintegrationtypesCollectedMetricDTO {
|
|||||||
unit?: string;
|
unit?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesCollectionStrategyDTO {
|
|
||||||
aws: CloudintegrationtypesAWSCollectionStrategyDTO;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CloudintegrationtypesConnectionArtifactDTO {
|
export interface CloudintegrationtypesConnectionArtifactDTO {
|
||||||
aws: CloudintegrationtypesAWSConnectionArtifactDTO;
|
aws: CloudintegrationtypesAWSConnectionArtifactDTO;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesConnectionArtifactRequestDTO {
|
export interface CloudintegrationtypesCredentialsDTO {
|
||||||
aws: CloudintegrationtypesAWSConnectionArtifactRequestDTO;
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
ingestionKey: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
ingestionUrl: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
sigNozApiKey: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
sigNozApiUrl: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesDashboardDTO {
|
export interface CloudintegrationtypesDashboardDTO {
|
||||||
@@ -768,7 +799,7 @@ export interface CloudintegrationtypesDataCollectedDTO {
|
|||||||
metrics?: CloudintegrationtypesCollectedMetricDTO[] | null;
|
metrics?: CloudintegrationtypesCollectedMetricDTO[] | null;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesGettableAccountWithArtifactDTO {
|
export interface CloudintegrationtypesGettableAccountWithConnectionArtifactDTO {
|
||||||
connectionArtifact: CloudintegrationtypesConnectionArtifactDTO;
|
connectionArtifact: CloudintegrationtypesConnectionArtifactDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -783,7 +814,7 @@ export interface CloudintegrationtypesGettableAccountsDTO {
|
|||||||
accounts: CloudintegrationtypesAccountDTO[];
|
accounts: CloudintegrationtypesAccountDTO[];
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesGettableAgentCheckInResponseDTO {
|
export interface CloudintegrationtypesGettableAgentCheckInDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -831,17 +862,85 @@ export type CloudintegrationtypesIntegrationConfigDTO = {
|
|||||||
* @type array
|
* @type array
|
||||||
*/
|
*/
|
||||||
enabled_regions: string[];
|
enabled_regions: string[];
|
||||||
telemetry: CloudintegrationtypesAWSCollectionStrategyDTO;
|
telemetry: CloudintegrationtypesOldAWSCollectionStrategyDTO;
|
||||||
} | null;
|
} | null;
|
||||||
|
|
||||||
|
export type CloudintegrationtypesOldAWSCollectionStrategyDTOS3Buckets = {
|
||||||
|
[key: string]: string[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesOldAWSCollectionStrategyDTO {
|
||||||
|
aws_logs?: CloudintegrationtypesOldAWSLogsStrategyDTO;
|
||||||
|
aws_metrics?: CloudintegrationtypesOldAWSMetricsStrategyDTO;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
provider?: string;
|
||||||
|
/**
|
||||||
|
* @type object
|
||||||
|
*/
|
||||||
|
s3_buckets?: CloudintegrationtypesOldAWSCollectionStrategyDTOS3Buckets;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CloudintegrationtypesOldAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem = {
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
filter_pattern?: string;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
log_group_name_prefix?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesOldAWSLogsStrategyDTO {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
* @nullable true
|
||||||
|
*/
|
||||||
|
cloudwatch_logs_subscriptions?:
|
||||||
|
| CloudintegrationtypesOldAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem[]
|
||||||
|
| null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export type CloudintegrationtypesOldAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem = {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
*/
|
||||||
|
MetricNames?: string[];
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
Namespace?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesOldAWSMetricsStrategyDTO {
|
||||||
|
/**
|
||||||
|
* @type array
|
||||||
|
* @nullable true
|
||||||
|
*/
|
||||||
|
cloudwatch_metric_stream_filters?:
|
||||||
|
| CloudintegrationtypesOldAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem[]
|
||||||
|
| null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesPostableAccountDTO {
|
||||||
|
config: CloudintegrationtypesPostableAccountConfigDTO;
|
||||||
|
credentials: CloudintegrationtypesCredentialsDTO;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesPostableAccountConfigDTO {
|
||||||
|
aws: CloudintegrationtypesAWSPostableAccountConfigDTO;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @nullable
|
* @nullable
|
||||||
*/
|
*/
|
||||||
export type CloudintegrationtypesPostableAgentCheckInRequestDTOData = {
|
export type CloudintegrationtypesPostableAgentCheckInDTOData = {
|
||||||
[key: string]: unknown;
|
[key: string]: unknown;
|
||||||
} | null;
|
} | null;
|
||||||
|
|
||||||
export interface CloudintegrationtypesPostableAgentCheckInRequestDTO {
|
export interface CloudintegrationtypesPostableAgentCheckInDTO {
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -858,7 +957,7 @@ export interface CloudintegrationtypesPostableAgentCheckInRequestDTO {
|
|||||||
* @type object
|
* @type object
|
||||||
* @nullable true
|
* @nullable true
|
||||||
*/
|
*/
|
||||||
data: CloudintegrationtypesPostableAgentCheckInRequestDTOData;
|
data: CloudintegrationtypesPostableAgentCheckInDTOData;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -871,6 +970,7 @@ export interface CloudintegrationtypesProviderIntegrationConfigDTO {
|
|||||||
|
|
||||||
export interface CloudintegrationtypesServiceDTO {
|
export interface CloudintegrationtypesServiceDTO {
|
||||||
assets: CloudintegrationtypesAssetsDTO;
|
assets: CloudintegrationtypesAssetsDTO;
|
||||||
|
cloudIntegrationService: CloudintegrationtypesCloudIntegrationServiceDTO;
|
||||||
dataCollected: CloudintegrationtypesDataCollectedDTO;
|
dataCollected: CloudintegrationtypesDataCollectedDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
@@ -884,9 +984,8 @@ export interface CloudintegrationtypesServiceDTO {
|
|||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
overview: string;
|
overview: string;
|
||||||
serviceConfig?: CloudintegrationtypesServiceConfigDTO;
|
supportedSignals: CloudintegrationtypesSupportedSignalsDTO;
|
||||||
supported_signals: CloudintegrationtypesSupportedSignalsDTO;
|
telemetryCollectionStrategy: CloudintegrationtypesTelemetryCollectionStrategyDTO;
|
||||||
telemetryCollectionStrategy: CloudintegrationtypesCollectionStrategyDTO;
|
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -897,6 +996,21 @@ export interface CloudintegrationtypesServiceConfigDTO {
|
|||||||
aws: CloudintegrationtypesAWSServiceConfigDTO;
|
aws: CloudintegrationtypesAWSServiceConfigDTO;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export enum CloudintegrationtypesServiceIDDTO {
|
||||||
|
alb = 'alb',
|
||||||
|
'api-gateway' = 'api-gateway',
|
||||||
|
dynamodb = 'dynamodb',
|
||||||
|
ec2 = 'ec2',
|
||||||
|
ecs = 'ecs',
|
||||||
|
eks = 'eks',
|
||||||
|
elasticache = 'elasticache',
|
||||||
|
lambda = 'lambda',
|
||||||
|
msk = 'msk',
|
||||||
|
rds = 'rds',
|
||||||
|
s3sync = 's3sync',
|
||||||
|
sns = 'sns',
|
||||||
|
sqs = 'sqs',
|
||||||
|
}
|
||||||
export interface CloudintegrationtypesServiceMetadataDTO {
|
export interface CloudintegrationtypesServiceMetadataDTO {
|
||||||
/**
|
/**
|
||||||
* @type boolean
|
* @type boolean
|
||||||
@@ -927,6 +1041,10 @@ export interface CloudintegrationtypesSupportedSignalsDTO {
|
|||||||
metrics?: boolean;
|
metrics?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface CloudintegrationtypesTelemetryCollectionStrategyDTO {
|
||||||
|
aws: CloudintegrationtypesAWSTelemetryCollectionStrategyDTO;
|
||||||
|
}
|
||||||
|
|
||||||
export interface CloudintegrationtypesUpdatableAccountDTO {
|
export interface CloudintegrationtypesUpdatableAccountDTO {
|
||||||
config: CloudintegrationtypesAccountConfigDTO;
|
config: CloudintegrationtypesAccountConfigDTO;
|
||||||
}
|
}
|
||||||
@@ -3450,7 +3568,7 @@ export type AgentCheckInDeprecatedPathParameters = {
|
|||||||
cloudProvider: string;
|
cloudProvider: string;
|
||||||
};
|
};
|
||||||
export type AgentCheckInDeprecated200 = {
|
export type AgentCheckInDeprecated200 = {
|
||||||
data: CloudintegrationtypesGettableAgentCheckInResponseDTO;
|
data: CloudintegrationtypesGettableAgentCheckInDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3472,7 +3590,7 @@ export type CreateAccountPathParameters = {
|
|||||||
cloudProvider: string;
|
cloudProvider: string;
|
||||||
};
|
};
|
||||||
export type CreateAccount200 = {
|
export type CreateAccount200 = {
|
||||||
data: CloudintegrationtypesGettableAccountWithArtifactDTO;
|
data: CloudintegrationtypesGettableAccountWithConnectionArtifactDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3499,11 +3617,27 @@ export type UpdateAccountPathParameters = {
|
|||||||
cloudProvider: string;
|
cloudProvider: string;
|
||||||
id: string;
|
id: string;
|
||||||
};
|
};
|
||||||
|
export type UpdateServicePathParameters = {
|
||||||
|
cloudProvider: string;
|
||||||
|
id: string;
|
||||||
|
serviceId: string;
|
||||||
|
};
|
||||||
export type AgentCheckInPathParameters = {
|
export type AgentCheckInPathParameters = {
|
||||||
cloudProvider: string;
|
cloudProvider: string;
|
||||||
};
|
};
|
||||||
export type AgentCheckIn200 = {
|
export type AgentCheckIn200 = {
|
||||||
data: CloudintegrationtypesGettableAgentCheckInResponseDTO;
|
data: CloudintegrationtypesGettableAgentCheckInDTO;
|
||||||
|
/**
|
||||||
|
* @type string
|
||||||
|
*/
|
||||||
|
status: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type GetConnectionCredentialsPathParameters = {
|
||||||
|
cloudProvider: string;
|
||||||
|
};
|
||||||
|
export type GetConnectionCredentials200 = {
|
||||||
|
data: CloudintegrationtypesCredentialsDTO;
|
||||||
/**
|
/**
|
||||||
* @type string
|
* @type string
|
||||||
*/
|
*/
|
||||||
@@ -3533,10 +3667,6 @@ export type GetService200 = {
|
|||||||
status: string;
|
status: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
export type UpdateServicePathParameters = {
|
|
||||||
cloudProvider: string;
|
|
||||||
serviceId: string;
|
|
||||||
};
|
|
||||||
export type CreateSessionByGoogleCallback303 = {
|
export type CreateSessionByGoogleCallback303 = {
|
||||||
data: AuthtypesGettableTokenDTO;
|
data: AuthtypesGettableTokenDTO;
|
||||||
/**
|
/**
|
||||||
|
|||||||
@@ -10,6 +10,26 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
||||||
|
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/credentials", handler.New(
|
||||||
|
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.GetConnectionCredentials),
|
||||||
|
handler.OpenAPIDef{
|
||||||
|
ID: "GetConnectionCredentials",
|
||||||
|
Tags: []string{"cloudintegration"},
|
||||||
|
Summary: "Get connection credentials",
|
||||||
|
Description: "This endpoint retrieves the connection credentials required for integration",
|
||||||
|
Request: nil,
|
||||||
|
RequestContentType: "application/json",
|
||||||
|
Response: new(citypes.Credentials),
|
||||||
|
ResponseContentType: "application/json",
|
||||||
|
SuccessStatusCode: http.StatusOK,
|
||||||
|
ErrorStatusCodes: []int{},
|
||||||
|
Deprecated: false,
|
||||||
|
SecuritySchemes: newSecuritySchemes(types.RoleAdmin),
|
||||||
|
},
|
||||||
|
)).Methods(http.MethodGet).GetError(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts", handler.New(
|
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts", handler.New(
|
||||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.CreateAccount),
|
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.CreateAccount),
|
||||||
handler.OpenAPIDef{
|
handler.OpenAPIDef{
|
||||||
@@ -17,9 +37,9 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
|||||||
Tags: []string{"cloudintegration"},
|
Tags: []string{"cloudintegration"},
|
||||||
Summary: "Create account",
|
Summary: "Create account",
|
||||||
Description: "This endpoint creates a new cloud integration account for the specified cloud provider",
|
Description: "This endpoint creates a new cloud integration account for the specified cloud provider",
|
||||||
Request: new(citypes.PostableConnectionArtifact),
|
Request: new(citypes.PostableAccount),
|
||||||
RequestContentType: "application/json",
|
RequestContentType: "application/json",
|
||||||
Response: new(citypes.GettableAccountWithArtifact),
|
Response: new(citypes.GettableAccountWithConnectionArtifact),
|
||||||
ResponseContentType: "application/json",
|
ResponseContentType: "application/json",
|
||||||
SuccessStatusCode: http.StatusOK,
|
SuccessStatusCode: http.StatusOK,
|
||||||
ErrorStatusCodes: []int{},
|
ErrorStatusCodes: []int{},
|
||||||
@@ -59,7 +79,7 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
|||||||
Description: "This endpoint gets an account for the specified cloud provider",
|
Description: "This endpoint gets an account for the specified cloud provider",
|
||||||
Request: nil,
|
Request: nil,
|
||||||
RequestContentType: "",
|
RequestContentType: "",
|
||||||
Response: new(citypes.GettableAccount),
|
Response: new(citypes.Account),
|
||||||
ResponseContentType: "application/json",
|
ResponseContentType: "application/json",
|
||||||
SuccessStatusCode: http.StatusOK,
|
SuccessStatusCode: http.StatusOK,
|
||||||
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusNotFound},
|
ErrorStatusCodes: []int{http.StatusBadRequest, http.StatusNotFound},
|
||||||
@@ -139,7 +159,7 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
|||||||
Description: "This endpoint gets a service for the specified cloud provider",
|
Description: "This endpoint gets a service for the specified cloud provider",
|
||||||
Request: nil,
|
Request: nil,
|
||||||
RequestContentType: "",
|
RequestContentType: "",
|
||||||
Response: new(citypes.GettableService),
|
Response: new(citypes.Service),
|
||||||
ResponseContentType: "application/json",
|
ResponseContentType: "application/json",
|
||||||
SuccessStatusCode: http.StatusOK,
|
SuccessStatusCode: http.StatusOK,
|
||||||
ErrorStatusCodes: []int{},
|
ErrorStatusCodes: []int{},
|
||||||
@@ -150,7 +170,7 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/services/{service_id}", handler.New(
|
if err := router.Handle("/api/v1/cloud_integrations/{cloud_provider}/accounts/{id}/services/{service_id}", handler.New(
|
||||||
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.UpdateService),
|
provider.authZ.AdminAccess(provider.cloudIntegrationHandler.UpdateService),
|
||||||
handler.OpenAPIDef{
|
handler.OpenAPIDef{
|
||||||
ID: "UpdateService",
|
ID: "UpdateService",
|
||||||
@@ -179,9 +199,9 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
|||||||
Tags: []string{"cloudintegration"},
|
Tags: []string{"cloudintegration"},
|
||||||
Summary: "Agent check-in",
|
Summary: "Agent check-in",
|
||||||
Description: "[Deprecated] This endpoint is called by the deployed agent to check in",
|
Description: "[Deprecated] This endpoint is called by the deployed agent to check in",
|
||||||
Request: new(citypes.PostableAgentCheckInRequest),
|
Request: new(citypes.PostableAgentCheckIn),
|
||||||
RequestContentType: "application/json",
|
RequestContentType: "application/json",
|
||||||
Response: new(citypes.GettableAgentCheckInResponse),
|
Response: new(citypes.GettableAgentCheckIn),
|
||||||
ResponseContentType: "application/json",
|
ResponseContentType: "application/json",
|
||||||
SuccessStatusCode: http.StatusOK,
|
SuccessStatusCode: http.StatusOK,
|
||||||
ErrorStatusCodes: []int{},
|
ErrorStatusCodes: []int{},
|
||||||
@@ -199,9 +219,9 @@ func (provider *provider) addCloudIntegrationRoutes(router *mux.Router) error {
|
|||||||
Tags: []string{"cloudintegration"},
|
Tags: []string{"cloudintegration"},
|
||||||
Summary: "Agent check-in",
|
Summary: "Agent check-in",
|
||||||
Description: "This endpoint is called by the deployed agent to check in",
|
Description: "This endpoint is called by the deployed agent to check in",
|
||||||
Request: new(citypes.PostableAgentCheckInRequest),
|
Request: new(citypes.PostableAgentCheckIn),
|
||||||
RequestContentType: "application/json",
|
RequestContentType: "application/json",
|
||||||
Response: new(citypes.GettableAgentCheckInResponse),
|
Response: new(citypes.GettableAgentCheckIn),
|
||||||
ResponseContentType: "application/json",
|
ResponseContentType: "application/json",
|
||||||
SuccessStatusCode: http.StatusOK,
|
SuccessStatusCode: http.StatusOK,
|
||||||
ErrorStatusCodes: []int{},
|
ErrorStatusCodes: []int{},
|
||||||
|
|||||||
@@ -10,37 +10,42 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
type Module interface {
|
type Module interface {
|
||||||
|
GetConnectionCredentials(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType) (*citypes.Credentials, error)
|
||||||
|
|
||||||
CreateAccount(ctx context.Context, account *citypes.Account) error
|
CreateAccount(ctx context.Context, account *citypes.Account) error
|
||||||
|
|
||||||
// GetAccount returns cloud integration account
|
// GetAccount returns cloud integration account
|
||||||
GetAccount(ctx context.Context, orgID, accountID valuer.UUID) (*citypes.Account, error)
|
GetAccount(ctx context.Context, orgID, accountID valuer.UUID, provider citypes.CloudProviderType) (*citypes.Account, error)
|
||||||
|
|
||||||
// ListAccounts lists accounts where agent is connected
|
// ListAccounts lists accounts where agent is connected
|
||||||
ListAccounts(ctx context.Context, orgID valuer.UUID) ([]*citypes.Account, error)
|
ListAccounts(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType) ([]*citypes.Account, error)
|
||||||
|
|
||||||
// UpdateAccount updates the cloud integration account for a specific organization.
|
// UpdateAccount updates the cloud integration account for a specific organization.
|
||||||
UpdateAccount(ctx context.Context, account *citypes.Account) error
|
UpdateAccount(ctx context.Context, account *citypes.Account) error
|
||||||
|
|
||||||
// DisconnectAccount soft deletes/removes a cloud integration account.
|
// DisconnectAccount soft deletes/removes a cloud integration account.
|
||||||
DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID) error
|
DisconnectAccount(ctx context.Context, orgID, accountID valuer.UUID, provider citypes.CloudProviderType) error
|
||||||
|
|
||||||
// GetConnectionArtifact returns cloud provider specific connection information,
|
// GetConnectionArtifact returns cloud provider specific connection information,
|
||||||
// client side handles how this information is shown
|
// client side handles how this information is shown
|
||||||
GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.ConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)
|
GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.GetConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)
|
||||||
|
|
||||||
// ListServicesMetadata returns the list of services metadata for a cloud provider attached with the integrationID.
|
// ListServicesMetadata returns the list of supported services' metadata for a cloud provider with optional filtering for a specific integration
|
||||||
// This just returns a summary of the service and not the whole service definition
|
// This just returns a summary of the service and not the whole service definition.
|
||||||
ListServicesMetadata(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID) ([]*citypes.ServiceMetadata, error)
|
ListServicesMetadata(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType, integrationID *valuer.UUID) ([]*citypes.ServiceMetadata, error)
|
||||||
|
|
||||||
// GetService returns service definition details for a serviceID. This returns config and
|
// GetService returns service definition details for a serviceID. This optionally returns the service config
|
||||||
// other details required to show in service details page on web client.
|
// for integrationID if provided.
|
||||||
GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID string) (*citypes.Service, error)
|
GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID citypes.ServiceID, provider citypes.CloudProviderType) (*citypes.Service, error)
|
||||||
|
|
||||||
|
// CreateService creates a new service for a cloud integration account.
|
||||||
|
CreateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService, provider citypes.CloudProviderType) error
|
||||||
|
|
||||||
// UpdateService updates cloud integration service
|
// UpdateService updates cloud integration service
|
||||||
UpdateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService) error
|
UpdateService(ctx context.Context, orgID valuer.UUID, service *citypes.CloudIntegrationService, provider citypes.CloudProviderType) error
|
||||||
|
|
||||||
// AgentCheckIn is called by agent to heartbeat and get latest config in response.
|
// AgentCheckIn is called by agent to send heartbeat and get latest config in response.
|
||||||
AgentCheckIn(ctx context.Context, orgID valuer.UUID, req *citypes.AgentCheckInRequest) (*citypes.AgentCheckInResponse, error)
|
AgentCheckIn(ctx context.Context, orgID valuer.UUID, provider citypes.CloudProviderType, req *citypes.AgentCheckInRequest) (*citypes.AgentCheckInResponse, error)
|
||||||
|
|
||||||
// GetDashboardByID returns dashboard JSON for a given dashboard id.
|
// GetDashboardByID returns dashboard JSON for a given dashboard id.
|
||||||
// this only returns the dashboard when the service (embedded in dashboard id) is enabled
|
// this only returns the dashboard when the service (embedded in dashboard id) is enabled
|
||||||
@@ -52,7 +57,22 @@ type Module interface {
|
|||||||
ListDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
|
ListDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type CloudProviderModule interface {
|
||||||
|
GetConnectionArtifact(ctx context.Context, account *citypes.Account, req *citypes.GetConnectionArtifactRequest) (*citypes.ConnectionArtifact, error)
|
||||||
|
|
||||||
|
// ListServiceDefinitions returns all service definitions for this cloud provider.
|
||||||
|
ListServiceDefinitions(ctx context.Context) ([]*citypes.ServiceDefinition, error)
|
||||||
|
|
||||||
|
// GetServiceDefinition returns the service definition for the given service ID.
|
||||||
|
GetServiceDefinition(ctx context.Context, serviceID citypes.ServiceID) (*citypes.ServiceDefinition, error)
|
||||||
|
|
||||||
|
// BuildIntegrationConfig compiles the provider-specific integration config from the account
|
||||||
|
// and list of configured services. This is the config returned to the agent on check-in.
|
||||||
|
BuildIntegrationConfig(ctx context.Context, account *citypes.Account, services []*citypes.StorableCloudIntegrationService) (*citypes.ProviderIntegrationConfig, error)
|
||||||
|
}
|
||||||
|
|
||||||
type Handler interface {
|
type Handler interface {
|
||||||
|
GetConnectionCredentials(http.ResponseWriter, *http.Request)
|
||||||
CreateAccount(http.ResponseWriter, *http.Request)
|
CreateAccount(http.ResponseWriter, *http.Request)
|
||||||
ListAccounts(http.ResponseWriter, *http.Request)
|
ListAccounts(http.ResponseWriter, *http.Request)
|
||||||
GetAccount(http.ResponseWriter, *http.Request)
|
GetAccount(http.ResponseWriter, *http.Request)
|
||||||
|
|||||||
@@ -447,9 +447,9 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/ApplicationELB"
|
"namespace": "AWS/ApplicationELB"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -171,14 +171,14 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/ApiGateway"
|
"namespace": "AWS/ApiGateway"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"logs": {
|
"logs": {
|
||||||
"cloudwatchLogsSubscriptions": [
|
"subscriptions": [
|
||||||
{
|
{
|
||||||
"logGroupNamePrefix": "API-Gateway",
|
"logGroupNamePrefix": "API-Gateway",
|
||||||
"filterPattern": ""
|
"filterPattern": ""
|
||||||
|
|||||||
@@ -374,9 +374,9 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/DynamoDB"
|
"namespace": "AWS/DynamoDB"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -495,12 +495,12 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/EC2"
|
"namespace": "AWS/EC2"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Namespace": "CWAgent"
|
"namespace": "CWAgent"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -823,17 +823,17 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/ECS"
|
"namespace": "AWS/ECS"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Namespace": "ECS/ContainerInsights"
|
"namespace": "ECS/ContainerInsights"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"logs": {
|
"logs": {
|
||||||
"cloudwatchLogsSubscriptions": [
|
"subscriptions": [
|
||||||
{
|
{
|
||||||
"logGroupNamePrefix": "/ecs",
|
"logGroupNamePrefix": "/ecs",
|
||||||
"filterPattern": ""
|
"filterPattern": ""
|
||||||
|
|||||||
@@ -2702,17 +2702,17 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/EKS"
|
"namespace": "AWS/EKS"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"Namespace": "ContainerInsights"
|
"namespace": "ContainerInsights"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"logs": {
|
"logs": {
|
||||||
"cloudwatchLogsSubscriptions": [
|
"subscriptions": [
|
||||||
{
|
{
|
||||||
"logGroupNamePrefix": "/aws/containerinsights",
|
"logGroupNamePrefix": "/aws/containerinsights",
|
||||||
"filterPattern": ""
|
"filterPattern": ""
|
||||||
|
|||||||
@@ -1934,9 +1934,9 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/ElastiCache"
|
"namespace": "AWS/ElastiCache"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -271,14 +271,14 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/Lambda"
|
"namespace": "AWS/Lambda"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"logs": {
|
"logs": {
|
||||||
"cloudwatchLogsSubscriptions": [
|
"subscriptions": [
|
||||||
{
|
{
|
||||||
"logGroupNamePrefix": "/aws/lambda",
|
"logGroupNamePrefix": "/aws/lambda",
|
||||||
"filterPattern": ""
|
"filterPattern": ""
|
||||||
|
|||||||
@@ -1070,9 +1070,9 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/Kafka"
|
"namespace": "AWS/Kafka"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -775,14 +775,14 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/RDS"
|
"namespace": "AWS/RDS"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"logs": {
|
"logs": {
|
||||||
"cloudwatchLogsSubscriptions": [
|
"subscriptions": [
|
||||||
{
|
{
|
||||||
"logGroupNamePrefix": "/aws/rds",
|
"logGroupNamePrefix": "/aws/rds",
|
||||||
"filterPattern": ""
|
"filterPattern": ""
|
||||||
|
|||||||
@@ -39,7 +39,7 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"logs": {
|
"logs": {
|
||||||
"cloudwatchLogsSubscriptions": [
|
"subscriptions": [
|
||||||
{
|
{
|
||||||
"logGroupNamePrefix": "x/signoz/forwarder",
|
"logGroupNamePrefix": "x/signoz/forwarder",
|
||||||
"filterPattern": ""
|
"filterPattern": ""
|
||||||
|
|||||||
@@ -110,9 +110,9 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/SNS"
|
"namespace": "AWS/SNS"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -230,9 +230,9 @@
|
|||||||
"telemetryCollectionStrategy": {
|
"telemetryCollectionStrategy": {
|
||||||
"aws": {
|
"aws": {
|
||||||
"metrics": {
|
"metrics": {
|
||||||
"cloudwatchMetricStreamFilters": [
|
"streamFilters": [
|
||||||
{
|
{
|
||||||
"Namespace": "AWS/SQS"
|
"namespace": "AWS/SQS"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,6 +12,10 @@ func NewHandler() cloudintegration.Handler {
|
|||||||
return &handler{}
|
return &handler{}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (handler *handler) GetConnectionCredentials(http.ResponseWriter, *http.Request) {
|
||||||
|
panic("unimplemented")
|
||||||
|
}
|
||||||
|
|
||||||
func (handler *handler) CreateAccount(writer http.ResponseWriter, request *http.Request) {
|
func (handler *handler) CreateAccount(writer http.ResponseWriter, request *http.Request) {
|
||||||
// TODO implement me
|
// TODO implement me
|
||||||
panic("implement me")
|
panic("implement me")
|
||||||
|
|||||||
@@ -34,6 +34,25 @@ func (store *store) GetAccountByID(ctx context.Context, orgID, id valuer.UUID, p
|
|||||||
return account, nil
|
return account, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (store *store) GetConnectedAccount(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, providerAccountID string) (*cloudintegrationtypes.StorableCloudIntegration, error) {
|
||||||
|
account := new(cloudintegrationtypes.StorableCloudIntegration)
|
||||||
|
err := store.
|
||||||
|
store.
|
||||||
|
BunDBCtx(ctx).
|
||||||
|
NewSelect().
|
||||||
|
Model(account).
|
||||||
|
Where("org_id = ?", orgID).
|
||||||
|
Where("provider = ?", provider).
|
||||||
|
Where("account_id = ?", providerAccountID).
|
||||||
|
Where("last_agent_report IS NOT NULL").
|
||||||
|
Where("removed_at IS NULL").
|
||||||
|
Scan(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, store.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "connected account with provider account id %s not found", providerAccountID)
|
||||||
|
}
|
||||||
|
return account, nil
|
||||||
|
}
|
||||||
|
|
||||||
func (store *store) ListConnectedAccounts(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) ([]*cloudintegrationtypes.StorableCloudIntegration, error) {
|
func (store *store) ListConnectedAccounts(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) ([]*cloudintegrationtypes.StorableCloudIntegration, error) {
|
||||||
var accounts []*cloudintegrationtypes.StorableCloudIntegration
|
var accounts []*cloudintegrationtypes.StorableCloudIntegration
|
||||||
err := store.
|
err := store.
|
||||||
@@ -96,25 +115,6 @@ func (store *store) RemoveAccount(ctx context.Context, orgID, id valuer.UUID, pr
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
func (store *store) GetConnectedAccount(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, providerAccountID string) (*cloudintegrationtypes.StorableCloudIntegration, error) {
|
|
||||||
account := new(cloudintegrationtypes.StorableCloudIntegration)
|
|
||||||
err := store.
|
|
||||||
store.
|
|
||||||
BunDBCtx(ctx).
|
|
||||||
NewSelect().
|
|
||||||
Model(account).
|
|
||||||
Where("org_id = ?", orgID).
|
|
||||||
Where("provider = ?", provider).
|
|
||||||
Where("account_id = ?", providerAccountID).
|
|
||||||
Where("last_agent_report IS NOT NULL").
|
|
||||||
Where("removed_at IS NULL").
|
|
||||||
Scan(ctx)
|
|
||||||
if err != nil {
|
|
||||||
return nil, store.store.WrapNotFoundErrf(err, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "connected account with provider account id %s not found", providerAccountID)
|
|
||||||
}
|
|
||||||
return account, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (store *store) GetServiceByServiceID(ctx context.Context, cloudIntegrationID valuer.UUID, serviceID cloudintegrationtypes.ServiceID) (*cloudintegrationtypes.StorableCloudIntegrationService, error) {
|
func (store *store) GetServiceByServiceID(ctx context.Context, cloudIntegrationID valuer.UUID, serviceID cloudintegrationtypes.ServiceID) (*cloudintegrationtypes.StorableCloudIntegrationService, error) {
|
||||||
service := new(cloudintegrationtypes.StorableCloudIntegrationService)
|
service := new(cloudintegrationtypes.StorableCloudIntegrationService)
|
||||||
err := store.
|
err := store.
|
||||||
@@ -172,3 +172,9 @@ func (store *store) UpdateService(ctx context.Context, service *cloudintegration
|
|||||||
Exec(ctx)
|
Exec(ctx)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
|
||||||
|
return store.store.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
|
||||||
|
return cb(ctx)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|||||||
19
pkg/modules/tracedetail/tracedetail.go
Normal file
19
pkg/modules/tracedetail/tracedetail.go
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
package tracedetail
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/tracedetailtypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler exposes HTTP handlers for trace detail APIs.
|
||||||
|
type Handler interface {
|
||||||
|
GetWaterfall(http.ResponseWriter, *http.Request)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Module defines the business logic for trace detail operations.
|
||||||
|
type Module interface {
|
||||||
|
GetWaterfall(ctx context.Context, orgID valuer.UUID, traceID string, req *tracedetailtypes.WaterfallRequest) (*tracedetailtypes.WaterfallResponse, error)
|
||||||
|
}
|
||||||
@@ -40,6 +40,7 @@ type querier struct {
|
|||||||
promEngine prometheus.Prometheus
|
promEngine prometheus.Prometheus
|
||||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
|
||||||
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
||||||
|
auditStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
|
||||||
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
||||||
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
|
||||||
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder
|
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder
|
||||||
@@ -56,6 +57,7 @@ func New(
|
|||||||
promEngine prometheus.Prometheus,
|
promEngine prometheus.Prometheus,
|
||||||
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
|
||||||
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||||
|
auditStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
|
||||||
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
||||||
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
|
||||||
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder,
|
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder,
|
||||||
@@ -69,6 +71,7 @@ func New(
|
|||||||
promEngine: promEngine,
|
promEngine: promEngine,
|
||||||
traceStmtBuilder: traceStmtBuilder,
|
traceStmtBuilder: traceStmtBuilder,
|
||||||
logStmtBuilder: logStmtBuilder,
|
logStmtBuilder: logStmtBuilder,
|
||||||
|
auditStmtBuilder: auditStmtBuilder,
|
||||||
metricStmtBuilder: metricStmtBuilder,
|
metricStmtBuilder: metricStmtBuilder,
|
||||||
meterStmtBuilder: meterStmtBuilder,
|
meterStmtBuilder: meterStmtBuilder,
|
||||||
traceOperatorStmtBuilder: traceOperatorStmtBuilder,
|
traceOperatorStmtBuilder: traceOperatorStmtBuilder,
|
||||||
@@ -361,7 +364,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
|||||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||||
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
|
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
|
||||||
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
|
||||||
bq := newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, tmplVars)
|
stmtBuilder := q.logStmtBuilder
|
||||||
|
if spec.Source == telemetrytypes.SourceAudit {
|
||||||
|
stmtBuilder = q.auditStmtBuilder
|
||||||
|
}
|
||||||
|
bq := newBuilderQuery(q.logger, q.telemetryStore, stmtBuilder, spec, timeRange, req.RequestType, tmplVars)
|
||||||
queries[spec.Name] = bq
|
queries[spec.Name] = bq
|
||||||
steps[spec.Name] = spec.StepInterval
|
steps[spec.Name] = spec.StepInterval
|
||||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||||
@@ -550,7 +557,11 @@ func (q *querier) QueryRawStream(ctx context.Context, orgID valuer.UUID, req *qb
|
|||||||
case <-tick:
|
case <-tick:
|
||||||
// timestamp end is not specified here
|
// timestamp end is not specified here
|
||||||
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: tsStart}, req.RequestType)
|
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: tsStart}, req.RequestType)
|
||||||
bq := newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, map[string]qbtypes.VariableItem{
|
liveTailStmtBuilder := q.logStmtBuilder
|
||||||
|
if spec.Source == telemetrytypes.SourceAudit {
|
||||||
|
liveTailStmtBuilder = q.auditStmtBuilder
|
||||||
|
}
|
||||||
|
bq := newBuilderQuery(q.logger, q.telemetryStore, liveTailStmtBuilder, spec, timeRange, req.RequestType, map[string]qbtypes.VariableItem{
|
||||||
"id": {
|
"id": {
|
||||||
Value: updatedLogID,
|
Value: updatedLogID,
|
||||||
},
|
},
|
||||||
@@ -850,7 +861,11 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
|
|||||||
specCopy := qt.spec.Copy()
|
specCopy := qt.spec.Copy()
|
||||||
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
|
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
|
||||||
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
|
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
|
||||||
return newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
|
shiftStmtBuilder := q.logStmtBuilder
|
||||||
|
if qt.spec.Source == telemetrytypes.SourceAudit {
|
||||||
|
shiftStmtBuilder = q.auditStmtBuilder
|
||||||
|
}
|
||||||
|
return newBuilderQuery(q.logger, q.telemetryStore, shiftStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
|
||||||
|
|
||||||
case *builderQuery[qbtypes.MetricAggregation]:
|
case *builderQuery[qbtypes.MetricAggregation]:
|
||||||
specCopy := qt.spec.Copy()
|
specCopy := qt.spec.Copy()
|
||||||
|
|||||||
@@ -47,6 +47,7 @@ func TestQueryRange_MetricTypeMissing(t *testing.T) {
|
|||||||
nil, // prometheus
|
nil, // prometheus
|
||||||
nil, // traceStmtBuilder
|
nil, // traceStmtBuilder
|
||||||
nil, // logStmtBuilder
|
nil, // logStmtBuilder
|
||||||
|
nil, // auditStmtBuilder
|
||||||
nil, // metricStmtBuilder
|
nil, // metricStmtBuilder
|
||||||
nil, // meterStmtBuilder
|
nil, // meterStmtBuilder
|
||||||
nil, // traceOperatorStmtBuilder
|
nil, // traceOperatorStmtBuilder
|
||||||
@@ -110,6 +111,7 @@ func TestQueryRange_MetricTypeFromStore(t *testing.T) {
|
|||||||
nil, // prometheus
|
nil, // prometheus
|
||||||
nil, // traceStmtBuilder
|
nil, // traceStmtBuilder
|
||||||
nil, // logStmtBuilder
|
nil, // logStmtBuilder
|
||||||
|
nil, // auditStmtBuilder
|
||||||
&mockMetricStmtBuilder{}, // metricStmtBuilder
|
&mockMetricStmtBuilder{}, // metricStmtBuilder
|
||||||
nil, // meterStmtBuilder
|
nil, // meterStmtBuilder
|
||||||
nil, // traceOperatorStmtBuilder
|
nil, // traceOperatorStmtBuilder
|
||||||
|
|||||||
@@ -9,6 +9,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/prometheus"
|
"github.com/SigNoz/signoz/pkg/prometheus"
|
||||||
"github.com/SigNoz/signoz/pkg/querier"
|
"github.com/SigNoz/signoz/pkg/querier"
|
||||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetryaudit"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
|
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||||
@@ -63,6 +64,11 @@ func newProvider(
|
|||||||
telemetrylogs.TagAttributesV2TableName,
|
telemetrylogs.TagAttributesV2TableName,
|
||||||
telemetrylogs.LogAttributeKeysTblName,
|
telemetrylogs.LogAttributeKeysTblName,
|
||||||
telemetrylogs.LogResourceKeysTblName,
|
telemetrylogs.LogResourceKeysTblName,
|
||||||
|
telemetryaudit.DBName,
|
||||||
|
telemetryaudit.AuditLogsTableName,
|
||||||
|
telemetryaudit.TagAttributesTableName,
|
||||||
|
telemetryaudit.LogAttributeKeysTblName,
|
||||||
|
telemetryaudit.LogResourceKeysTblName,
|
||||||
telemetrymetadata.DBName,
|
telemetrymetadata.DBName,
|
||||||
telemetrymetadata.AttributesMetadataLocalTableName,
|
telemetrymetadata.AttributesMetadataLocalTableName,
|
||||||
telemetrymetadata.ColumnEvolutionMetadataTableName,
|
telemetrymetadata.ColumnEvolutionMetadataTableName,
|
||||||
@@ -82,13 +88,13 @@ func newProvider(
|
|||||||
telemetryStore,
|
telemetryStore,
|
||||||
)
|
)
|
||||||
|
|
||||||
// ADD: Create trace operator statement builder
|
// Create trace operator statement builder
|
||||||
traceOperatorStmtBuilder := telemetrytraces.NewTraceOperatorStatementBuilder(
|
traceOperatorStmtBuilder := telemetrytraces.NewTraceOperatorStatementBuilder(
|
||||||
settings,
|
settings,
|
||||||
telemetryMetadataStore,
|
telemetryMetadataStore,
|
||||||
traceFieldMapper,
|
traceFieldMapper,
|
||||||
traceConditionBuilder,
|
traceConditionBuilder,
|
||||||
traceStmtBuilder, // Pass the regular trace statement builder
|
traceStmtBuilder,
|
||||||
traceAggExprRewriter,
|
traceAggExprRewriter,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -112,6 +118,26 @@ func newProvider(
|
|||||||
telemetrylogs.GetBodyJSONKey,
|
telemetrylogs.GetBodyJSONKey,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// Create audit statement builder
|
||||||
|
auditFieldMapper := telemetryaudit.NewFieldMapper()
|
||||||
|
auditConditionBuilder := telemetryaudit.NewConditionBuilder(auditFieldMapper)
|
||||||
|
auditAggExprRewriter := querybuilder.NewAggExprRewriter(
|
||||||
|
settings,
|
||||||
|
telemetryaudit.DefaultFullTextColumn,
|
||||||
|
auditFieldMapper,
|
||||||
|
auditConditionBuilder,
|
||||||
|
nil,
|
||||||
|
)
|
||||||
|
auditStmtBuilder := telemetryaudit.NewAuditQueryStatementBuilder(
|
||||||
|
settings,
|
||||||
|
telemetryMetadataStore,
|
||||||
|
auditFieldMapper,
|
||||||
|
auditConditionBuilder,
|
||||||
|
auditAggExprRewriter,
|
||||||
|
telemetryaudit.DefaultFullTextColumn,
|
||||||
|
nil,
|
||||||
|
)
|
||||||
|
|
||||||
// Create metric statement builder
|
// Create metric statement builder
|
||||||
metricFieldMapper := telemetrymetrics.NewFieldMapper()
|
metricFieldMapper := telemetrymetrics.NewFieldMapper()
|
||||||
metricConditionBuilder := telemetrymetrics.NewConditionBuilder(metricFieldMapper)
|
metricConditionBuilder := telemetrymetrics.NewConditionBuilder(metricFieldMapper)
|
||||||
@@ -148,6 +174,7 @@ func newProvider(
|
|||||||
prometheus,
|
prometheus,
|
||||||
traceStmtBuilder,
|
traceStmtBuilder,
|
||||||
logStmtBuilder,
|
logStmtBuilder,
|
||||||
|
auditStmtBuilder,
|
||||||
metricStmtBuilder,
|
metricStmtBuilder,
|
||||||
meterStmtBuilder,
|
meterStmtBuilder,
|
||||||
traceOperatorStmtBuilder,
|
traceOperatorStmtBuilder,
|
||||||
|
|||||||
@@ -46,6 +46,7 @@ func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.Teleme
|
|||||||
nil, // prometheus
|
nil, // prometheus
|
||||||
nil, // traceStmtBuilder
|
nil, // traceStmtBuilder
|
||||||
nil, // logStmtBuilder
|
nil, // logStmtBuilder
|
||||||
|
nil, // auditStmtBuilder
|
||||||
metricStmtBuilder,
|
metricStmtBuilder,
|
||||||
nil, // meterStmtBuilder
|
nil, // meterStmtBuilder
|
||||||
nil, // traceOperatorStmtBuilder
|
nil, // traceOperatorStmtBuilder
|
||||||
@@ -91,6 +92,7 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
|
|||||||
nil, // prometheus
|
nil, // prometheus
|
||||||
nil, // traceStmtBuilder
|
nil, // traceStmtBuilder
|
||||||
logStmtBuilder, // logStmtBuilder
|
logStmtBuilder, // logStmtBuilder
|
||||||
|
nil, // auditStmtBuilder
|
||||||
nil, // metricStmtBuilder
|
nil, // metricStmtBuilder
|
||||||
nil, // meterStmtBuilder
|
nil, // meterStmtBuilder
|
||||||
nil, // traceOperatorStmtBuilder
|
nil, // traceOperatorStmtBuilder
|
||||||
@@ -131,6 +133,7 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM
|
|||||||
nil, // prometheus
|
nil, // prometheus
|
||||||
traceStmtBuilder, // traceStmtBuilder
|
traceStmtBuilder, // traceStmtBuilder
|
||||||
nil, // logStmtBuilder
|
nil, // logStmtBuilder
|
||||||
|
nil, // auditStmtBuilder
|
||||||
nil, // metricStmtBuilder
|
nil, // metricStmtBuilder
|
||||||
nil, // meterStmtBuilder
|
nil, // meterStmtBuilder
|
||||||
nil, // traceOperatorStmtBuilder
|
nil, // traceOperatorStmtBuilder
|
||||||
|
|||||||
@@ -818,9 +818,9 @@ func (v *filterExpressionVisitor) VisitFunctionCall(ctx *grammar.FunctionCallCon
|
|||||||
case "has":
|
case "has":
|
||||||
cond = fmt.Sprintf("has(%s, %s)", fieldName, v.builder.Var(value[0]))
|
cond = fmt.Sprintf("has(%s, %s)", fieldName, v.builder.Var(value[0]))
|
||||||
case "hasAny":
|
case "hasAny":
|
||||||
cond = fmt.Sprintf("hasAny(%s, %s)", fieldName, v.builder.Var(value))
|
cond = fmt.Sprintf("hasAny(%s, %s)", fieldName, v.builder.Var(value[0]))
|
||||||
case "hasAll":
|
case "hasAll":
|
||||||
cond = fmt.Sprintf("hasAll(%s, %s)", fieldName, v.builder.Var(value))
|
cond = fmt.Sprintf("hasAll(%s, %s)", fieldName, v.builder.Var(value[0]))
|
||||||
}
|
}
|
||||||
conds = append(conds, cond)
|
conds = append(conds, cond)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -33,6 +33,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/sqlschema"
|
"github.com/SigNoz/signoz/pkg/sqlschema"
|
||||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||||
"github.com/SigNoz/signoz/pkg/statsreporter"
|
"github.com/SigNoz/signoz/pkg/statsreporter"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetryaudit"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
|
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||||
@@ -395,6 +396,11 @@ func New(
|
|||||||
telemetrylogs.TagAttributesV2TableName,
|
telemetrylogs.TagAttributesV2TableName,
|
||||||
telemetrylogs.LogAttributeKeysTblName,
|
telemetrylogs.LogAttributeKeysTblName,
|
||||||
telemetrylogs.LogResourceKeysTblName,
|
telemetrylogs.LogResourceKeysTblName,
|
||||||
|
telemetryaudit.DBName,
|
||||||
|
telemetryaudit.AuditLogsTableName,
|
||||||
|
telemetryaudit.TagAttributesTableName,
|
||||||
|
telemetryaudit.LogAttributeKeysTblName,
|
||||||
|
telemetryaudit.LogResourceKeysTblName,
|
||||||
telemetrymetadata.DBName,
|
telemetrymetadata.DBName,
|
||||||
telemetrymetadata.AttributesMetadataLocalTableName,
|
telemetrymetadata.AttributesMetadataLocalTableName,
|
||||||
telemetrymetadata.ColumnEvolutionMetadataTableName,
|
telemetrymetadata.ColumnEvolutionMetadataTableName,
|
||||||
|
|||||||
@@ -6,6 +6,8 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"log/slog"
|
"log/slog"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"os"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
@@ -23,6 +25,7 @@ type provider struct {
|
|||||||
bundb *sqlstore.BunDB
|
bundb *sqlstore.BunDB
|
||||||
dialect *dialect
|
dialect *dialect
|
||||||
formatter sqlstore.SQLFormatter
|
formatter sqlstore.SQLFormatter
|
||||||
|
done chan struct{}
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
|
func NewFactory(hookFactories ...factory.ProviderFactory[sqlstore.SQLStoreHook, sqlstore.Config]) factory.ProviderFactory[sqlstore.SQLStore, sqlstore.Config] {
|
||||||
@@ -59,13 +62,19 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
|
|||||||
|
|
||||||
sqliteDialect := sqlitedialect.New()
|
sqliteDialect := sqlitedialect.New()
|
||||||
bunDB := sqlstore.NewBunDB(settings, sqldb, sqliteDialect, hooks)
|
bunDB := sqlstore.NewBunDB(settings, sqldb, sqliteDialect, hooks)
|
||||||
return &provider{
|
|
||||||
|
done := make(chan struct{})
|
||||||
|
p := &provider{
|
||||||
settings: settings,
|
settings: settings,
|
||||||
sqldb: sqldb,
|
sqldb: sqldb,
|
||||||
bundb: bunDB,
|
bundb: bunDB,
|
||||||
dialect: new(dialect),
|
dialect: new(dialect),
|
||||||
formatter: newFormatter(bunDB.Dialect()),
|
formatter: newFormatter(bunDB.Dialect()),
|
||||||
}, nil
|
done: done,
|
||||||
|
}
|
||||||
|
go p.walDiagnosticLoop(config.Sqlite.Path)
|
||||||
|
|
||||||
|
return p, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (provider *provider) BunDB() *bun.DB {
|
func (provider *provider) BunDB() *bun.DB {
|
||||||
@@ -109,3 +118,73 @@ func (provider *provider) WrapAlreadyExistsErrf(err error, code errors.Code, for
|
|||||||
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// walDiagnosticLoop periodically logs pool stats, WAL file size, and busy prepared statements
|
||||||
|
// to help diagnose WAL checkpoint failures caused by permanent read locks.
|
||||||
|
func (provider *provider) walDiagnosticLoop(dbPath string) {
|
||||||
|
ticker := time.NewTicker(60 * time.Second)
|
||||||
|
defer ticker.Stop()
|
||||||
|
|
||||||
|
logger := provider.settings.Logger()
|
||||||
|
walPath := dbPath + "-wal"
|
||||||
|
|
||||||
|
for {
|
||||||
|
select {
|
||||||
|
case <-provider.done:
|
||||||
|
return
|
||||||
|
case <-ticker.C:
|
||||||
|
// 1. Log pool stats (no SQL needed)
|
||||||
|
stats := provider.sqldb.Stats()
|
||||||
|
logger.Info("sqlite_pool_stats",
|
||||||
|
slog.Int("max_open", stats.MaxOpenConnections),
|
||||||
|
slog.Int("open", stats.OpenConnections),
|
||||||
|
slog.Int("in_use", stats.InUse),
|
||||||
|
slog.Int("idle", stats.Idle),
|
||||||
|
slog.Int64("wait_count", stats.WaitCount),
|
||||||
|
slog.String("wait_duration", stats.WaitDuration.String()),
|
||||||
|
slog.Int64("max_idle_closed", stats.MaxIdleClosed),
|
||||||
|
slog.Int64("max_idle_time_closed", stats.MaxIdleTimeClosed),
|
||||||
|
slog.Int64("max_lifetime_closed", stats.MaxLifetimeClosed),
|
||||||
|
)
|
||||||
|
|
||||||
|
// 2. Log WAL file size (no SQL needed)
|
||||||
|
if info, err := os.Stat(walPath); err == nil {
|
||||||
|
logger.Info("sqlite_wal_size",
|
||||||
|
slog.Int64("bytes", info.Size()),
|
||||||
|
slog.String("path", walPath),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Check for busy prepared statements on a single pool connection
|
||||||
|
provider.checkBusyStatements(logger)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (provider *provider) checkBusyStatements(logger *slog.Logger) {
|
||||||
|
conn, err := provider.sqldb.Conn(context.Background())
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("sqlite_diag_conn_error", slog.String("error", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer conn.Close()
|
||||||
|
|
||||||
|
rows, err := conn.QueryContext(context.Background(), "SELECT sql FROM sqlite_stmt WHERE busy")
|
||||||
|
if err != nil {
|
||||||
|
logger.Warn("sqlite_diag_query_error", slog.String("error", err.Error()))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
var stmtSQL string
|
||||||
|
if err := rows.Scan(&stmtSQL); err != nil {
|
||||||
|
logger.Warn("sqlite_diag_scan_error", slog.String("error", err.Error()))
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
logger.Warn("leaked_busy_statement", slog.String("sql", stmtSQL))
|
||||||
|
}
|
||||||
|
if err := rows.Err(); err != nil {
|
||||||
|
logger.Warn("sqlite_diag_rows_error", slog.String("error", err.Error()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
200
pkg/telemetryaudit/condition_builder.go
Normal file
200
pkg/telemetryaudit/condition_builder.go
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
package telemetryaudit
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
)
|
||||||
|
|
||||||
|
type conditionBuilder struct {
|
||||||
|
fm qbtypes.FieldMapper
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
|
||||||
|
return &conditionBuilder{fm: fm}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *conditionBuilder) conditionFor(
|
||||||
|
ctx context.Context,
|
||||||
|
startNs, endNs uint64,
|
||||||
|
key *telemetrytypes.TelemetryFieldKey,
|
||||||
|
operator qbtypes.FilterOperator,
|
||||||
|
value any,
|
||||||
|
sb *sqlbuilder.SelectBuilder,
|
||||||
|
) (string, error) {
|
||||||
|
columns, err := c.fm.ColumnFor(ctx, startNs, endNs, key)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
if operator.IsStringSearchOperator() {
|
||||||
|
value = querybuilder.FormatValueForContains(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldExpression, err := c.fm.FieldFor(ctx, startNs, endNs, key)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldExpression, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, fieldExpression, operator)
|
||||||
|
|
||||||
|
switch operator {
|
||||||
|
case qbtypes.FilterOperatorEqual:
|
||||||
|
return sb.E(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorNotEqual:
|
||||||
|
return sb.NE(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorGreaterThan:
|
||||||
|
return sb.G(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorGreaterThanOrEq:
|
||||||
|
return sb.GE(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorLessThan:
|
||||||
|
return sb.LT(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorLessThanOrEq:
|
||||||
|
return sb.LE(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorLike:
|
||||||
|
return sb.Like(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorNotLike:
|
||||||
|
return sb.NotLike(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorILike:
|
||||||
|
return sb.ILike(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorNotILike:
|
||||||
|
return sb.NotILike(fieldExpression, value), nil
|
||||||
|
case qbtypes.FilterOperatorContains:
|
||||||
|
return sb.ILike(fieldExpression, fmt.Sprintf("%%%s%%", value)), nil
|
||||||
|
case qbtypes.FilterOperatorNotContains:
|
||||||
|
return sb.NotILike(fieldExpression, fmt.Sprintf("%%%s%%", value)), nil
|
||||||
|
case qbtypes.FilterOperatorRegexp:
|
||||||
|
return fmt.Sprintf(`match(%s, %s)`, sqlbuilder.Escape(fieldExpression), sb.Var(value)), nil
|
||||||
|
case qbtypes.FilterOperatorNotRegexp:
|
||||||
|
return fmt.Sprintf(`NOT match(%s, %s)`, sqlbuilder.Escape(fieldExpression), sb.Var(value)), nil
|
||||||
|
case qbtypes.FilterOperatorBetween:
|
||||||
|
values, ok := value.([]any)
|
||||||
|
if !ok {
|
||||||
|
return "", qbtypes.ErrBetweenValues
|
||||||
|
}
|
||||||
|
if len(values) != 2 {
|
||||||
|
return "", qbtypes.ErrBetweenValues
|
||||||
|
}
|
||||||
|
return sb.Between(fieldExpression, values[0], values[1]), nil
|
||||||
|
case qbtypes.FilterOperatorNotBetween:
|
||||||
|
values, ok := value.([]any)
|
||||||
|
if !ok {
|
||||||
|
return "", qbtypes.ErrBetweenValues
|
||||||
|
}
|
||||||
|
if len(values) != 2 {
|
||||||
|
return "", qbtypes.ErrBetweenValues
|
||||||
|
}
|
||||||
|
return sb.NotBetween(fieldExpression, values[0], values[1]), nil
|
||||||
|
case qbtypes.FilterOperatorIn:
|
||||||
|
values, ok := value.([]any)
|
||||||
|
if !ok {
|
||||||
|
return "", qbtypes.ErrInValues
|
||||||
|
}
|
||||||
|
conditions := []string{}
|
||||||
|
for _, value := range values {
|
||||||
|
conditions = append(conditions, sb.E(fieldExpression, value))
|
||||||
|
}
|
||||||
|
return sb.Or(conditions...), nil
|
||||||
|
case qbtypes.FilterOperatorNotIn:
|
||||||
|
values, ok := value.([]any)
|
||||||
|
if !ok {
|
||||||
|
return "", qbtypes.ErrInValues
|
||||||
|
}
|
||||||
|
conditions := []string{}
|
||||||
|
for _, value := range values {
|
||||||
|
conditions = append(conditions, sb.NE(fieldExpression, value))
|
||||||
|
}
|
||||||
|
return sb.And(conditions...), nil
|
||||||
|
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
|
||||||
|
var value any
|
||||||
|
column := columns[0]
|
||||||
|
|
||||||
|
switch column.Type.GetType() {
|
||||||
|
case schema.ColumnTypeEnumJSON:
|
||||||
|
if operator == qbtypes.FilterOperatorExists {
|
||||||
|
return sb.IsNotNull(fieldExpression), nil
|
||||||
|
}
|
||||||
|
return sb.IsNull(fieldExpression), nil
|
||||||
|
case schema.ColumnTypeEnumLowCardinality:
|
||||||
|
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
|
||||||
|
case schema.ColumnTypeEnumString:
|
||||||
|
value = ""
|
||||||
|
if operator == qbtypes.FilterOperatorExists {
|
||||||
|
return sb.NE(fieldExpression, value), nil
|
||||||
|
}
|
||||||
|
return sb.E(fieldExpression, value), nil
|
||||||
|
default:
|
||||||
|
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
|
||||||
|
}
|
||||||
|
case schema.ColumnTypeEnumString:
|
||||||
|
value = ""
|
||||||
|
if operator == qbtypes.FilterOperatorExists {
|
||||||
|
return sb.NE(fieldExpression, value), nil
|
||||||
|
}
|
||||||
|
return sb.E(fieldExpression, value), nil
|
||||||
|
case schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
|
||||||
|
value = 0
|
||||||
|
if operator == qbtypes.FilterOperatorExists {
|
||||||
|
return sb.NE(fieldExpression, value), nil
|
||||||
|
}
|
||||||
|
return sb.E(fieldExpression, value), nil
|
||||||
|
case schema.ColumnTypeEnumMap:
|
||||||
|
keyType := column.Type.(schema.MapColumnType).KeyType
|
||||||
|
if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
|
||||||
|
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
|
||||||
|
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
|
||||||
|
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
|
||||||
|
if key.Materialized {
|
||||||
|
leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
|
||||||
|
}
|
||||||
|
if operator == qbtypes.FilterOperatorExists {
|
||||||
|
return sb.E(leftOperand, true), nil
|
||||||
|
}
|
||||||
|
return sb.NE(leftOperand, true), nil
|
||||||
|
default:
|
||||||
|
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported operator: %v", operator)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *conditionBuilder) ConditionFor(
|
||||||
|
ctx context.Context,
|
||||||
|
startNs uint64,
|
||||||
|
endNs uint64,
|
||||||
|
key *telemetrytypes.TelemetryFieldKey,
|
||||||
|
operator qbtypes.FilterOperator,
|
||||||
|
value any,
|
||||||
|
sb *sqlbuilder.SelectBuilder,
|
||||||
|
) (string, error) {
|
||||||
|
condition, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
if key.FieldContext == telemetrytypes.FieldContextLog || key.FieldContext == telemetrytypes.FieldContextScope {
|
||||||
|
return condition, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if operator.AddDefaultExistsFilter() {
|
||||||
|
existsCondition, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return sb.And(condition, existsCondition), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return condition, nil
|
||||||
|
}
|
||||||
129
pkg/telemetryaudit/const.go
Normal file
129
pkg/telemetryaudit/const.go
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
package telemetryaudit
|
||||||
|
|
||||||
|
import (
|
||||||
|
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Internal Columns.
|
||||||
|
IDColumn = "id"
|
||||||
|
TimestampBucketStartColumn = "ts_bucket_start"
|
||||||
|
ResourceFingerPrintColumn = "resource_fingerprint"
|
||||||
|
|
||||||
|
// Intrinsic Columns.
|
||||||
|
TimestampColumn = "timestamp"
|
||||||
|
ObservedTimestampColumn = "observed_timestamp"
|
||||||
|
BodyColumn = "body"
|
||||||
|
EventNameColumn = "event_name"
|
||||||
|
TraceIDColumn = "trace_id"
|
||||||
|
SpanIDColumn = "span_id"
|
||||||
|
TraceFlagsColumn = "trace_flags"
|
||||||
|
SeverityTextColumn = "severity_text"
|
||||||
|
SeverityNumberColumn = "severity_number"
|
||||||
|
ScopeNameColumn = "scope_name"
|
||||||
|
ScopeVersionColumn = "scope_version"
|
||||||
|
|
||||||
|
// Contextual Columns.
|
||||||
|
AttributesStringColumn = "attributes_string"
|
||||||
|
AttributesNumberColumn = "attributes_number"
|
||||||
|
AttributesBoolColumn = "attributes_bool"
|
||||||
|
ResourceColumn = "resource"
|
||||||
|
ScopeStringColumn = "scope_string"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: "body",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
}
|
||||||
|
|
||||||
|
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
|
||||||
|
"body": {
|
||||||
|
Name: "body",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
"trace_id": {
|
||||||
|
Name: "trace_id",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
"span_id": {
|
||||||
|
Name: "span_id",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
"trace_flags": {
|
||||||
|
Name: "trace_flags",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||||
|
},
|
||||||
|
"severity_text": {
|
||||||
|
Name: "severity_text",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
"severity_number": {
|
||||||
|
Name: "severity_number",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeNumber,
|
||||||
|
},
|
||||||
|
"event_name": {
|
||||||
|
Name: "event_name",
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: telemetrytypes.FieldContextLog,
|
||||||
|
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
DefaultSortingOrder = []qbtypes.OrderBy{
|
||||||
|
{
|
||||||
|
Key: qbtypes.OrderByKey{
|
||||||
|
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: TimestampColumn,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Direction: qbtypes.OrderDirectionDesc,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: qbtypes.OrderByKey{
|
||||||
|
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: IDColumn,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Direction: qbtypes.OrderDirectionDesc,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
var auditLogColumns = map[string]*schema.Column{
|
||||||
|
"ts_bucket_start": {Name: "ts_bucket_start", Type: schema.ColumnTypeUInt64},
|
||||||
|
"resource_fingerprint": {Name: "resource_fingerprint", Type: schema.ColumnTypeString},
|
||||||
|
"timestamp": {Name: "timestamp", Type: schema.ColumnTypeUInt64},
|
||||||
|
"observed_timestamp": {Name: "observed_timestamp", Type: schema.ColumnTypeUInt64},
|
||||||
|
"id": {Name: "id", Type: schema.ColumnTypeString},
|
||||||
|
"trace_id": {Name: "trace_id", Type: schema.ColumnTypeString},
|
||||||
|
"span_id": {Name: "span_id", Type: schema.ColumnTypeString},
|
||||||
|
"trace_flags": {Name: "trace_flags", Type: schema.ColumnTypeUInt32},
|
||||||
|
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
|
||||||
|
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
|
||||||
|
"body": {Name: "body", Type: schema.ColumnTypeString},
|
||||||
|
"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeString}},
|
||||||
|
"attributes_number": {Name: "attributes_number", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeFloat64}},
|
||||||
|
"attributes_bool": {Name: "attributes_bool", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeBool}},
|
||||||
|
"resource": {Name: "resource", Type: schema.JSONColumnType{}},
|
||||||
|
"event_name": {Name: "event_name", Type: schema.ColumnTypeString},
|
||||||
|
"scope_name": {Name: "scope_name", Type: schema.ColumnTypeString},
|
||||||
|
"scope_version": {Name: "scope_version", Type: schema.ColumnTypeString},
|
||||||
|
"scope_string": {Name: "scope_string", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeString}},
|
||||||
|
}
|
||||||
124
pkg/telemetryaudit/field_mapper.go
Normal file
124
pkg/telemetryaudit/field_mapper.go
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
package telemetryaudit
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
|
||||||
|
"golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
type fieldMapper struct{}
|
||||||
|
|
||||||
|
func NewFieldMapper() qbtypes.FieldMapper {
|
||||||
|
return &fieldMapper{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// getColumn resolves a telemetry field key to its backing table column(s).
//
// Resolution rules:
//   - resource context  -> the JSON "resource" column
//   - scope context     -> scope_name / scope_version for the well-known
//     names, any other scope key falls back to the scope_string map
//   - attribute context -> attributes_string / attributes_number /
//     attributes_bool, chosen by the key's data type
//   - log / unspecified -> direct lookup in auditLogColumns by key name
//
// Returns qbtypes.ErrColumnNotFound when no mapping exists.
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
	switch key.FieldContext {
	case telemetrytypes.FieldContextResource:
		return []*schema.Column{auditLogColumns["resource"]}, nil
	case telemetrytypes.FieldContextScope:
		switch key.Name {
		case "name", "scope.name", "scope_name":
			return []*schema.Column{auditLogColumns["scope_name"]}, nil
		case "version", "scope.version", "scope_version":
			return []*schema.Column{auditLogColumns["scope_version"]}, nil
		}
		// Any other scope key lives in the generic scope_string map.
		return []*schema.Column{auditLogColumns["scope_string"]}, nil
	case telemetrytypes.FieldContextAttribute:
		switch key.FieldDataType {
		case telemetrytypes.FieldDataTypeString:
			return []*schema.Column{auditLogColumns["attributes_string"]}, nil
		case telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeNumber:
			// All numeric attribute types share the Float64-valued map.
			return []*schema.Column{auditLogColumns["attributes_number"]}, nil
		case telemetrytypes.FieldDataTypeBool:
			return []*schema.Column{auditLogColumns["attributes_bool"]}, nil
		}
		// An attribute key with any other data type falls through to the
		// ErrColumnNotFound return below.
	case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
		col, ok := auditLogColumns[key.Name]
		if !ok {
			return nil, qbtypes.ErrColumnNotFound
		}
		return []*schema.Column{col}, nil
	}

	return nil, qbtypes.ErrColumnNotFound
}
|
||||||
|
|
||||||
|
// FieldFor returns the ClickHouse expression that reads the given field key.
//
// The time range parameters are unused by the audit mapper. Depending on the
// resolved column type, the result is one of:
//   - JSON column:   resource.`<key>`::String (resource context only)
//   - plain column:  the column name itself
//   - map column:    the materialized column name when key.Materialized,
//     otherwise a map subscript col['<key>']
//
// Returns qbtypes.ErrColumnNotFound (via getColumn) for unknown keys and
// invalid-input errors for unsupported column/value types.
func (m *fieldMapper) FieldFor(ctx context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
	columns, err := m.getColumn(ctx, key)
	if err != nil {
		return "", err
	}
	// getColumn always returns a single column for audit logs; guard anyway.
	if len(columns) != 1 {
		return "", errors.Newf(errors.TypeInternal, errors.CodeInternal, "expected exactly 1 column, got %d", len(columns))
	}
	column := columns[0]

	switch column.Type.GetType() {
	case schema.ColumnTypeEnumJSON:
		if key.FieldContext != telemetrytypes.FieldContextResource {
			// NOTE(review): key.FieldContext.String looks like a method value
			// passed to %s without being called — verify String is a field on
			// FieldContext; otherwise this formats a func value.
			return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns in audit, got %s", key.FieldContext.String)
		}
		// JSON path access, cast to String for uniform comparison semantics.
		return fmt.Sprintf("%s.`%s`::String", column.Name, key.Name), nil
	case schema.ColumnTypeEnumLowCardinality:
		return column.Name, nil
	case schema.ColumnTypeEnumString, schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
		return column.Name, nil
	case schema.ColumnTypeEnumMap:
		keyType := column.Type.(schema.MapColumnType).KeyType
		// Audit maps are keyed by LowCardinality(String); anything else is unexpected.
		if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
		}

		switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
		case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
			// Prefer the dedicated materialized column when one exists.
			if key.Materialized {
				return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
			}
			return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
		default:
			return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported map value type %s", valueType)
		}
	}

	// Any other column type: fall back to the bare column name.
	return column.Name, nil
}
|
||||||
|
|
||||||
|
// ColumnFor returns the schema column(s) backing the given field key.
// The time range parameters are unused; this simply delegates to getColumn.
func (m *fieldMapper) ColumnFor(ctx context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
	return m.getColumn(ctx, key)
}
|
||||||
|
|
||||||
|
// ColumnExpressionFor returns a SELECT-list expression for the field,
// aliased to the field name: "<expr> AS `<name>`".
//
// When the field does not resolve directly (ErrColumnNotFound), it tries, in
// order: the metadata keys known for that name; a top-level audit column of
// the same name (re-contextualized as a log field); and finally a "did you
// mean" suggestion before giving up with an invalid-input error.
//
// NOTE(review): on the fallback paths the second FieldFor error is discarded —
// presumably it cannot fail for keys obtained this way; verify.
func (m *fieldMapper) ColumnExpressionFor(
	ctx context.Context,
	tsStart, tsEnd uint64,
	field *telemetrytypes.TelemetryFieldKey,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
	fieldExpression, err := m.FieldFor(ctx, tsStart, tsEnd, field)
	if errors.Is(err, qbtypes.ErrColumnNotFound) {
		keysForField := keys[field.Name]
		if len(keysForField) == 0 {
			if _, ok := auditLogColumns[field.Name]; ok {
				// Name matches a top-level table column: treat it as a log field.
				field.FieldContext = telemetrytypes.FieldContextLog
				fieldExpression, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
			} else {
				// Unknown field: suggest a close match from the known keys.
				correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
				if found {
					return "", errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
				}
				return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
			}
		} else {
			// Use the first metadata key registered under this name.
			fieldExpression, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
		}
	}

	return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(fieldExpression), field.Name), nil
}
|
||||||
612
pkg/telemetryaudit/statement_builder.go
Normal file
612
pkg/telemetryaudit/statement_builder.go
Normal file
@@ -0,0 +1,612 @@
|
|||||||
|
package telemetryaudit
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"log/slog"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/huandu/go-sqlbuilder"
|
||||||
|
)
|
||||||
|
|
||||||
|
// auditQueryStatementBuilder builds ClickHouse SQL statements for audit-log
// queries across the raw-list, time-series and scalar request types.
type auditQueryStatementBuilder struct {
	logger        *slog.Logger
	metadataStore telemetrytypes.MetadataStore // key metadata lookups (GetKeysMulti)
	fm            qbtypes.FieldMapper          // field-key -> column/expression resolution
	cb            qbtypes.ConditionBuilder     // filter-condition SQL generation
	// resourceFilterStmtBuilder builds the __resource_filter CTE over the
	// logs resource table.
	resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
	aggExprRewriter           qbtypes.AggExprRewriter // rewrites aggregation expressions to ClickHouse SQL
	fullTextColumn            *telemetrytypes.TelemetryFieldKey
	jsonKeyToKey              qbtypes.JsonKeyToFieldFunc
}

// Compile-time assertion that the builder implements StatementBuilder.
var _ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*auditQueryStatementBuilder)(nil)
|
||||||
|
|
||||||
|
// NewAuditQueryStatementBuilder wires up an audit-log statement builder.
//
// It scopes the provider settings to this package for logging and constructs
// the resource-filter sub-builder over the logs resource table with the
// audit source, reusing the same full-text column and JSON key mapping.
func NewAuditQueryStatementBuilder(
	settings factory.ProviderSettings,
	metadataStore telemetrytypes.MetadataStore,
	fieldMapper qbtypes.FieldMapper,
	conditionBuilder qbtypes.ConditionBuilder,
	aggExprRewriter qbtypes.AggExprRewriter,
	fullTextColumn *telemetrytypes.TelemetryFieldKey,
	jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *auditQueryStatementBuilder {
	auditSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetryaudit")

	// Sub-builder that produces the fingerprint-filter CTE used by the
	// main statements.
	resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.LogAggregation](
		settings,
		DBName,
		LogsResourceTableName,
		telemetrytypes.SignalLogs,
		telemetrytypes.SourceAudit,
		metadataStore,
		fullTextColumn,
		jsonKeyToKey,
	)

	return &auditQueryStatementBuilder{
		logger:                    auditSettings.Logger(),
		metadataStore:             metadataStore,
		fm:                        fieldMapper,
		cb:                        conditionBuilder,
		resourceFilterStmtBuilder: resourceFilterStmtBuilder,
		aggExprRewriter:           aggExprRewriter,
		fullTextColumn:            fullTextColumn,
		jsonKeyToKey:              jsonKeyToKey,
	}
}
|
||||||
|
|
||||||
|
// Build compiles a query-builder audit query into a ClickHouse statement.
//
// It normalizes the time range to nanoseconds, fetches key metadata for every
// field the query references, adjusts the query's keys against that metadata,
// and then dispatches to the list / time-series / scalar builder according to
// requestType. Unsupported request types yield an invalid-input error.
func (b *auditQueryStatementBuilder) Build(
	ctx context.Context,
	start uint64,
	end uint64,
	requestType qbtypes.RequestType,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
	start = querybuilder.ToNanoSecs(start)
	end = querybuilder.ToNanoSecs(end)

	// Resolve metadata for every field referenced anywhere in the query.
	keySelectors := getKeySelectors(query)
	keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
	if err != nil {
		return nil, err
	}

	query = b.adjustKeys(ctx, keys, query, requestType)

	q := sqlbuilder.NewSelectBuilder()

	var stmt *qbtypes.Statement
	switch requestType {
	case qbtypes.RequestTypeRaw, qbtypes.RequestTypeRawStream:
		stmt, err = b.buildListQuery(ctx, q, query, start, end, keys, variables)
	case qbtypes.RequestTypeTimeSeries:
		stmt, err = b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
	case qbtypes.RequestTypeScalar:
		stmt, err = b.buildScalarQuery(ctx, q, query, start, end, keys, false, variables)
	default:
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported request type: %s", requestType)
	}

	if err != nil {
		return nil, err
	}

	return stmt, nil
}
|
||||||
|
|
||||||
|
func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []*telemetrytypes.FieldKeySelector {
|
||||||
|
var keySelectors []*telemetrytypes.FieldKeySelector
|
||||||
|
|
||||||
|
for idx := range query.Aggregations {
|
||||||
|
aggExpr := query.Aggregations[idx]
|
||||||
|
selectors := querybuilder.QueryStringToKeysSelectors(aggExpr.Expression)
|
||||||
|
keySelectors = append(keySelectors, selectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if query.Filter != nil && query.Filter.Expression != "" {
|
||||||
|
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
|
||||||
|
keySelectors = append(keySelectors, whereClauseSelectors...)
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.GroupBy {
|
||||||
|
groupBy := query.GroupBy[idx]
|
||||||
|
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||||
|
Name: groupBy.Name,
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: groupBy.FieldContext,
|
||||||
|
FieldDataType: groupBy.FieldDataType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.SelectFields {
|
||||||
|
selectField := query.SelectFields[idx]
|
||||||
|
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||||
|
Name: selectField.Name,
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: selectField.FieldContext,
|
||||||
|
FieldDataType: selectField.FieldDataType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range query.Order {
|
||||||
|
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
|
||||||
|
Name: query.Order[idx].Key.Name,
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: query.Order[idx].Key.FieldContext,
|
||||||
|
FieldDataType: query.Order[idx].Key.FieldDataType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range keySelectors {
|
||||||
|
keySelectors[idx].Signal = telemetrytypes.SignalLogs
|
||||||
|
keySelectors[idx].Source = telemetrytypes.SourceAudit
|
||||||
|
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
|
||||||
|
}
|
||||||
|
|
||||||
|
return keySelectors
|
||||||
|
}
|
||||||
|
|
||||||
|
// adjustKeys post-processes the metadata keys and the query before SQL
// generation: it injects the always-present intrinsic "id" and "timestamp"
// keys (prepended so they win over any metadata-provided entries), applies
// alias/duplicate-key adjustments, and normalizes every select, group-by and
// order-by key against the metadata. All adjustments performed are logged.
//
// The (possibly mutated) query is returned; keys is mutated in place.
func (b *auditQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], requestType qbtypes.RequestType) qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] {
	// "id" and "timestamp" always exist on the audit table; make sure they
	// resolve even when absent from the metadata store.
	keys["id"] = append([]*telemetrytypes.TelemetryFieldKey{{
		Name:          "id",
		Signal:        telemetrytypes.SignalLogs,
		FieldContext:  telemetrytypes.FieldContextLog,
		FieldDataType: telemetrytypes.FieldDataTypeString,
	}}, keys["id"]...)

	keys["timestamp"] = append([]*telemetrytypes.TelemetryFieldKey{{
		Name:          "timestamp",
		Signal:        telemetrytypes.SignalLogs,
		FieldContext:  telemetrytypes.FieldContextLog,
		FieldDataType: telemetrytypes.FieldDataTypeNumber,
	}}, keys["timestamp"]...)

	actions := querybuilder.AdjustKeysForAliasExpressions(&query, requestType)
	actions = append(actions, querybuilder.AdjustDuplicateKeys(&query)...)

	for idx := range query.SelectFields {
		actions = append(actions, b.adjustKey(&query.SelectFields[idx], keys)...)
	}
	for idx := range query.GroupBy {
		actions = append(actions, b.adjustKey(&query.GroupBy[idx].TelemetryFieldKey, keys)...)
	}
	for idx := range query.Order {
		actions = append(actions, b.adjustKey(&query.Order[idx].Key.TelemetryFieldKey, keys)...)
	}

	// Surface every adjustment for debuggability.
	for _, action := range actions {
		b.logger.InfoContext(ctx, "key adjustment action", slog.String("action", action))
	}

	return query
}
|
||||||
|
|
||||||
|
func (b *auditQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {
|
||||||
|
if _, ok := IntrinsicFields[key.Name]; ok {
|
||||||
|
intrinsicField := IntrinsicFields[key.Name]
|
||||||
|
return querybuilder.AdjustKey(key, keys, &intrinsicField)
|
||||||
|
}
|
||||||
|
return querybuilder.AdjustKey(key, keys, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildListQuery builds the SQL statement for raw list requests: it selects
// audit rows (the default column set, or the caller's select fields plus the
// always-present timestamp/id), applies the resource-filter CTE, filter and
// time-range conditions, ordering, limit (default 100) and offset.
func (b *auditQueryStatementBuilder) buildListQuery(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
	var (
		cteFragments []string
		cteArgs      [][]any
	)

	// Resource filter runs as a CTE; the main query keeps only rows whose
	// fingerprint matches it.
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}

	// timestamp and id are always selected first.
	sb.Select(TimestampColumn)
	sb.SelectMore(IDColumn)
	if len(query.SelectFields) == 0 {
		// No explicit selection: return the full default column set.
		sb.SelectMore(TraceIDColumn)
		sb.SelectMore(SpanIDColumn)
		sb.SelectMore(TraceFlagsColumn)
		sb.SelectMore(SeverityTextColumn)
		sb.SelectMore(SeverityNumberColumn)
		sb.SelectMore(ScopeNameColumn)
		sb.SelectMore(ScopeVersionColumn)
		sb.SelectMore(BodyColumn)
		sb.SelectMore(EventNameColumn)
		sb.SelectMore(AttributesStringColumn)
		sb.SelectMore(AttributesNumberColumn)
		sb.SelectMore(AttributesBoolColumn)
		sb.SelectMore(ResourceColumn)
		sb.SelectMore(ScopeStringColumn)
	} else {
		for index := range query.SelectFields {
			// Skip timestamp/id: already selected above.
			if query.SelectFields[index].Name == TimestampColumn || query.SelectFields[index].Name == IDColumn {
				continue
			}

			colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
			if err != nil {
				return nil, err
			}
			sb.SelectMore(colExpr)
		}
	}

	sb.From(fmt.Sprintf("%s.%s", DBName, AuditLogsTableName))

	preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}

	for _, orderBy := range query.Order {
		colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
		if err != nil {
			return nil, err
		}
		sb.OrderBy(fmt.Sprintf("%s %s", colExpr, orderBy.Direction.StringValue()))
	}

	// Default page size is 100 rows when the caller did not set a limit.
	if query.Limit > 0 {
		sb.Limit(query.Limit)
	} else {
		sb.Limit(100)
	}

	if query.Offset > 0 {
		sb.Offset(query.Offset)
	}

	mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

	// Prepend the CTE fragments (and their args) ahead of the main query.
	finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
	finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)

	stmt := &qbtypes.Statement{
		Query: finalSQL,
		Args:  finalArgs,
	}
	// Propagate any warnings produced while preparing the WHERE clause.
	if preparedWhereClause != nil {
		stmt.Warnings = preparedWhereClause.Warnings
		stmt.WarningsDocURL = preparedWhereClause.WarningsDocURL
	}

	return stmt, nil
}
|
||||||
|
|
||||||
|
func (b *auditQueryStatementBuilder) buildTimeSeriesQuery(
|
||||||
|
ctx context.Context,
|
||||||
|
sb *sqlbuilder.SelectBuilder,
|
||||||
|
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
|
||||||
|
start, end uint64,
|
||||||
|
keys map[string][]*telemetrytypes.TelemetryFieldKey,
|
||||||
|
variables map[string]qbtypes.VariableItem,
|
||||||
|
) (*qbtypes.Statement, error) {
|
||||||
|
var (
|
||||||
|
cteFragments []string
|
||||||
|
cteArgs [][]any
|
||||||
|
)
|
||||||
|
|
||||||
|
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
|
||||||
|
return nil, err
|
||||||
|
} else if frag != "" {
|
||||||
|
cteFragments = append(cteFragments, frag)
|
||||||
|
cteArgs = append(cteArgs, args)
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.SelectMore(fmt.Sprintf(
|
||||||
|
"toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL %d SECOND) AS ts",
|
||||||
|
int64(query.StepInterval.Seconds()),
|
||||||
|
))
|
||||||
|
|
||||||
|
var allGroupByArgs []any
|
||||||
|
|
||||||
|
fieldNames := make([]string, 0, len(query.GroupBy))
|
||||||
|
for _, gb := range query.GroupBy {
|
||||||
|
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.Name)
|
||||||
|
allGroupByArgs = append(allGroupByArgs, args...)
|
||||||
|
sb.SelectMore(colExpr)
|
||||||
|
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.Name))
|
||||||
|
}
|
||||||
|
|
||||||
|
allAggChArgs := make([]any, 0)
|
||||||
|
for i, agg := range query.Aggregations {
|
||||||
|
rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, start, end, agg.Expression, uint64(query.StepInterval.Seconds()), keys)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
allAggChArgs = append(allAggChArgs, chArgs...)
|
||||||
|
sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.From(fmt.Sprintf("%s.%s", DBName, AuditLogsTableName))
|
||||||
|
|
||||||
|
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var finalSQL string
|
||||||
|
var finalArgs []any
|
||||||
|
|
||||||
|
if query.Limit > 0 && len(query.GroupBy) > 0 {
|
||||||
|
cteSB := sqlbuilder.NewSelectBuilder()
|
||||||
|
cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true, variables)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
cteFragments = append(cteFragments, fmt.Sprintf("__limit_cte AS (%s)", cteStmt.Query))
|
||||||
|
cteArgs = append(cteArgs, cteStmt.Args)
|
||||||
|
|
||||||
|
tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
|
||||||
|
sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
|
||||||
|
|
||||||
|
sb.GroupBy("ts")
|
||||||
|
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||||
|
if query.Having != nil && query.Having.Expression != "" {
|
||||||
|
rewriter := querybuilder.NewHavingExpressionRewriter()
|
||||||
|
rewrittenExpr, err := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sb.Having(rewrittenExpr)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(query.Order) != 0 {
|
||||||
|
for _, orderBy := range query.Order {
|
||||||
|
_, ok := aggOrderBy(orderBy, query)
|
||||||
|
if !ok {
|
||||||
|
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sb.OrderBy("ts desc")
|
||||||
|
}
|
||||||
|
|
||||||
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
|
|
||||||
|
finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
|
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
} else {
|
||||||
|
sb.GroupBy("ts")
|
||||||
|
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
|
||||||
|
if query.Having != nil && query.Having.Expression != "" {
|
||||||
|
rewriter := querybuilder.NewHavingExpressionRewriter()
|
||||||
|
rewrittenExpr, err := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
sb.Having(rewrittenExpr)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(query.Order) != 0 {
|
||||||
|
for _, orderBy := range query.Order {
|
||||||
|
_, ok := aggOrderBy(orderBy, query)
|
||||||
|
if !ok {
|
||||||
|
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sb.OrderBy("ts desc")
|
||||||
|
}
|
||||||
|
|
||||||
|
combinedArgs := append(allGroupByArgs, allAggChArgs...)
|
||||||
|
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
|
||||||
|
|
||||||
|
finalSQL = querybuilder.CombineCTEs(cteFragments) + mainSQL
|
||||||
|
finalArgs = querybuilder.PrependArgs(cteArgs, mainArgs)
|
||||||
|
}
|
||||||
|
|
||||||
|
stmt := &qbtypes.Statement{
|
||||||
|
Query: finalSQL,
|
||||||
|
Args: finalArgs,
|
||||||
|
}
|
||||||
|
if preparedWhereClause != nil {
|
||||||
|
stmt.Warnings = preparedWhereClause.Warnings
|
||||||
|
stmt.WarningsDocURL = preparedWhereClause.WarningsDocURL
|
||||||
|
}
|
||||||
|
|
||||||
|
return stmt, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// buildScalarQuery builds the SQL statement for scalar requests: one row per
// group-by tuple with __result_N columns, aggregated over the whole range.
// It is also reused by buildTimeSeriesQuery (with skipResourceCTE=true) to
// compute the top-N group tuples for the limit CTE.
func (b *auditQueryStatementBuilder) buildScalarQuery(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	skipResourceCTE bool,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
	var (
		cteFragments []string
		cteArgs      [][]any
	)

	// The resource-filter WHERE condition is always attached; only the CTE
	// fragment itself is skipped when the caller already provides it.
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" && !skipResourceCTE {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}

	allAggChArgs := []any{}

	var allGroupByArgs []any

	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}

		colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.Name)
		allGroupByArgs = append(allGroupByArgs, args...)
		sb.SelectMore(colExpr)
	}

	// Rate-style aggregations are computed over the full range in seconds.
	rateInterval := (end - start) / querybuilder.NsToSeconds

	if len(query.Aggregations) > 0 {
		for idx := range query.Aggregations {
			aggExpr := query.Aggregations[idx]
			rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, start, end, aggExpr.Expression, rateInterval, keys)
			if err != nil {
				return nil, err
			}
			allAggChArgs = append(allAggChArgs, chArgs...)
			sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, idx))
		}
	}

	sb.From(fmt.Sprintf("%s.%s", DBName, AuditLogsTableName))

	preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}

	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

	if query.Having != nil && query.Having.Expression != "" {
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr, err := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
		if err != nil {
			return nil, err
		}
		sb.Having(rewrittenExpr)
	}

	// Order by the matching aggregation result when the key names one,
	// otherwise by the (group-by) column itself.
	for _, orderBy := range query.Order {
		idx, ok := aggOrderBy(orderBy, query)
		if ok {
			sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
		} else {
			sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
		}
	}

	// Default ordering by the first aggregation result, descending.
	// NOTE(review): this assumes at least one aggregation exists; with none,
	// __result_0 is not selected — verify callers guarantee an aggregation.
	if len(query.Order) == 0 {
		sb.OrderBy("__result_0 DESC")
	}

	if query.Limit > 0 {
		sb.Limit(query.Limit)
	}

	combinedArgs := append(allGroupByArgs, allAggChArgs...)

	mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)

	finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
	finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)

	stmt := &qbtypes.Statement{
		Query: finalSQL,
		Args:  finalArgs,
	}
	if preparedWhereClause != nil {
		stmt.Warnings = preparedWhereClause.Warnings
		stmt.WarningsDocURL = preparedWhereClause.WarningsDocURL
	}

	return stmt, nil
}
|
||||||
|
|
||||||
|
// addFilterCondition attaches the user filter expression (if any) and the
// time-range conditions to the builder.
//
// The time range is applied both on the raw timestamp column and on
// ts_bucket_start, with the start bucket widened by BucketAdjustment so rows
// written near a bucket boundary are not missed. The start bound is
// inclusive (>=), the end bound exclusive (sb.L is "<" in go-sqlbuilder).
//
// Returns the prepared WHERE clause (nil when there is no filter expression)
// so the caller can propagate its warnings.
func (b *auditQueryStatementBuilder) addFilterCondition(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	start, end uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) (*querybuilder.PreparedWhereClause, error) {
	var preparedWhereClause *querybuilder.PreparedWhereClause
	var err error

	if query.Filter != nil && query.Filter.Expression != "" {
		// Resource-level filtering is handled by the resource CTE, hence
		// SkipResourceFilter here.
		preparedWhereClause, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
			Context:            ctx,
			Logger:             b.logger,
			FieldMapper:        b.fm,
			ConditionBuilder:   b.cb,
			FieldKeys:          keys,
			SkipResourceFilter: true,
			FullTextColumn:     b.fullTextColumn,
			JsonKeyToKey:       b.jsonKeyToKey,
			Variables:          variables,
			StartNs:            start,
			EndNs:              end,
		})

		if err != nil {
			return nil, err
		}
	}

	if preparedWhereClause != nil {
		sb.AddWhereClause(preparedWhereClause.WhereClause)
	}

	// Widen the start bucket so boundary rows are included.
	startBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
	var endBucket uint64
	if end != 0 {
		endBucket = end / querybuilder.NsToSeconds
	}

	if start != 0 {
		sb.Where(sb.GE("timestamp", fmt.Sprintf("%d", start)), sb.GE("ts_bucket_start", startBucket))
	}
	if end != 0 {
		sb.Where(sb.L("timestamp", fmt.Sprintf("%d", end)), sb.LE("ts_bucket_start", endBucket))
	}

	return preparedWhereClause, nil
}
|
||||||
|
|
||||||
|
func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) (int, bool) {
|
||||||
|
for i, agg := range q.Aggregations {
|
||||||
|
if k.Key.Name == agg.Alias || k.Key.Name == agg.Expression || k.Key.Name == fmt.Sprintf("%d", i) {
|
||||||
|
return i, true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0, false
|
||||||
|
}
|
||||||
|
|
||||||
|
// maybeAttachResourceFilter builds the resource-filter sub-query, adds the
// fingerprint membership condition to sb, and returns the __resource_filter
// CTE fragment plus its args for the caller to prepend.
//
// NOTE(review): the WHERE condition is added unconditionally on success, so
// callers that skip the returned fragment (skipResourceCTE) still reference
// __resource_filter — presumably the CTE is supplied by the outer query in
// that case; verify.
func (b *auditQueryStatementBuilder) maybeAttachResourceFilter(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	variables map[string]qbtypes.VariableItem,
) (cteSQL string, cteArgs []any, err error) {
	stmt, err := b.resourceFilterStmtBuilder.Build(ctx, start, end, qbtypes.RequestTypeRaw, query, variables)
	if err != nil {
		return "", nil, err
	}

	sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")

	return fmt.Sprintf("__resource_filter AS (%s)", stmt.Query), stmt.Args, nil
}
|
||||||
223
pkg/telemetryaudit/statement_builder_test.go
Normal file
223
pkg/telemetryaudit/statement_builder_test.go
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
package telemetryaudit
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||||
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
// auditFieldKeyMap returns the fixture key metadata used by the statement
// builder tests: a mix of resource-context and attribute-context string keys
// (some materialized), plus one int64 attribute, keyed by field name.
func auditFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
	// key builds a single log-signal field key fixture.
	key := func(name string, ctx telemetrytypes.FieldContext, dt telemetrytypes.FieldDataType, materialized bool) *telemetrytypes.TelemetryFieldKey {
		return &telemetrytypes.TelemetryFieldKey{
			Name:          name,
			Signal:        telemetrytypes.SignalLogs,
			FieldContext:  ctx,
			FieldDataType: dt,
			Materialized:  materialized,
		}
	}

	// Shorthands to keep the table below readable.
	attr := telemetrytypes.FieldContextAttribute
	res := telemetrytypes.FieldContextResource
	str := telemetrytypes.FieldDataTypeString
	i64 := telemetrytypes.FieldDataTypeInt64

	return map[string][]*telemetrytypes.TelemetryFieldKey{
		"service.name":                 {key("service.name", res, str, false)},
		"signoz.audit.action":          {key("signoz.audit.action", attr, str, true)},
		"signoz.audit.outcome":         {key("signoz.audit.outcome", attr, str, true)},
		"signoz.audit.principal.email": {key("signoz.audit.principal.email", attr, str, true)},
		"signoz.audit.principal.id":    {key("signoz.audit.principal.id", attr, str, true)},
		"signoz.audit.principal.type":  {key("signoz.audit.principal.type", attr, str, true)},
		"signoz.audit.resource.kind":   {key("signoz.audit.resource.kind", res, str, false)},
		"signoz.audit.resource.id":     {key("signoz.audit.resource.id", res, str, false)},
		"signoz.audit.action_category": {key("signoz.audit.action_category", attr, str, false)},
		"signoz.audit.error.type":      {key("signoz.audit.error.type", attr, str, false)},
		"signoz.audit.error.code":      {key("signoz.audit.error.code", attr, str, false)},
		"http.request.method":          {key("http.request.method", attr, str, false)},
		"http.response.status_code":    {key("http.response.status_code", attr, i64, false)},
	}
}
|
||||||
|
|
||||||
|
func newTestAuditStatementBuilder() *auditQueryStatementBuilder {
|
||||||
|
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||||
|
mockMetadataStore.KeysMap = auditFieldKeyMap()
|
||||||
|
|
||||||
|
fm := NewFieldMapper()
|
||||||
|
cb := NewConditionBuilder(fm)
|
||||||
|
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||||
|
|
||||||
|
return NewAuditQueryStatementBuilder(
|
||||||
|
instrumentationtest.New().ToProviderSettings(),
|
||||||
|
mockMetadataStore,
|
||||||
|
fm,
|
||||||
|
cb,
|
||||||
|
aggExprRewriter,
|
||||||
|
DefaultFullTextColumn,
|
||||||
|
nil,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestStatementBuilder verifies the SQL statements and bound arguments the
// audit query statement builder generates for raw list, scalar, and
// time-series requests against the signoz_audit database. Each case pins the
// exact query text (including the resource-fingerprint CTE and materialized
// attribute column predicates) and the exact argument order.
func TestStatementBuilder(t *testing.T) {
	statementBuilder := newTestAuditStatementBuilder()
	ctx := context.Background()

	testCases := []struct {
		name        string
		requestType qbtypes.RequestType
		query       qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
		expected    qbtypes.Statement
		expectedErr error
	}{
		// List: all actions by a specific user (materialized principal.id filter)
		{
			name:        "ListByPrincipalID",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.principal.id = '019a-1234-abcd-5678'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$principal$$id` = ? AND `attribute_string_signoz$$audit$$principal$$id_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "019a-1234-abcd-5678", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: all failed actions (materialized outcome filter)
		{
			name:        "ListByOutcomeFailure",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.outcome = 'failure'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "failure", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: change history of a specific dashboard (two materialized column AND)
		{
			name:        "ListByResourceKindAndID",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.resource.kind = 'dashboard' AND signoz.audit.resource.id = '019b-5678-efgh-9012'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				// Resource-context fields are filtered inside the fingerprint CTE
				// via simpleJSONExtractString + LIKE prefilters on the labels column.
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE ((simpleJSONExtractString(labels, 'signoz.audit.resource.kind') = ? AND labels LIKE ? AND labels LIKE ?) AND (simpleJSONExtractString(labels, 'signoz.audit.resource.id') = ? AND labels LIKE ? AND labels LIKE ?)) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{"dashboard", "%signoz.audit.resource.kind%", "%signoz.audit.resource.kind\":\"dashboard%", "019b-5678-efgh-9012", "%signoz.audit.resource.id%", "%signoz.audit.resource.id\":\"019b-5678-efgh-9012%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: all dashboard deletions (compliance — resource.kind + action AND)
		{
			name:        "ListByResourceKindAndAction",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.resource.kind = 'dashboard' AND signoz.audit.action = 'delete'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE (simpleJSONExtractString(labels, 'signoz.audit.resource.kind') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$action` = ? AND `attribute_string_signoz$$audit$$action_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{"dashboard", "%signoz.audit.resource.kind%", "%signoz.audit.resource.kind\":\"dashboard%", uint64(1747945619), uint64(1747983448), "delete", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: all actions by service accounts (materialized principal.type)
		{
			name:        "ListByPrincipalType",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.principal.type = 'service_account'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$principal$$type` = ? AND `attribute_string_signoz$$audit$$principal$$type_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "service_account", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// Scalar: alert — count forbidden errors (outcome + action AND)
		{
			name:        "ScalarCountByOutcomeAndAction",
			requestType: qbtypes.RequestTypeScalar,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal:       telemetrytypes.SignalLogs,
				Source:       telemetrytypes.SourceAudit,
				StepInterval: qbtypes.Step{Duration: 60 * time.Second},
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.outcome = 'failure' AND signoz.audit.action = 'update'",
				},
				Aggregations: []qbtypes.LogAggregation{
					{Expression: "count()"},
				},
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT count() AS __result_0 FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND (`attribute_string_signoz$$audit$$action` = ? AND `attribute_string_signoz$$audit$$action_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY __result_0 DESC",
				Args:  []any{uint64(1747945619), uint64(1747983448), "failure", true, "update", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
			},
		},
		// TimeSeries: failures grouped by principal email with top-N limit
		{
			name:        "TimeSeriesFailuresGroupedByPrincipal",
			requestType: qbtypes.RequestTypeTimeSeries,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal:       telemetrytypes.SignalLogs,
				Source:       telemetrytypes.SourceAudit,
				StepInterval: qbtypes.Step{Duration: 60 * time.Second},
				Aggregations: []qbtypes.LogAggregation{
					{Expression: "count()"},
				},
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.outcome = 'failure'",
				},
				GroupBy: []qbtypes.GroupByKey{
					{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "signoz.audit.principal.email"}},
				},
				Limit: 5,
			},
			expected: qbtypes.Statement{
				// Top-N series selection is done via a __limit_cte that ranks the
				// group keys by count before the bucketed time-series query runs.
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_signoz$$audit$$principal$$email_exists` = ?, `attribute_string_signoz$$audit$$principal$$email`, NULL)) AS `signoz.audit.principal.email`, count() AS __result_0 FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `signoz.audit.principal.email` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toString(multiIf(`attribute_string_signoz$$audit$$principal$$email_exists` = ?, `attribute_string_signoz$$audit$$principal$$email`, NULL)) AS `signoz.audit.principal.email`, count() AS __result_0 FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`signoz.audit.principal.email`) GLOBAL IN (SELECT `signoz.audit.principal.email` FROM __limit_cte) GROUP BY ts, `signoz.audit.principal.email`",
				Args:  []any{uint64(1747945619), uint64(1747983448), true, "failure", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 5, true, "failure", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
			},
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			// Fixed millisecond time range; the bucket args in the expectations
			// (e.g. uint64(1747945619)) presumably derive from this range with a
			// bucket-alignment offset — TODO confirm against the builder.
			q, err := statementBuilder.Build(ctx, 1747947419000, 1747983448000, testCase.requestType, testCase.query, nil)
			if testCase.expectedErr != nil {
				require.Error(t, err)
				require.Contains(t, err.Error(), testCase.expectedErr.Error())
			} else {
				require.NoError(t, err)
				require.Equal(t, testCase.expected.Query, q.Query)
				require.Equal(t, testCase.expected.Args, q.Args)
			}
		})
	}
}
|
||||||
12
pkg/telemetryaudit/tables.go
Normal file
12
pkg/telemetryaudit/tables.go
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
// Package telemetryaudit defines the ClickHouse database and table names used
// to query audit-log telemetry.
package telemetryaudit

const (
	// DBName is the ClickHouse database holding audit telemetry.
	DBName = "signoz_audit"
	// AuditLogsTableName is the distributed audit logs table; AuditLogsLocalTableName
	// is its per-shard local counterpart.
	AuditLogsTableName      = "distributed_logs"
	AuditLogsLocalTableName = "logs"
	// Tag/attribute metadata tables (distributed and local).
	TagAttributesTableName      = "distributed_tag_attributes"
	TagAttributesLocalTableName = "tag_attributes"
	// Key-listing tables for attribute and resource field names.
	LogAttributeKeysTblName = "distributed_logs_attribute_keys"
	LogResourceKeysTblName  = "distributed_logs_resource_keys"
	// Resource fingerprint table used for resource-filter CTEs.
	LogsResourceTableName = "distributed_logs_resource"
)
|
||||||
@@ -631,7 +631,7 @@ func TestJSONStmtBuilder_ArrayPaths(t *testing.T) {
|
|||||||
filter: "hasAll(body.user.permissions, ['read', 'write'])",
|
filter: "hasAll(body.user.permissions, ['read', 'write'])",
|
||||||
expected: TestExpected{
|
expected: TestExpected{
|
||||||
WhereClause: "hasAll(dynamicElement(body_v2.`user.permissions`, 'Array(Nullable(String))'), ?)",
|
WhereClause: "hasAll(dynamicElement(body_v2.`user.permissions`, 'Array(Nullable(String))'), ?)",
|
||||||
Args: []any{uint64(1747945619), uint64(1747983448), []any{[]any{"read", "write"}}, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
Args: []any{uint64(1747945619), uint64(1747983448), []any{"read", "write"}, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -757,7 +757,7 @@ func TestJSONStmtBuilder_ArrayPaths(t *testing.T) {
|
|||||||
filter: "hasAny(education[].awards[].participated[].members, ['Piyush', 'Tushar'])",
|
filter: "hasAny(education[].awards[].participated[].members, ['Piyush', 'Tushar'])",
|
||||||
expected: TestExpected{
|
expected: TestExpected{
|
||||||
WhereClause: "hasAny(arrayFlatten(arrayConcat(arrayMap(`body_v2.education`->arrayConcat(arrayMap(`body_v2.education[].awards`->arrayConcat(arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(Dynamic)'))))), dynamicElement(`body_v2.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), arrayMap(`body_v2.education[].awards`->arrayConcat(arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')), arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(Dynamic)'))))), arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_v2.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_v2.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))), ?)",
|
WhereClause: "hasAny(arrayFlatten(arrayConcat(arrayMap(`body_v2.education`->arrayConcat(arrayMap(`body_v2.education[].awards`->arrayConcat(arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(Dynamic)'))))), dynamicElement(`body_v2.education`.`awards`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), arrayMap(`body_v2.education[].awards`->arrayConcat(arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')), arrayMap(`body_v2.education[].awards[].participated`->dynamicElement(`body_v2.education[].awards[].participated`.`members`, 'Array(Nullable(String))'), arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_v2.education[].awards`.`participated`, 'Array(Dynamic)'))))), arrayMap(x->assumeNotNull(dynamicElement(x, 'JSON')), arrayFilter(x->(dynamicType(x) = 'JSON'), dynamicElement(`body_v2.education`.`awards`, 'Array(Dynamic)'))))), dynamicElement(body_v2.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))), ?)",
|
||||||
Args: []any{uint64(1747945619), uint64(1747983448), []any{[]any{"Piyush", "Tushar"}}, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
Args: []any{uint64(1747945619), uint64(1747983448), []any{"Piyush", "Tushar"}, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -45,6 +45,7 @@ func NewLogQueryStatementBuilder(
|
|||||||
DBName,
|
DBName,
|
||||||
LogsResourceV2TableName,
|
LogsResourceV2TableName,
|
||||||
telemetrytypes.SignalLogs,
|
telemetrytypes.SignalLogs,
|
||||||
|
telemetrytypes.SourceUnspecified,
|
||||||
metadataStore,
|
metadataStore,
|
||||||
fullTextColumn,
|
fullTextColumn,
|
||||||
jsonKeyToKey,
|
jsonKeyToKey,
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/factory"
|
"github.com/SigNoz/signoz/pkg/factory"
|
||||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetryaudit"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||||
@@ -27,6 +28,7 @@ import (
|
|||||||
var (
|
var (
|
||||||
ErrFailedToGetTracesKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get traces keys")
|
ErrFailedToGetTracesKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get traces keys")
|
||||||
ErrFailedToGetLogsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get logs keys")
|
ErrFailedToGetLogsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get logs keys")
|
||||||
|
ErrFailedToGetAuditKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get audit keys")
|
||||||
ErrFailedToGetTblStatement = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get tbl statement")
|
ErrFailedToGetTblStatement = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get tbl statement")
|
||||||
ErrFailedToGetMetricsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get metrics keys")
|
ErrFailedToGetMetricsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get metrics keys")
|
||||||
ErrFailedToGetMeterKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get meter keys")
|
ErrFailedToGetMeterKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get meter keys")
|
||||||
@@ -50,6 +52,11 @@ type telemetryMetaStore struct {
|
|||||||
logAttributeKeysTblName string
|
logAttributeKeysTblName string
|
||||||
logResourceKeysTblName string
|
logResourceKeysTblName string
|
||||||
logsV2TblName string
|
logsV2TblName string
|
||||||
|
auditDBName string
|
||||||
|
auditLogsTblName string
|
||||||
|
auditFieldsTblName string
|
||||||
|
auditAttributeKeysTblName string
|
||||||
|
auditResourceKeysTblName string
|
||||||
relatedMetadataDBName string
|
relatedMetadataDBName string
|
||||||
relatedMetadataTblName string
|
relatedMetadataTblName string
|
||||||
columnEvolutionMetadataTblName string
|
columnEvolutionMetadataTblName string
|
||||||
@@ -79,6 +86,11 @@ func NewTelemetryMetaStore(
|
|||||||
logsFieldsTblName string,
|
logsFieldsTblName string,
|
||||||
logAttributeKeysTblName string,
|
logAttributeKeysTblName string,
|
||||||
logResourceKeysTblName string,
|
logResourceKeysTblName string,
|
||||||
|
auditDBName string,
|
||||||
|
auditLogsTblName string,
|
||||||
|
auditFieldsTblName string,
|
||||||
|
auditAttributeKeysTblName string,
|
||||||
|
auditResourceKeysTblName string,
|
||||||
relatedMetadataDBName string,
|
relatedMetadataDBName string,
|
||||||
relatedMetadataTblName string,
|
relatedMetadataTblName string,
|
||||||
columnEvolutionMetadataTblName string,
|
columnEvolutionMetadataTblName string,
|
||||||
@@ -101,6 +113,11 @@ func NewTelemetryMetaStore(
|
|||||||
logsFieldsTblName: logsFieldsTblName,
|
logsFieldsTblName: logsFieldsTblName,
|
||||||
logAttributeKeysTblName: logAttributeKeysTblName,
|
logAttributeKeysTblName: logAttributeKeysTblName,
|
||||||
logResourceKeysTblName: logResourceKeysTblName,
|
logResourceKeysTblName: logResourceKeysTblName,
|
||||||
|
auditDBName: auditDBName,
|
||||||
|
auditLogsTblName: auditLogsTblName,
|
||||||
|
auditFieldsTblName: auditFieldsTblName,
|
||||||
|
auditAttributeKeysTblName: auditAttributeKeysTblName,
|
||||||
|
auditResourceKeysTblName: auditResourceKeysTblName,
|
||||||
relatedMetadataDBName: relatedMetadataDBName,
|
relatedMetadataDBName: relatedMetadataDBName,
|
||||||
relatedMetadataTblName: relatedMetadataTblName,
|
relatedMetadataTblName: relatedMetadataTblName,
|
||||||
columnEvolutionMetadataTblName: columnEvolutionMetadataTblName,
|
columnEvolutionMetadataTblName: columnEvolutionMetadataTblName,
|
||||||
@@ -592,6 +609,227 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
|
|||||||
return keys, complete, nil
|
return keys, complete, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) auditTblStatementToFieldKeys(ctx context.Context) ([]*telemetrytypes.TelemetryFieldKey, error) {
|
||||||
|
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||||
|
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalLogs.StringValue(),
|
||||||
|
instrumentationtypes.CodeNamespace: "metadata",
|
||||||
|
instrumentationtypes.CodeFunctionName: "auditTblStatementToFieldKeys",
|
||||||
|
})
|
||||||
|
|
||||||
|
query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", t.auditDBName, t.auditLogsTblName)
|
||||||
|
statements := []telemetrytypes.ShowCreateTableStatement{}
|
||||||
|
err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
materialisedKeys, err := ExtractFieldKeysFromTblStatement(statements[0].Statement)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx := range materialisedKeys {
|
||||||
|
materialisedKeys[idx].Signal = telemetrytypes.SignalLogs
|
||||||
|
}
|
||||||
|
|
||||||
|
return materialisedKeys, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) getAuditKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
||||||
|
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||||
|
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalLogs.StringValue(),
|
||||||
|
instrumentationtypes.CodeNamespace: "metadata",
|
||||||
|
instrumentationtypes.CodeFunctionName: "getAuditKeys",
|
||||||
|
})
|
||||||
|
|
||||||
|
if len(fieldKeySelectors) == 0 {
|
||||||
|
return nil, true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
matKeys, err := t.auditTblStatementToFieldKeys(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, err
|
||||||
|
}
|
||||||
|
mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
|
||||||
|
for _, key := range matKeys {
|
||||||
|
mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
|
||||||
|
}
|
||||||
|
|
||||||
|
var queries []string
|
||||||
|
var allArgs []any
|
||||||
|
|
||||||
|
queryAttributeTable := false
|
||||||
|
queryResourceTable := false
|
||||||
|
|
||||||
|
for _, selector := range fieldKeySelectors {
|
||||||
|
if selector.FieldContext == telemetrytypes.FieldContextUnspecified {
|
||||||
|
queryAttributeTable = true
|
||||||
|
queryResourceTable = true
|
||||||
|
break
|
||||||
|
} else if selector.FieldContext == telemetrytypes.FieldContextAttribute {
|
||||||
|
queryAttributeTable = true
|
||||||
|
} else if selector.FieldContext == telemetrytypes.FieldContextResource {
|
||||||
|
queryResourceTable = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tablesToQuery := []struct {
|
||||||
|
fieldContext telemetrytypes.FieldContext
|
||||||
|
shouldQuery bool
|
||||||
|
tblName string
|
||||||
|
}{
|
||||||
|
{telemetrytypes.FieldContextAttribute, queryAttributeTable, t.auditDBName + "." + t.auditAttributeKeysTblName},
|
||||||
|
{telemetrytypes.FieldContextResource, queryResourceTable, t.auditDBName + "." + t.auditResourceKeysTblName},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, table := range tablesToQuery {
|
||||||
|
if !table.shouldQuery {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldContext := table.fieldContext
|
||||||
|
tblName := table.tblName
|
||||||
|
|
||||||
|
sb := sqlbuilder.Select(
|
||||||
|
"name AS tag_key",
|
||||||
|
fmt.Sprintf("'%s' AS tag_type", fieldContext.TagType()),
|
||||||
|
"lower(datatype) AS tag_data_type",
|
||||||
|
fmt.Sprintf("%d AS priority", getPriorityForContext(fieldContext)),
|
||||||
|
).From(tblName)
|
||||||
|
|
||||||
|
var limit int
|
||||||
|
conds := []string{}
|
||||||
|
|
||||||
|
for _, fieldKeySelector := range fieldKeySelectors {
|
||||||
|
if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified && fieldKeySelector.FieldContext != fieldContext {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
fieldKeyConds := []string{}
|
||||||
|
if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
|
||||||
|
fieldKeyConds = append(fieldKeyConds, sb.E("name", fieldKeySelector.Name))
|
||||||
|
} else {
|
||||||
|
fieldKeyConds = append(fieldKeyConds, sb.ILike("name", "%"+escapeForLike(fieldKeySelector.Name)+"%"))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
||||||
|
fieldKeyConds = append(fieldKeyConds, sb.E("datatype", fieldKeySelector.FieldDataType.TagDataType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(fieldKeyConds) > 0 {
|
||||||
|
conds = append(conds, sb.And(fieldKeyConds...))
|
||||||
|
}
|
||||||
|
limit += fieldKeySelector.Limit
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(conds) > 0 {
|
||||||
|
sb.Where(sb.Or(conds...))
|
||||||
|
}
|
||||||
|
|
||||||
|
sb.GroupBy("name", "datatype")
|
||||||
|
if limit == 0 {
|
||||||
|
limit = 1000
|
||||||
|
}
|
||||||
|
|
||||||
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
queries = append(queries, query)
|
||||||
|
allArgs = append(allArgs, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(queries) == 0 {
|
||||||
|
return []*telemetrytypes.TelemetryFieldKey{}, true, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var limit int
|
||||||
|
for _, fieldKeySelector := range fieldKeySelectors {
|
||||||
|
limit += fieldKeySelector.Limit
|
||||||
|
}
|
||||||
|
if limit == 0 {
|
||||||
|
limit = 1000
|
||||||
|
}
|
||||||
|
|
||||||
|
mainQuery := fmt.Sprintf(`
|
||||||
|
SELECT tag_key, tag_type, tag_data_type, max(priority) as priority
|
||||||
|
FROM (
|
||||||
|
%s
|
||||||
|
) AS combined_results
|
||||||
|
GROUP BY tag_key, tag_type, tag_data_type
|
||||||
|
ORDER BY priority
|
||||||
|
LIMIT %d
|
||||||
|
`, strings.Join(queries, " UNION ALL "), limit+1)
|
||||||
|
|
||||||
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, mainQuery, allArgs...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
keys := []*telemetrytypes.TelemetryFieldKey{}
|
||||||
|
rowCount := 0
|
||||||
|
searchTexts := []string{}
|
||||||
|
|
||||||
|
for _, fieldKeySelector := range fieldKeySelectors {
|
||||||
|
searchTexts = append(searchTexts, fieldKeySelector.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
rowCount++
|
||||||
|
if rowCount > limit {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
var name string
|
||||||
|
var fieldContext telemetrytypes.FieldContext
|
||||||
|
var fieldDataType telemetrytypes.FieldDataType
|
||||||
|
var priority uint8
|
||||||
|
err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
|
||||||
|
}
|
||||||
|
key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
|
||||||
|
|
||||||
|
if !ok {
|
||||||
|
key = &telemetrytypes.TelemetryFieldKey{
|
||||||
|
Name: name,
|
||||||
|
Signal: telemetrytypes.SignalLogs,
|
||||||
|
FieldContext: fieldContext,
|
||||||
|
FieldDataType: fieldDataType,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
keys = append(keys, key)
|
||||||
|
mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
|
||||||
|
}
|
||||||
|
|
||||||
|
if rows.Err() != nil {
|
||||||
|
return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
|
||||||
|
}
|
||||||
|
|
||||||
|
complete := rowCount <= limit
|
||||||
|
|
||||||
|
// Add intrinsic audit fields (same as logs intrinsics: body, severity_text, etc.)
|
||||||
|
staticKeys := maps.Keys(telemetryaudit.IntrinsicFields)
|
||||||
|
for _, key := range staticKeys {
|
||||||
|
found := false
|
||||||
|
for _, v := range searchTexts {
|
||||||
|
if v == "" || strings.Contains(key, v) {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if found {
|
||||||
|
if field, exists := telemetryaudit.IntrinsicFields[key]; exists {
|
||||||
|
if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
|
||||||
|
keys = append(keys, &field)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return keys, complete, nil
|
||||||
|
}
|
||||||
|
|
||||||
func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
|
func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
|
||||||
switch ctx {
|
switch ctx {
|
||||||
case telemetrytypes.FieldContextLog:
|
case telemetrytypes.FieldContextLog:
|
||||||
@@ -889,7 +1127,11 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
|
|||||||
case telemetrytypes.SignalTraces:
|
case telemetrytypes.SignalTraces:
|
||||||
keys, complete, err = t.getTracesKeys(ctx, selectors)
|
keys, complete, err = t.getTracesKeys(ctx, selectors)
|
||||||
case telemetrytypes.SignalLogs:
|
case telemetrytypes.SignalLogs:
|
||||||
keys, complete, err = t.getLogsKeys(ctx, selectors)
|
if fieldKeySelector.Source == telemetrytypes.SourceAudit {
|
||||||
|
keys, complete, err = t.getAuditKeys(ctx, selectors)
|
||||||
|
} else {
|
||||||
|
keys, complete, err = t.getLogsKeys(ctx, selectors)
|
||||||
|
}
|
||||||
case telemetrytypes.SignalMetrics:
|
case telemetrytypes.SignalMetrics:
|
||||||
if fieldKeySelector.Source == telemetrytypes.SourceMeter {
|
if fieldKeySelector.Source == telemetrytypes.SourceMeter {
|
||||||
keys, complete, err = t.getMeterSourceMetricKeys(ctx, selectors)
|
keys, complete, err = t.getMeterSourceMetricKeys(ctx, selectors)
|
||||||
@@ -938,6 +1180,7 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
|
|||||||
func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, bool, error) {
|
||||||
|
|
||||||
logsSelectors := []*telemetrytypes.FieldKeySelector{}
|
logsSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
|
auditSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
tracesSelectors := []*telemetrytypes.FieldKeySelector{}
|
tracesSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
metricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
metricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
meterSourceMetricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
meterSourceMetricsSelectors := []*telemetrytypes.FieldKeySelector{}
|
||||||
@@ -945,7 +1188,11 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
|
|||||||
for _, fieldKeySelector := range fieldKeySelectors {
|
for _, fieldKeySelector := range fieldKeySelectors {
|
||||||
switch fieldKeySelector.Signal {
|
switch fieldKeySelector.Signal {
|
||||||
case telemetrytypes.SignalLogs:
|
case telemetrytypes.SignalLogs:
|
||||||
logsSelectors = append(logsSelectors, fieldKeySelector)
|
if fieldKeySelector.Source == telemetrytypes.SourceAudit {
|
||||||
|
auditSelectors = append(auditSelectors, fieldKeySelector)
|
||||||
|
} else {
|
||||||
|
logsSelectors = append(logsSelectors, fieldKeySelector)
|
||||||
|
}
|
||||||
case telemetrytypes.SignalTraces:
|
case telemetrytypes.SignalTraces:
|
||||||
tracesSelectors = append(tracesSelectors, fieldKeySelector)
|
tracesSelectors = append(tracesSelectors, fieldKeySelector)
|
||||||
case telemetrytypes.SignalMetrics:
|
case telemetrytypes.SignalMetrics:
|
||||||
@@ -965,6 +1212,10 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, false, err
|
return nil, false, err
|
||||||
}
|
}
|
||||||
|
auditKeys, auditComplete, err := t.getAuditKeys(ctx, auditSelectors)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, err
|
||||||
|
}
|
||||||
tracesKeys, tracesComplete, err := t.getTracesKeys(ctx, tracesSelectors)
|
tracesKeys, tracesComplete, err := t.getTracesKeys(ctx, tracesSelectors)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, false, err
|
return nil, false, err
|
||||||
@@ -979,12 +1230,15 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
|
|||||||
return nil, false, err
|
return nil, false, err
|
||||||
}
|
}
|
||||||
// Complete only if all queries are complete
|
// Complete only if all queries are complete
|
||||||
complete := logsComplete && tracesComplete && metricsComplete
|
complete := logsComplete && auditComplete && tracesComplete && metricsComplete
|
||||||
|
|
||||||
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
|
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
|
||||||
for _, key := range logsKeys {
|
for _, key := range logsKeys {
|
||||||
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
}
|
}
|
||||||
|
for _, key := range auditKeys {
|
||||||
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
|
}
|
||||||
for _, key := range tracesKeys {
|
for _, key := range tracesKeys {
|
||||||
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
|
||||||
}
|
}
|
||||||
@@ -1338,6 +1592,97 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
|
|||||||
return values, complete, nil
|
return values, complete, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (t *telemetryMetaStore) getAuditFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
||||||
|
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||||
|
instrumentationtypes.TelemetrySignal: telemetrytypes.SignalLogs.StringValue(),
|
||||||
|
instrumentationtypes.CodeNamespace: "metadata",
|
||||||
|
instrumentationtypes.CodeFunctionName: "getAuditFieldValues",
|
||||||
|
})
|
||||||
|
|
||||||
|
limit := fieldValueSelector.Limit
|
||||||
|
if limit == 0 {
|
||||||
|
limit = 50
|
||||||
|
}
|
||||||
|
|
||||||
|
sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.auditDBName + "." + t.auditFieldsTblName)
|
||||||
|
|
||||||
|
if fieldValueSelector.Name != "" {
|
||||||
|
sb.Where(sb.E("tag_key", fieldValueSelector.Name))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
|
||||||
|
sb.Where(sb.E("tag_type", fieldValueSelector.FieldContext.TagType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
|
||||||
|
sb.Where(sb.E("tag_data_type", fieldValueSelector.FieldDataType.TagDataType()))
|
||||||
|
}
|
||||||
|
|
||||||
|
if fieldValueSelector.Value != "" {
|
||||||
|
switch fieldValueSelector.FieldDataType {
|
||||||
|
case telemetrytypes.FieldDataTypeString:
|
||||||
|
sb.Where(sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
||||||
|
case telemetrytypes.FieldDataTypeNumber:
|
||||||
|
sb.Where(sb.IsNotNull("number_value"))
|
||||||
|
sb.Where(sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
|
||||||
|
case telemetrytypes.FieldDataTypeUnspecified:
|
||||||
|
sb.Where(sb.Or(
|
||||||
|
sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
|
||||||
|
sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// fetch one extra row to detect whether the result set is complete
|
||||||
|
sb.Limit(limit + 1)
|
||||||
|
|
||||||
|
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
|
||||||
|
|
||||||
|
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
|
||||||
|
values := &telemetrytypes.TelemetryFieldValues{}
|
||||||
|
seen := make(map[string]bool)
|
||||||
|
rowCount := 0
|
||||||
|
totalCount := 0
|
||||||
|
|
||||||
|
for rows.Next() {
|
||||||
|
rowCount++
|
||||||
|
|
||||||
|
var stringValue string
|
||||||
|
var numberValue float64
|
||||||
|
err = rows.Scan(&stringValue, &numberValue)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
|
||||||
|
}
|
||||||
|
if stringValue != "" && !seen[stringValue] {
|
||||||
|
if totalCount >= limit {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
values.StringValues = append(values.StringValues, stringValue)
|
||||||
|
seen[stringValue] = true
|
||||||
|
totalCount++
|
||||||
|
}
|
||||||
|
if numberValue != 0 {
|
||||||
|
if totalCount >= limit {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if !seen[fmt.Sprintf("%f", numberValue)] {
|
||||||
|
values.NumberValues = append(values.NumberValues, numberValue)
|
||||||
|
seen[fmt.Sprintf("%f", numberValue)] = true
|
||||||
|
totalCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
complete := rowCount <= limit
|
||||||
|
|
||||||
|
return values, complete, nil
|
||||||
|
}
|
||||||
|
|
||||||
// getMetricFieldValues returns field values and whether the result is complete.
|
// getMetricFieldValues returns field values and whether the result is complete.
|
||||||
func (t *telemetryMetaStore) getMetricFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
func (t *telemetryMetaStore) getMetricFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
|
||||||
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
|
||||||
@@ -1628,7 +1973,11 @@ func (t *telemetryMetaStore) GetAllValues(ctx context.Context, fieldValueSelecto
|
|||||||
case telemetrytypes.SignalTraces:
|
case telemetrytypes.SignalTraces:
|
||||||
values, complete, err = t.getSpanFieldValues(ctx, fieldValueSelector)
|
values, complete, err = t.getSpanFieldValues(ctx, fieldValueSelector)
|
||||||
case telemetrytypes.SignalLogs:
|
case telemetrytypes.SignalLogs:
|
||||||
values, complete, err = t.getLogFieldValues(ctx, fieldValueSelector)
|
if fieldValueSelector.Source == telemetrytypes.SourceAudit {
|
||||||
|
values, complete, err = t.getAuditFieldValues(ctx, fieldValueSelector)
|
||||||
|
} else {
|
||||||
|
values, complete, err = t.getLogFieldValues(ctx, fieldValueSelector)
|
||||||
|
}
|
||||||
case telemetrytypes.SignalMetrics:
|
case telemetrytypes.SignalMetrics:
|
||||||
if fieldValueSelector.Source == telemetrytypes.SourceMeter {
|
if fieldValueSelector.Source == telemetrytypes.SourceMeter {
|
||||||
values, complete, err = t.getMeterSourceMetricFieldValues(ctx, fieldValueSelector)
|
values, complete, err = t.getMeterSourceMetricFieldValues(ctx, fieldValueSelector)
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import (
|
|||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetryaudit"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||||
@@ -37,6 +38,11 @@ func TestGetFirstSeenFromMetricMetadata(t *testing.T) {
|
|||||||
telemetrylogs.TagAttributesV2TableName,
|
telemetrylogs.TagAttributesV2TableName,
|
||||||
telemetrylogs.LogAttributeKeysTblName,
|
telemetrylogs.LogAttributeKeysTblName,
|
||||||
telemetrylogs.LogResourceKeysTblName,
|
telemetrylogs.LogResourceKeysTblName,
|
||||||
|
telemetryaudit.DBName,
|
||||||
|
telemetryaudit.AuditLogsTableName,
|
||||||
|
telemetryaudit.TagAttributesTableName,
|
||||||
|
telemetryaudit.LogAttributeKeysTblName,
|
||||||
|
telemetryaudit.LogResourceKeysTblName,
|
||||||
DBName,
|
DBName,
|
||||||
AttributesMetadataLocalTableName,
|
AttributesMetadataLocalTableName,
|
||||||
ColumnEvolutionMetadataTableName,
|
ColumnEvolutionMetadataTableName,
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ import (
|
|||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||||
|
"github.com/SigNoz/signoz/pkg/telemetryaudit"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
"github.com/SigNoz/signoz/pkg/telemetrymeter"
|
||||||
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
|
||||||
@@ -36,6 +37,11 @@ func newTestTelemetryMetaStoreTestHelper(store telemetrystore.TelemetryStore) te
|
|||||||
telemetrylogs.TagAttributesV2TableName,
|
telemetrylogs.TagAttributesV2TableName,
|
||||||
telemetrylogs.LogAttributeKeysTblName,
|
telemetrylogs.LogAttributeKeysTblName,
|
||||||
telemetrylogs.LogResourceKeysTblName,
|
telemetrylogs.LogResourceKeysTblName,
|
||||||
|
telemetryaudit.DBName,
|
||||||
|
telemetryaudit.AuditLogsTableName,
|
||||||
|
telemetryaudit.TagAttributesTableName,
|
||||||
|
telemetryaudit.LogAttributeKeysTblName,
|
||||||
|
telemetryaudit.LogResourceKeysTblName,
|
||||||
DBName,
|
DBName,
|
||||||
AttributesMetadataLocalTableName,
|
AttributesMetadataLocalTableName,
|
||||||
ColumnEvolutionMetadataTableName,
|
ColumnEvolutionMetadataTableName,
|
||||||
|
|||||||
@@ -9,6 +9,6 @@ const (
|
|||||||
ColumnEvolutionMetadataTableName = "distributed_column_evolution_metadata"
|
ColumnEvolutionMetadataTableName = "distributed_column_evolution_metadata"
|
||||||
PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
|
PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
|
||||||
// Column Evolution table stores promoted paths as (signal, column_name, field_context, field_name); see signoz-otel-collector metadata_migrations.
|
// Column Evolution table stores promoted paths as (signal, column_name, field_context, field_name); see signoz-otel-collector metadata_migrations.
|
||||||
PromotedPathsTableName = "distributed_column_evolution_metadata"
|
PromotedPathsTableName = "distributed_column_evolution_metadata"
|
||||||
SkipIndexTableName = "system.data_skipping_indices"
|
SkipIndexTableName = "system.data_skipping_indices"
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ type resourceFilterStatementBuilder[T any] struct {
|
|||||||
conditionBuilder qbtypes.ConditionBuilder
|
conditionBuilder qbtypes.ConditionBuilder
|
||||||
metadataStore telemetrytypes.MetadataStore
|
metadataStore telemetrytypes.MetadataStore
|
||||||
signal telemetrytypes.Signal
|
signal telemetrytypes.Signal
|
||||||
|
source telemetrytypes.Source
|
||||||
|
|
||||||
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
fullTextColumn *telemetrytypes.TelemetryFieldKey
|
||||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
|
||||||
@@ -37,6 +38,7 @@ func New[T any](
|
|||||||
dbName string,
|
dbName string,
|
||||||
tableName string,
|
tableName string,
|
||||||
signal telemetrytypes.Signal,
|
signal telemetrytypes.Signal,
|
||||||
|
source telemetrytypes.Source,
|
||||||
metadataStore telemetrytypes.MetadataStore,
|
metadataStore telemetrytypes.MetadataStore,
|
||||||
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
fullTextColumn *telemetrytypes.TelemetryFieldKey,
|
||||||
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
|
||||||
@@ -52,6 +54,7 @@ func New[T any](
|
|||||||
conditionBuilder: cb,
|
conditionBuilder: cb,
|
||||||
metadataStore: metadataStore,
|
metadataStore: metadataStore,
|
||||||
signal: signal,
|
signal: signal,
|
||||||
|
source: source,
|
||||||
fullTextColumn: fullTextColumn,
|
fullTextColumn: fullTextColumn,
|
||||||
jsonKeyToKey: jsonKeyToKey,
|
jsonKeyToKey: jsonKeyToKey,
|
||||||
}
|
}
|
||||||
@@ -72,6 +75,7 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
keySelectors[idx].Signal = b.signal
|
keySelectors[idx].Signal = b.signal
|
||||||
|
keySelectors[idx].Source = b.source
|
||||||
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
|
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
|
||||||
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
|
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -375,6 +375,7 @@ func TestResourceFilterStatementBuilder_Traces(t *testing.T) {
|
|||||||
"signoz_traces",
|
"signoz_traces",
|
||||||
"distributed_traces_v3_resource",
|
"distributed_traces_v3_resource",
|
||||||
telemetrytypes.SignalTraces,
|
telemetrytypes.SignalTraces,
|
||||||
|
telemetrytypes.SourceUnspecified,
|
||||||
mockMetadataStore,
|
mockMetadataStore,
|
||||||
nil,
|
nil,
|
||||||
nil,
|
nil,
|
||||||
@@ -592,6 +593,7 @@ func TestResourceFilterStatementBuilder_Logs(t *testing.T) {
|
|||||||
"signoz_logs",
|
"signoz_logs",
|
||||||
"distributed_logs_v2_resource",
|
"distributed_logs_v2_resource",
|
||||||
telemetrytypes.SignalLogs,
|
telemetrytypes.SignalLogs,
|
||||||
|
telemetrytypes.SourceUnspecified,
|
||||||
mockMetadataStore,
|
mockMetadataStore,
|
||||||
nil,
|
nil,
|
||||||
nil,
|
nil,
|
||||||
@@ -653,6 +655,7 @@ func TestResourceFilterStatementBuilder_Variables(t *testing.T) {
|
|||||||
"signoz_traces",
|
"signoz_traces",
|
||||||
"distributed_traces_v3_resource",
|
"distributed_traces_v3_resource",
|
||||||
telemetrytypes.SignalTraces,
|
telemetrytypes.SignalTraces,
|
||||||
|
telemetrytypes.SourceUnspecified,
|
||||||
mockMetadataStore,
|
mockMetadataStore,
|
||||||
nil,
|
nil,
|
||||||
nil,
|
nil,
|
||||||
|
|||||||
@@ -49,6 +49,7 @@ func NewTraceQueryStatementBuilder(
|
|||||||
DBName,
|
DBName,
|
||||||
TracesResourceV3TableName,
|
TracesResourceV3TableName,
|
||||||
telemetrytypes.SignalTraces,
|
telemetrytypes.SignalTraces,
|
||||||
|
telemetrytypes.SourceUnspecified,
|
||||||
metadataStore,
|
metadataStore,
|
||||||
nil,
|
nil,
|
||||||
nil,
|
nil,
|
||||||
|
|||||||
@@ -39,6 +39,7 @@ func NewTraceOperatorStatementBuilder(
|
|||||||
DBName,
|
DBName,
|
||||||
TracesResourceV3TableName,
|
TracesResourceV3TableName,
|
||||||
telemetrytypes.SignalTraces,
|
telemetrytypes.SignalTraces,
|
||||||
|
telemetrytypes.SourceUnspecified,
|
||||||
metadataStore,
|
metadataStore,
|
||||||
nil,
|
nil,
|
||||||
nil,
|
nil,
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
package cloudintegrationtypes
|
package cloudintegrationtypes
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"encoding/json"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/types"
|
"github.com/SigNoz/signoz/pkg/types"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
@@ -29,16 +31,285 @@ type AccountConfig struct {
|
|||||||
AWS *AWSAccountConfig `json:"aws" required:"true" nullable:"false"`
|
AWS *AWSAccountConfig `json:"aws" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type AWSAccountConfig struct {
|
||||||
|
Regions []string `json:"regions" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PostableAccount struct {
|
||||||
|
Config *PostableAccountConfig `json:"config" required:"true"`
|
||||||
|
Credentials *Credentials `json:"credentials" required:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PostableAccountConfig struct {
|
||||||
|
// as agent version is common for all providers, we can keep it at top level of this struct
|
||||||
|
AgentVersion string
|
||||||
|
Aws *AWSPostableAccountConfig `json:"aws" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Credentials struct {
|
||||||
|
SigNozAPIURL string `json:"sigNozApiUrl" required:"true"`
|
||||||
|
SigNozAPIKey string `json:"sigNozApiKey" required:"true"` // PAT
|
||||||
|
IngestionURL string `json:"ingestionUrl" required:"true"`
|
||||||
|
IngestionKey string `json:"ingestionKey" required:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AWSPostableAccountConfig struct {
|
||||||
|
DeploymentRegion string `json:"deploymentRegion" required:"true"`
|
||||||
|
Regions []string `json:"regions" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GettableAccountWithConnectionArtifact struct {
|
||||||
|
ID valuer.UUID `json:"id" required:"true"`
|
||||||
|
ConnectionArtifact *ConnectionArtifact `json:"connectionArtifact" required:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ConnectionArtifact struct {
|
||||||
|
// required till new providers are added
|
||||||
|
Aws *AWSConnectionArtifact `json:"aws" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AWSConnectionArtifact struct {
|
||||||
|
ConnectionURL string `json:"connectionUrl" required:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GetConnectionArtifactRequest = PostableAccount
|
||||||
|
|
||||||
type GettableAccounts struct {
|
type GettableAccounts struct {
|
||||||
Accounts []*Account `json:"accounts" required:"true" nullable:"false"`
|
Accounts []*Account `json:"accounts" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type GettableAccount = Account
|
|
||||||
|
|
||||||
type UpdatableAccount struct {
|
type UpdatableAccount struct {
|
||||||
Config *AccountConfig `json:"config" required:"true" nullable:"false"`
|
Config *AccountConfig `json:"config" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type AWSAccountConfig struct {
|
func NewAccount(orgID valuer.UUID, provider CloudProviderType, config *AccountConfig) *Account {
|
||||||
Regions []string `json:"regions" required:"true" nullable:"false"`
|
return &Account{
|
||||||
|
Identifiable: types.Identifiable{
|
||||||
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
},
|
||||||
|
OrgID: orgID,
|
||||||
|
Provider: provider,
|
||||||
|
Config: config,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewAccountFromStorable(storableAccount *StorableCloudIntegration) (*Account, error) {
|
||||||
|
// config can not be empty
|
||||||
|
if storableAccount.Config == "" {
|
||||||
|
return nil, errors.NewInternalf(errors.CodeInternal, "config is empty for account with id: %s", storableAccount.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
account := &Account{
|
||||||
|
Identifiable: storableAccount.Identifiable,
|
||||||
|
TimeAuditable: storableAccount.TimeAuditable,
|
||||||
|
ProviderAccountID: storableAccount.AccountID,
|
||||||
|
Provider: storableAccount.Provider,
|
||||||
|
RemovedAt: storableAccount.RemovedAt,
|
||||||
|
OrgID: storableAccount.OrgID,
|
||||||
|
Config: new(AccountConfig),
|
||||||
|
}
|
||||||
|
|
||||||
|
switch storableAccount.Provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
awsConfig := new(AWSAccountConfig)
|
||||||
|
err := json.Unmarshal([]byte(storableAccount.Config), awsConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
account.Config.AWS = awsConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
if storableAccount.LastAgentReport != nil {
|
||||||
|
account.AgentReport = &AgentReport{
|
||||||
|
TimestampMillis: storableAccount.LastAgentReport.TimestampMillis,
|
||||||
|
Data: storableAccount.LastAgentReport.Data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return account, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewAccountsFromStorables(storableAccounts []*StorableCloudIntegration) ([]*Account, error) {
|
||||||
|
accounts := make([]*Account, 0, len(storableAccounts))
|
||||||
|
for _, storableAccount := range storableAccounts {
|
||||||
|
account, err := NewAccountFromStorable(storableAccount)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
accounts = append(accounts, account)
|
||||||
|
}
|
||||||
|
|
||||||
|
return accounts, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (account *Account) Update(config *AccountConfig) error {
|
||||||
|
if account.RemovedAt != nil {
|
||||||
|
return errors.New(errors.TypeUnsupported, ErrCodeCloudIntegrationRemoved, "this operation is not supported for a removed cloud integration account")
|
||||||
|
}
|
||||||
|
account.Config = config
|
||||||
|
account.UpdatedAt = time.Now()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (account *Account) IsRemoved() bool {
|
||||||
|
return account.RemovedAt != nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewAccountConfigFromPostable(provider CloudProviderType, config *PostableAccountConfig) (*AccountConfig, error) {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
if config.Aws == nil {
|
||||||
|
return nil, errors.NewInternalf(errors.CodeInternal, "AWS config is nil")
|
||||||
|
}
|
||||||
|
return &AccountConfig{
|
||||||
|
AWS: &AWSAccountConfig{
|
||||||
|
Regions: config.Aws.Regions,
|
||||||
|
},
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
|
||||||
|
}
|
||||||
|
|
||||||
|
// func NewAccountFromPostableAccount(provider CloudProviderType, account *PostableAccount) (*Account, error) {
|
||||||
|
// req := &Account{
|
||||||
|
// Credentials: account.Credentials,
|
||||||
|
// }
|
||||||
|
|
||||||
|
// switch provider {
|
||||||
|
// case CloudProviderTypeAWS:
|
||||||
|
// req.Config = &ConnectionArtifactRequestConfig{
|
||||||
|
// Aws: &AWSConnectionArtifactRequest{
|
||||||
|
// DeploymentRegion: artifact.Config.Aws.DeploymentRegion,
|
||||||
|
// Regions: artifact.Config.Aws.Regions,
|
||||||
|
// },
|
||||||
|
// }
|
||||||
|
|
||||||
|
// return req, nil
|
||||||
|
// default:
|
||||||
|
// return nil, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
func NewAgentReport(data map[string]any) *AgentReport {
|
||||||
|
return &AgentReport{
|
||||||
|
TimestampMillis: time.Now().UnixMilli(),
|
||||||
|
Data: data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToJSON return JSON bytes for the provider's config
|
||||||
|
// thats why not naming it MarshalJSON(), as it will interfere with default JSON marshalling of AccountConfig struct.
|
||||||
|
// NOTE: this entertains first non-null provider's config.
|
||||||
|
func (config *AccountConfig) ToJSON() ([]byte, error) {
|
||||||
|
if config.AWS != nil {
|
||||||
|
return json.Marshal(config.AWS)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil, errors.NewInternalf(errors.CodeInternal, "no provider account config found")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (config *PostableAccountConfig) AddAgentVersion(agentVersion string) {
|
||||||
|
config.AgentVersion = agentVersion
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate checks that the connection artifact request has a valid provider-specific block
|
||||||
|
// with non-empty, valid regions and a valid deployment region.
|
||||||
|
func (account *PostableAccount) Validate(provider CloudProviderType) error {
|
||||||
|
if account.Config == nil || account.Credentials == nil {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"config and credentials are required")
|
||||||
|
}
|
||||||
|
|
||||||
|
if account.Credentials.SigNozAPIURL == "" {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"sigNozApiURL can not be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if account.Credentials.SigNozAPIKey == "" {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"sigNozApiKey can not be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if account.Credentials.IngestionURL == "" {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"ingestionUrl can not be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
if account.Credentials.IngestionKey == "" {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"ingestionKey can not be empty")
|
||||||
|
}
|
||||||
|
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
if account.Config.Aws == nil {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"aws configuration is required")
|
||||||
|
}
|
||||||
|
return account.Config.Aws.Validate()
|
||||||
|
}
|
||||||
|
|
||||||
|
return errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate checks that the AWS connection artifact request has a valid deployment region
|
||||||
|
// and a non-empty list of valid regions.
|
||||||
|
func (req *AWSPostableAccountConfig) Validate() error {
|
||||||
|
if req.DeploymentRegion == "" {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"deploymentRegion is required")
|
||||||
|
}
|
||||||
|
if _, ok := ValidAWSRegions[req.DeploymentRegion]; !ok {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidCloudRegion,
|
||||||
|
"invalid deployment region: %s", req.DeploymentRegion)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(req.Regions) == 0 {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"at least one region is required")
|
||||||
|
}
|
||||||
|
for _, region := range req.Regions {
|
||||||
|
if _, ok := ValidAWSRegions[region]; !ok {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidCloudRegion,
|
||||||
|
"invalid AWS region: %s", region)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (updatable *UpdatableAccount) Validate(provider CloudProviderType) error {
|
||||||
|
if updatable.Config == nil {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"config is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
if updatable.Config.AWS == nil {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"aws configuration is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(updatable.Config.AWS.Regions) == 0 {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"at least one region is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, region := range updatable.Config.AWS.Regions {
|
||||||
|
if _, ok := ValidAWSRegions[region]; !ok {
|
||||||
|
return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidCloudRegion,
|
||||||
|
"invalid AWS region: %s", region)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
return errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput,
|
||||||
|
"invalid cloud provider: %s", provider.StringValue())
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|||||||
93
pkg/types/cloudintegrationtypes/checkin.go
Normal file
93
pkg/types/cloudintegrationtypes/checkin.go
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
package cloudintegrationtypes
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
|
)
|
||||||
|
|
||||||
|
type AgentCheckInRequest struct {
|
||||||
|
ProviderAccountID string `json:"providerAccountId" required:"false"`
|
||||||
|
CloudIntegrationID valuer.UUID `json:"cloudIntegrationId" required:"false"`
|
||||||
|
|
||||||
|
Data map[string]any `json:"data" required:"true" nullable:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type PostableAgentCheckIn struct {
|
||||||
|
AgentCheckInRequest
|
||||||
|
// following are backward compatible fields for older running agents, hence snake case JSON keys.
|
||||||
|
// Which get mapped to new fields in AgentCheckInRequest
|
||||||
|
ID string `json:"account_id" required:"false"` // => CloudIntegrationID
|
||||||
|
AccountID string `json:"cloud_account_id" required:"false"` // => ProviderAccountID
|
||||||
|
}
|
||||||
|
|
||||||
|
type AgentCheckInResponse struct {
|
||||||
|
CloudIntegrationID string `json:"cloudIntegrationId" required:"true"`
|
||||||
|
ProviderAccountID string `json:"providerAccountId" required:"true"`
|
||||||
|
IntegrationConfig *ProviderIntegrationConfig `json:"integrationConfig" required:"true"`
|
||||||
|
RemovedAt *time.Time `json:"removedAt" required:"true" nullable:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type GettableAgentCheckIn struct {
|
||||||
|
// Older fields for backward compatibility with existing AWS agents
|
||||||
|
AccountID string `json:"account_id" required:"true"`
|
||||||
|
CloudAccountID string `json:"cloud_account_id" required:"true"`
|
||||||
|
OlderIntegrationConfig *IntegrationConfig `json:"integration_config" required:"true" nullable:"true"`
|
||||||
|
OlderRemovedAt *time.Time `json:"removed_at" required:"true" nullable:"true"`
|
||||||
|
|
||||||
|
AgentCheckInResponse
|
||||||
|
}
|
||||||
|
|
||||||
|
// IntegrationConfig older integration config struct for backward compatibility,
|
||||||
|
// this will be eventually removed once agents are updated to use new struct.
|
||||||
|
type IntegrationConfig struct {
|
||||||
|
EnabledRegions []string `json:"enabled_regions" required:"true" nullable:"false"` // backward compatible
|
||||||
|
Telemetry *OldAWSCollectionStrategy `json:"telemetry" required:"true" nullable:"false"` // backward compatible
|
||||||
|
}
|
||||||
|
|
||||||
|
type ProviderIntegrationConfig struct {
|
||||||
|
AWS *AWSIntegrationConfig `json:"aws" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AWSIntegrationConfig struct {
|
||||||
|
EnabledRegions []string `json:"enabledRegions" required:"true" nullable:"false"`
|
||||||
|
TelemetryCollectionStrategy *AWSTelemetryCollectionStrategy `json:"telemetryCollectionStrategy" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewGettableAgentCheckIn constructs a backward-compatible response from an AgentCheckInResponse.
|
||||||
|
// It populates the old snake_case fields (account_id, cloud_account_id, integration_config, removed_at)
|
||||||
|
// from the new camelCase fields so older agents continue to work unchanged.
|
||||||
|
// The provider parameter controls which provider-specific block is mapped into the legacy integration_config.
|
||||||
|
func NewGettableAgentCheckIn(provider CloudProviderType, resp *AgentCheckInResponse) *GettableAgentCheckIn {
|
||||||
|
gettable := &GettableAgentCheckIn{
|
||||||
|
AccountID: resp.CloudIntegrationID,
|
||||||
|
CloudAccountID: resp.ProviderAccountID,
|
||||||
|
OlderRemovedAt: resp.RemovedAt,
|
||||||
|
AgentCheckInResponse: *resp,
|
||||||
|
}
|
||||||
|
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
gettable.OlderIntegrationConfig = awsOlderIntegrationConfig(resp.IntegrationConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
return gettable
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate checks that the request uses either old fields (account_id, cloud_account_id) or
|
||||||
|
// new fields (cloudIntegrationId, providerAccountId), never a mix of both.
|
||||||
|
func (req *PostableAgentCheckIn) Validate() error {
|
||||||
|
hasOldFields := req.ID != "" || req.AccountID != ""
|
||||||
|
hasNewFields := !req.CloudIntegrationID.IsZero() || req.ProviderAccountID != ""
|
||||||
|
|
||||||
|
if hasOldFields && hasNewFields {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"request must use either old fields (account_id, cloud_account_id) or new fields (cloudIntegrationId, providerAccountId), not both")
|
||||||
|
}
|
||||||
|
if !hasOldFields && !hasNewFields {
|
||||||
|
return errors.New(errors.TypeInvalidInput, ErrCodeInvalidInput,
|
||||||
|
"request must provide either old fields (account_id, cloud_account_id) or new fields (cloudIntegrationId, providerAccountId)")
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
@@ -13,10 +13,16 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
ErrCodeUnsupported = errors.MustNewCode("cloud_integration_unsupported")
|
||||||
|
ErrCodeInvalidInput = errors.MustNewCode("cloud_integration_invalid_input")
|
||||||
ErrCodeCloudIntegrationNotFound = errors.MustNewCode("cloud_integration_not_found")
|
ErrCodeCloudIntegrationNotFound = errors.MustNewCode("cloud_integration_not_found")
|
||||||
ErrCodeCloudIntegrationAlreadyExists = errors.MustNewCode("cloud_integration_already_exists")
|
ErrCodeCloudIntegrationAlreadyExists = errors.MustNewCode("cloud_integration_already_exists")
|
||||||
|
ErrCodeCloudIntegrationAlreadyConnected = errors.MustNewCode("cloud_integration_already_connected")
|
||||||
|
ErrCodeCloudIntegrationInvalidConfig = errors.MustNewCode("cloud_integration_invalid_config")
|
||||||
|
ErrCodeCloudIntegrationRemoved = errors.MustNewCode("cloud_integration_removed")
|
||||||
ErrCodeCloudIntegrationServiceNotFound = errors.MustNewCode("cloud_integration_service_not_found")
|
ErrCodeCloudIntegrationServiceNotFound = errors.MustNewCode("cloud_integration_service_not_found")
|
||||||
ErrCodeCloudIntegrationServiceAlreadyExists = errors.MustNewCode("cloud_integration_service_already_exists")
|
ErrCodeCloudIntegrationServiceAlreadyExists = errors.MustNewCode("cloud_integration_service_already_exists")
|
||||||
|
ErrCodeServiceDefinitionNotFound = errors.MustNewCode("service_definition_not_found")
|
||||||
)
|
)
|
||||||
|
|
||||||
// StorableCloudIntegration represents a cloud integration stored in the database.
|
// StorableCloudIntegration represents a cloud integration stored in the database.
|
||||||
@@ -52,6 +58,26 @@ type StorableCloudIntegrationService struct {
|
|||||||
CloudIntegrationID valuer.UUID `bun:"cloud_integration_id,type:text"`
|
CloudIntegrationID valuer.UUID `bun:"cloud_integration_id,type:text"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Following Service config types are only internally used to store service config in DB and use JSON snake case keys for backward compatibility.
|
||||||
|
|
||||||
|
type StorableServiceConfig struct {
|
||||||
|
AWS *StorableAWSServiceConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
type StorableAWSServiceConfig struct {
|
||||||
|
Logs *StorableAWSLogsServiceConfig `json:"logs,omitempty"`
|
||||||
|
Metrics *StorableAWSMetricsServiceConfig `json:"metrics,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type StorableAWSLogsServiceConfig struct {
|
||||||
|
Enabled bool `json:"enabled"`
|
||||||
|
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // region -> list of buckets in that region
|
||||||
|
}
|
||||||
|
|
||||||
|
type StorableAWSMetricsServiceConfig struct {
|
||||||
|
Enabled bool `json:"enabled"`
|
||||||
|
}
|
||||||
|
|
||||||
// Scan scans value from DB.
|
// Scan scans value from DB.
|
||||||
func (r *StorableAgentReport) Scan(src any) error {
|
func (r *StorableAgentReport) Scan(src any) error {
|
||||||
var data []byte
|
var data []byte
|
||||||
@@ -68,10 +94,6 @@ func (r *StorableAgentReport) Scan(src any) error {
|
|||||||
|
|
||||||
// Value creates value to be stored in DB.
|
// Value creates value to be stored in DB.
|
||||||
func (r *StorableAgentReport) Value() (driver.Value, error) {
|
func (r *StorableAgentReport) Value() (driver.Value, error) {
|
||||||
if r == nil {
|
|
||||||
return nil, errors.NewInternalf(errors.CodeInternal, "agent report is nil")
|
|
||||||
}
|
|
||||||
|
|
||||||
serialized, err := json.Marshal(r)
|
serialized, err := json.Marshal(r)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, errors.WrapInternalf(
|
return nil, errors.WrapInternalf(
|
||||||
@@ -81,3 +103,107 @@ func (r *StorableAgentReport) Value() (driver.Value, error) {
|
|||||||
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytes
|
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytes
|
||||||
return string(serialized), nil
|
return string(serialized), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func NewStorableCloudIntegration(account *Account) (*StorableCloudIntegration, error) {
|
||||||
|
configBytes, err := account.Config.ToJSON()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
storableAccount := &StorableCloudIntegration{
|
||||||
|
Identifiable: account.Identifiable,
|
||||||
|
TimeAuditable: account.TimeAuditable,
|
||||||
|
Provider: account.Provider,
|
||||||
|
Config: string(configBytes),
|
||||||
|
AccountID: account.ProviderAccountID,
|
||||||
|
OrgID: account.OrgID,
|
||||||
|
RemovedAt: account.RemovedAt,
|
||||||
|
}
|
||||||
|
|
||||||
|
if account.AgentReport != nil {
|
||||||
|
storableAccount.LastAgentReport = &StorableAgentReport{
|
||||||
|
TimestampMillis: account.AgentReport.TimestampMillis,
|
||||||
|
Data: account.AgentReport.Data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return storableAccount, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewStorableCloudIntegrationService creates a new StorableCloudIntegrationService with
|
||||||
|
// generated ID and timestamps from a CloudIntegrationService and its serialized config JSON.
|
||||||
|
func NewStorableCloudIntegrationService(svc *CloudIntegrationService, configJSON string) *StorableCloudIntegrationService {
|
||||||
|
return &StorableCloudIntegrationService{
|
||||||
|
Identifiable: svc.Identifiable,
|
||||||
|
TimeAuditable: svc.TimeAuditable,
|
||||||
|
Type: svc.Type,
|
||||||
|
Config: configJSON,
|
||||||
|
CloudIntegrationID: svc.CloudIntegrationID,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (account *StorableCloudIntegration) Update(providerAccountID *string, agentReport *AgentReport) {
|
||||||
|
account.AccountID = providerAccountID
|
||||||
|
if agentReport != nil {
|
||||||
|
account.LastAgentReport = &StorableAgentReport{
|
||||||
|
TimestampMillis: agentReport.TimestampMillis,
|
||||||
|
Data: agentReport.Data,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// following StorableServiceConfig related functions are helper functions to convert between JSON string and ServiceConfig domain struct.
|
||||||
|
func newStorableServiceConfig(provider CloudProviderType, serviceID ServiceID, serviceConfig *ServiceConfig, supportedSignals *SupportedSignals) *StorableServiceConfig {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
storableAWSServiceConfig := new(StorableAWSServiceConfig)
|
||||||
|
|
||||||
|
if supportedSignals.Logs {
|
||||||
|
storableAWSServiceConfig.Logs = &StorableAWSLogsServiceConfig{
|
||||||
|
Enabled: serviceConfig.AWS.Logs.Enabled,
|
||||||
|
}
|
||||||
|
|
||||||
|
if serviceID == AWSServiceS3Sync {
|
||||||
|
storableAWSServiceConfig.Logs.S3Buckets = serviceConfig.AWS.Logs.S3Buckets
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if supportedSignals.Metrics {
|
||||||
|
storableAWSServiceConfig.Metrics = &StorableAWSMetricsServiceConfig{
|
||||||
|
Enabled: serviceConfig.AWS.Metrics.Enabled,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &StorableServiceConfig{AWS: storableAWSServiceConfig}
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newStorableServiceConfigFromJSON(provider CloudProviderType, jsonStr string) (*StorableServiceConfig, error) {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
awsConfig := new(StorableAWSServiceConfig)
|
||||||
|
err := json.Unmarshal([]byte(jsonStr), awsConfig)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't parse AWS service config JSON")
|
||||||
|
}
|
||||||
|
return &StorableServiceConfig{AWS: awsConfig}, nil
|
||||||
|
default:
|
||||||
|
return nil, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (config *StorableServiceConfig) toJSON(provider CloudProviderType) ([]byte, error) {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
jsonBytes, err := json.Marshal(config.AWS)
|
||||||
|
if err != nil {
|
||||||
|
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize AWS service config to JSON")
|
||||||
|
}
|
||||||
|
|
||||||
|
return jsonBytes, nil
|
||||||
|
default:
|
||||||
|
return nil, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
package cloudintegrationtypes
|
package cloudintegrationtypes
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
"github.com/SigNoz/signoz/pkg/valuer"
|
||||||
)
|
)
|
||||||
@@ -14,19 +16,13 @@ var (
|
|||||||
CloudProviderTypeAzure = CloudProviderType{valuer.NewString("azure")}
|
CloudProviderTypeAzure = CloudProviderType{valuer.NewString("azure")}
|
||||||
|
|
||||||
// errors.
|
// errors.
|
||||||
ErrCodeCloudProviderInvalidInput = errors.MustNewCode("invalid_cloud_provider")
|
ErrCodeCloudProviderInvalidInput = errors.MustNewCode("cloud_integration_invalid_cloud_provider")
|
||||||
|
|
||||||
AWSIntegrationUserEmail = valuer.MustNewEmail("aws-integration@signoz.io")
|
CloudFormationQuickCreateBaseURL = valuer.NewString("https://%s.console.aws.amazon.com/cloudformation/home")
|
||||||
AzureIntegrationUserEmail = valuer.MustNewEmail("azure-integration@signoz.io")
|
AgentCloudFormationTemplateS3Path = valuer.NewString("https://signoz-integrations.s3.us-east-1.amazonaws.com/aws-quickcreate-template-%s.json")
|
||||||
|
AgentCloudFormationBaseStackName = valuer.NewString("signoz-integration")
|
||||||
)
|
)
|
||||||
|
|
||||||
// CloudIntegrationUserEmails is the list of valid emails for Cloud One Click integrations.
|
|
||||||
// This is used for validation and restrictions in different contexts, across codebase.
|
|
||||||
var CloudIntegrationUserEmails = []valuer.Email{
|
|
||||||
AWSIntegrationUserEmail,
|
|
||||||
AzureIntegrationUserEmail,
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewCloudProvider returns a new CloudProviderType from a string.
|
// NewCloudProvider returns a new CloudProviderType from a string.
|
||||||
// It validates the input and returns an error if the input is not valid cloud provider.
|
// It validates the input and returns an error if the input is not valid cloud provider.
|
||||||
func NewCloudProvider(provider string) (CloudProviderType, error) {
|
func NewCloudProvider(provider string) (CloudProviderType, error) {
|
||||||
@@ -39,3 +35,7 @@ func NewCloudProvider(provider string) (CloudProviderType, error) {
|
|||||||
return CloudProviderType{}, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
|
return CloudProviderType{}, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func NewIngestionKeyName(provider CloudProviderType) string {
|
||||||
|
return fmt.Sprintf("%s-integration", provider.StringValue())
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,81 +0,0 @@
|
|||||||
package cloudintegrationtypes
|
|
||||||
|
|
||||||
import (
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/valuer"
|
|
||||||
)
|
|
||||||
|
|
||||||
type ConnectionArtifactRequest struct {
|
|
||||||
// required till new providers are added
|
|
||||||
Aws *AWSConnectionArtifactRequest `json:"aws" required:"true" nullable:"false"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AWSConnectionArtifactRequest struct {
|
|
||||||
DeploymentRegion string `json:"deploymentRegion" required:"true"`
|
|
||||||
Regions []string `json:"regions" required:"true" nullable:"false"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type PostableConnectionArtifact = ConnectionArtifactRequest
|
|
||||||
|
|
||||||
type ConnectionArtifact struct {
|
|
||||||
// required till new providers are added
|
|
||||||
Aws *AWSConnectionArtifact `json:"aws" required:"true" nullable:"false"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AWSConnectionArtifact struct {
|
|
||||||
ConnectionURL string `json:"connectionURL" required:"true"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type GettableAccountWithArtifact struct {
|
|
||||||
ID valuer.UUID `json:"id" required:"true"`
|
|
||||||
Artifact *ConnectionArtifact `json:"connectionArtifact" required:"true"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AgentCheckInRequest struct {
|
|
||||||
ProviderAccountID string `json:"providerAccountId" required:"false"`
|
|
||||||
CloudIntegrationID string `json:"cloudIntegrationId" required:"false"`
|
|
||||||
|
|
||||||
Data map[string]any `json:"data" required:"true" nullable:"true"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type PostableAgentCheckInRequest struct {
|
|
||||||
AgentCheckInRequest
|
|
||||||
// following are backward compatible fields for older running agents
|
|
||||||
// which gets mapped to new fields in AgentCheckInRequest
|
|
||||||
ID string `json:"account_id" required:"false"` // => CloudIntegrationID
|
|
||||||
AccountID string `json:"cloud_account_id" required:"false"` // => ProviderAccountID
|
|
||||||
}
|
|
||||||
|
|
||||||
type AgentCheckInResponse struct {
|
|
||||||
CloudIntegrationID string `json:"cloudIntegrationId" required:"true"`
|
|
||||||
ProviderAccountID string `json:"providerAccountId" required:"true"`
|
|
||||||
IntegrationConfig *ProviderIntegrationConfig `json:"integrationConfig" required:"true"`
|
|
||||||
RemovedAt *time.Time `json:"removedAt" required:"true" nullable:"true"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type GettableAgentCheckInResponse struct {
|
|
||||||
// Older fields for backward compatibility with existing AWS agents
|
|
||||||
AccountID string `json:"account_id" required:"true"`
|
|
||||||
CloudAccountID string `json:"cloud_account_id" required:"true"`
|
|
||||||
OlderIntegrationConfig *IntegrationConfig `json:"integration_config" required:"true" nullable:"true"`
|
|
||||||
OlderRemovedAt *time.Time `json:"removed_at" required:"true" nullable:"true"`
|
|
||||||
|
|
||||||
AgentCheckInResponse
|
|
||||||
}
|
|
||||||
|
|
||||||
// IntegrationConfig older integration config struct for backward compatibility,
|
|
||||||
// this will be eventually removed once agents are updated to use new struct.
|
|
||||||
type IntegrationConfig struct {
|
|
||||||
EnabledRegions []string `json:"enabled_regions" required:"true" nullable:"false"` // backward compatible
|
|
||||||
Telemetry *AWSCollectionStrategy `json:"telemetry" required:"true" nullable:"false"` // backward compatible
|
|
||||||
}
|
|
||||||
|
|
||||||
type ProviderIntegrationConfig struct {
|
|
||||||
AWS *AWSIntegrationConfig `json:"aws" required:"true" nullable:"false"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AWSIntegrationConfig struct {
|
|
||||||
EnabledRegions []string `json:"enabledRegions" required:"true" nullable:"false"`
|
|
||||||
Telemetry *AWSCollectionStrategy `json:"telemetry" required:"true" nullable:"false"`
|
|
||||||
}
|
|
||||||
@@ -4,10 +4,7 @@ import (
|
|||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var ErrCodeInvalidCloudRegion = errors.MustNewCode("invalid_cloud_region")
|
||||||
ErrCodeInvalidCloudRegion = errors.MustNewCode("invalid_cloud_region")
|
|
||||||
ErrCodeMismatchCloudProvider = errors.MustNewCode("cloud_provider_mismatch")
|
|
||||||
)
|
|
||||||
|
|
||||||
// List of all valid cloud regions on Amazon Web Services.
|
// List of all valid cloud regions on Amazon Web Services.
|
||||||
var ValidAWSRegions = map[string]struct{}{
|
var ValidAWSRegions = map[string]struct{}{
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ package cloudintegrationtypes
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/SigNoz/signoz/pkg/errors"
|
"github.com/SigNoz/signoz/pkg/errors"
|
||||||
@@ -25,6 +26,22 @@ type ServiceConfig struct {
|
|||||||
AWS *AWSServiceConfig `json:"aws" required:"true" nullable:"false"`
|
AWS *AWSServiceConfig `json:"aws" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type AWSServiceConfig struct {
|
||||||
|
Logs *AWSServiceLogsConfig `json:"logs"`
|
||||||
|
Metrics *AWSServiceMetricsConfig `json:"metrics"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AWSServiceLogsConfig is AWS specific logs config for a service
|
||||||
|
// NOTE: the JSON keys are snake case for backward compatibility with existing agents.
|
||||||
|
type AWSServiceLogsConfig struct {
|
||||||
|
Enabled bool `json:"enabled"`
|
||||||
|
S3Buckets map[string][]string `json:"s3Buckets,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AWSServiceMetricsConfig struct {
|
||||||
|
Enabled bool `json:"enabled"`
|
||||||
|
}
|
||||||
|
|
||||||
// ServiceMetadata helps to quickly list available services and whether it is enabled or not.
|
// ServiceMetadata helps to quickly list available services and whether it is enabled or not.
|
||||||
// As getting complete service definition is a heavy operation and the response is also large,
|
// As getting complete service definition is a heavy operation and the response is also large,
|
||||||
// initial integration page load can be very slow.
|
// initial integration page load can be very slow.
|
||||||
@@ -45,24 +62,24 @@ type GettableServicesMetadata struct {
|
|||||||
Services []*ServiceMetadata `json:"services" required:"true" nullable:"false"`
|
Services []*ServiceMetadata `json:"services" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Service represents a cloud integration service with its definition,
|
||||||
|
// cloud integration service is non nil only when the service entry exists in DB with ANY config (enabled or disabled).
|
||||||
type Service struct {
|
type Service struct {
|
||||||
ServiceDefinition
|
ServiceDefinition
|
||||||
ServiceConfig *ServiceConfig `json:"serviceConfig" required:"false" nullable:"false"`
|
CloudIntegrationService *CloudIntegrationService `json:"cloudIntegrationService" required:"true" nullable:"true"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type GettableService = Service
|
|
||||||
|
|
||||||
type UpdatableService struct {
|
type UpdatableService struct {
|
||||||
Config *ServiceConfig `json:"config" required:"true" nullable:"false"`
|
Config *ServiceConfig `json:"config" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
type ServiceDefinition struct {
|
type ServiceDefinition struct {
|
||||||
ServiceDefinitionMetadata
|
ServiceDefinitionMetadata
|
||||||
Overview string `json:"overview" required:"true"` // markdown
|
Overview string `json:"overview" required:"true"` // markdown
|
||||||
Assets Assets `json:"assets" required:"true"`
|
Assets Assets `json:"assets" required:"true"`
|
||||||
SupportedSignals SupportedSignals `json:"supported_signals" required:"true"`
|
SupportedSignals SupportedSignals `json:"supportedSignals" required:"true"`
|
||||||
DataCollected DataCollected `json:"dataCollected" required:"true"`
|
DataCollected DataCollected `json:"dataCollected" required:"true"`
|
||||||
Strategy *CollectionStrategy `json:"telemetryCollectionStrategy" required:"true" nullable:"false"`
|
TelemetryCollectionStrategy *TelemetryCollectionStrategy `json:"telemetryCollectionStrategy" required:"true" nullable:"false"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// SupportedSignals for cloud provider's service.
|
// SupportedSignals for cloud provider's service.
|
||||||
@@ -77,26 +94,10 @@ type DataCollected struct {
|
|||||||
Metrics []CollectedMetric `json:"metrics"`
|
Metrics []CollectedMetric `json:"metrics"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// CollectionStrategy is cloud provider specific configuration for signal collection,
|
// TelemetryCollectionStrategy is cloud provider specific configuration for signal collection,
|
||||||
// this is used by agent to understand the nitty-gritty for collecting telemetry for the cloud provider.
|
// this is used by agent to understand the nitty-gritty for collecting telemetry for the cloud provider.
|
||||||
type CollectionStrategy struct {
|
type TelemetryCollectionStrategy struct {
|
||||||
AWS *AWSCollectionStrategy `json:"aws" required:"true" nullable:"false"`
|
AWS *AWSTelemetryCollectionStrategy `json:"aws" required:"true" nullable:"false"`
|
||||||
}
|
|
||||||
|
|
||||||
type AWSServiceConfig struct {
|
|
||||||
Logs *AWSServiceLogsConfig `json:"logs"`
|
|
||||||
Metrics *AWSServiceMetricsConfig `json:"metrics"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// AWSServiceLogsConfig is AWS specific logs config for a service
|
|
||||||
// NOTE: the JSON keys are snake case for backward compatibility with existing agents.
|
|
||||||
type AWSServiceLogsConfig struct {
|
|
||||||
Enabled bool `json:"enabled"`
|
|
||||||
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type AWSServiceMetricsConfig struct {
|
|
||||||
Enabled bool `json:"enabled"`
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Assets represents the collection of dashboards.
|
// Assets represents the collection of dashboards.
|
||||||
@@ -120,46 +121,65 @@ type CollectedMetric struct {
|
|||||||
Description string `json:"description"`
|
Description string `json:"description"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// AWSCollectionStrategy represents signal collection strategy for AWS services.
|
// OldAWSCollectionStrategy is the backward-compatible snake_case form of AWSCollectionStrategy,
|
||||||
// this is AWS specific.
|
// used in the legacy integration_config response field for older agents.
|
||||||
// NOTE: this structure is still using snake case, for backward compatibility,
|
type OldAWSCollectionStrategy struct {
|
||||||
// with existing agents.
|
Provider string `json:"provider"`
|
||||||
type AWSCollectionStrategy struct {
|
Metrics *OldAWSMetricsStrategy `json:"aws_metrics,omitempty"`
|
||||||
Metrics *AWSMetricsStrategy `json:"aws_metrics,omitempty"`
|
Logs *OldAWSLogsStrategy `json:"aws_logs,omitempty"`
|
||||||
Logs *AWSLogsStrategy `json:"aws_logs,omitempty"`
|
S3Buckets map[string][]string `json:"s3_buckets,omitempty"`
|
||||||
S3Buckets map[string][]string `json:"s3_buckets,omitempty"` // Only available in S3 Sync Service Type in AWS
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// AWSMetricsStrategy represents metrics collection strategy for AWS services.
|
// OldAWSMetricsStrategy is the snake_case form of AWSMetricsStrategy for older agents.
|
||||||
// this is AWS specific.
|
type OldAWSMetricsStrategy struct {
|
||||||
// NOTE: this structure is still using snake case, for backward compatibility,
|
|
||||||
// with existing agents.
|
|
||||||
type AWSMetricsStrategy struct {
|
|
||||||
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
|
|
||||||
StreamFilters []struct {
|
StreamFilters []struct {
|
||||||
// json tags here are in the shape expected by AWS API as detailed at
|
|
||||||
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
|
|
||||||
Namespace string `json:"Namespace"`
|
Namespace string `json:"Namespace"`
|
||||||
MetricNames []string `json:"MetricNames,omitempty"`
|
MetricNames []string `json:"MetricNames,omitempty"`
|
||||||
} `json:"cloudwatch_metric_stream_filters"`
|
} `json:"cloudwatch_metric_stream_filters"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// AWSLogsStrategy represents logs collection strategy for AWS services.
|
// OldAWSLogsStrategy is the snake_case form of AWSLogsStrategy for older agents.
|
||||||
// this is AWS specific.
|
type OldAWSLogsStrategy struct {
|
||||||
// NOTE: this structure is still using snake case, for backward compatibility,
|
|
||||||
// with existing agents.
|
|
||||||
type AWSLogsStrategy struct {
|
|
||||||
Subscriptions []struct {
|
Subscriptions []struct {
|
||||||
// subscribe to all logs groups with specified prefix.
|
|
||||||
// eg: `/aws/rds/`
|
|
||||||
LogGroupNamePrefix string `json:"log_group_name_prefix"`
|
LogGroupNamePrefix string `json:"log_group_name_prefix"`
|
||||||
|
FilterPattern string `json:"filter_pattern"`
|
||||||
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
|
|
||||||
// "" implies no filtering is required.
|
|
||||||
FilterPattern string `json:"filter_pattern"`
|
|
||||||
} `json:"cloudwatch_logs_subscriptions"`
|
} `json:"cloudwatch_logs_subscriptions"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// AWSTelemetryCollectionStrategy represents signal collection strategy for AWS services.
|
||||||
|
type AWSTelemetryCollectionStrategy struct {
|
||||||
|
Metrics *AWSMetricsCollectionStrategy `json:"metrics,omitempty" required:"false" nullable:"false"`
|
||||||
|
Logs *AWSLogsCollectionStrategy `json:"logs,omitempty" required:"false" nullable:"false"`
|
||||||
|
S3Buckets map[string][]string `json:"s3Buckets,omitempty" required:"false"` // Only available in S3 Sync Service Type in AWS
|
||||||
|
}
|
||||||
|
|
||||||
|
// AWSMetricsCollectionStrategy represents metrics collection strategy for AWS services.
|
||||||
|
type AWSMetricsCollectionStrategy struct {
|
||||||
|
// to be used as https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-metricstream.html#cfn-cloudwatch-metricstream-includefilters
|
||||||
|
StreamFilters []*AWSCloudWatchMetricStreamFilter `json:"streamFilters" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AWSCloudWatchMetricStreamFilter struct {
|
||||||
|
// https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-metricstream-metricstreamfilter.html
|
||||||
|
Namespace string `json:"namespace" required:"true"`
|
||||||
|
MetricNames []string `json:"metricNames,omitempty" required:"false" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// AWSLogsCollectionStrategy represents logs collection strategy for AWS services.
|
||||||
|
type AWSLogsCollectionStrategy struct {
|
||||||
|
Subscriptions []*AWSCloudWatchLogsSubscription `json:"subscriptions" required:"true" nullable:"false"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type AWSCloudWatchLogsSubscription struct {
|
||||||
|
// subscribe to all logs groups with specified prefix.
|
||||||
|
// eg: `/aws/rds/`
|
||||||
|
LogGroupNamePrefix string `json:"logGroupNamePrefix" required:"true"`
|
||||||
|
|
||||||
|
// https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html
|
||||||
|
// "" implies no filtering is required
|
||||||
|
FilterPattern string `json:"filterPattern" required:"true"`
|
||||||
|
}
|
||||||
|
|
||||||
// Dashboard represents a dashboard definition for cloud integration.
|
// Dashboard represents a dashboard definition for cloud integration.
|
||||||
// This is used to show available pre-made dashboards for a service,
|
// This is used to show available pre-made dashboards for a service,
|
||||||
// hence has additional fields like id, title and description.
|
// hence has additional fields like id, title and description.
|
||||||
@@ -170,12 +190,156 @@ type Dashboard struct {
|
|||||||
Definition dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
|
Definition dashboardtypes.StorableDashboardData `json:"definition,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// UTILS
|
func NewCloudIntegrationService(serviceID ServiceID, cloudIntegrationID valuer.UUID, config *ServiceConfig) *CloudIntegrationService {
|
||||||
|
return &CloudIntegrationService{
|
||||||
|
Identifiable: types.Identifiable{
|
||||||
|
ID: valuer.GenerateUUID(),
|
||||||
|
},
|
||||||
|
TimeAuditable: types.TimeAuditable{
|
||||||
|
CreatedAt: time.Now(),
|
||||||
|
UpdatedAt: time.Now(),
|
||||||
|
},
|
||||||
|
Type: serviceID,
|
||||||
|
Config: config,
|
||||||
|
CloudIntegrationID: cloudIntegrationID,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewCloudIntegrationServiceFromStorable(stored *StorableCloudIntegrationService, config *ServiceConfig) *CloudIntegrationService {
|
||||||
|
return &CloudIntegrationService{
|
||||||
|
Identifiable: stored.Identifiable,
|
||||||
|
TimeAuditable: stored.TimeAuditable,
|
||||||
|
Type: stored.Type,
|
||||||
|
Config: config,
|
||||||
|
CloudIntegrationID: stored.CloudIntegrationID,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewServiceMetadata(definition ServiceDefinition, enabled bool) *ServiceMetadata {
|
||||||
|
return &ServiceMetadata{
|
||||||
|
ServiceDefinitionMetadata: definition.ServiceDefinitionMetadata,
|
||||||
|
Enabled: enabled,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewService(def ServiceDefinition, storableService *CloudIntegrationService) *Service {
|
||||||
|
return &Service{
|
||||||
|
ServiceDefinition: def,
|
||||||
|
CloudIntegrationService: storableService,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewServiceConfigFromJSON(provider CloudProviderType, jsonString string) (*ServiceConfig, error) {
|
||||||
|
storableServiceConfig, err := newStorableServiceConfigFromJSON(provider, jsonString)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
awsServiceConfig := new(AWSServiceConfig)
|
||||||
|
|
||||||
|
if storableServiceConfig.AWS.Logs != nil {
|
||||||
|
awsServiceConfig.Logs = &AWSServiceLogsConfig{
|
||||||
|
Enabled: storableServiceConfig.AWS.Logs.Enabled,
|
||||||
|
S3Buckets: storableServiceConfig.AWS.Logs.S3Buckets,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if storableServiceConfig.AWS.Metrics != nil {
|
||||||
|
awsServiceConfig.Metrics = &AWSServiceMetricsConfig{
|
||||||
|
Enabled: storableServiceConfig.AWS.Metrics.Enabled,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &ServiceConfig{AWS: awsServiceConfig}, nil
|
||||||
|
default:
|
||||||
|
return nil, errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update sets the service config.
|
||||||
|
func (service *CloudIntegrationService) Update(config *ServiceConfig) {
|
||||||
|
service.Config = config
|
||||||
|
service.UpdatedAt = time.Now()
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsServiceEnabled returns true if the service has at least one signal (logs or metrics) enabled
|
||||||
|
// for the given cloud provider.
|
||||||
|
func (config *ServiceConfig) IsServiceEnabled(provider CloudProviderType) bool {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
logsEnabled := config.AWS.Logs != nil && config.AWS.Logs.Enabled
|
||||||
|
metricsEnabled := config.AWS.Metrics != nil && config.AWS.Metrics.Enabled
|
||||||
|
return logsEnabled || metricsEnabled
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsMetricsEnabled returns true if metrics are explicitly enabled for the given cloud provider.
|
||||||
|
// Used to gate dashboard availability — dashboards are only shown when metrics are enabled.
|
||||||
|
func (config *ServiceConfig) IsMetricsEnabled(provider CloudProviderType) bool {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
return config.AWS.Metrics != nil && config.AWS.Metrics.Enabled
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// IsLogsEnabled returns true if logs are explicitly enabled for the given cloud provider.
|
||||||
|
func (config *ServiceConfig) IsLogsEnabled(provider CloudProviderType) bool {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
return config.AWS.Logs != nil && config.AWS.Logs.Enabled
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (config *ServiceConfig) ToJSON(provider CloudProviderType, serviceID ServiceID, supportedSignals *SupportedSignals) ([]byte, error) {
|
||||||
|
storableServiceConfig := newStorableServiceConfig(provider, serviceID, config, supportedSignals)
|
||||||
|
return storableServiceConfig.toJSON(provider)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (updatableService *UpdatableService) Validate(provider CloudProviderType, serviceID ServiceID) error {
|
||||||
|
switch provider {
|
||||||
|
case CloudProviderTypeAWS:
|
||||||
|
if updatableService.Config.AWS == nil {
|
||||||
|
return errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "AWS config is required for AWS service")
|
||||||
|
}
|
||||||
|
|
||||||
|
if serviceID == AWSServiceS3Sync {
|
||||||
|
if updatableService.Config.AWS.Logs == nil || updatableService.Config.AWS.Logs.S3Buckets == nil {
|
||||||
|
return errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "AWS S3 Sync service requires S3 bucket configuration for logs")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
default:
|
||||||
|
return errors.NewInvalidInputf(ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// GetCloudIntegrationDashboardID returns the dashboard id for a cloud integration, given the cloud provider, service id, and dashboard id.
|
// GetCloudIntegrationDashboardID returns the dashboard id for a cloud integration, given the cloud provider, service id, and dashboard id.
|
||||||
// This is used to generate unique dashboard ids for cloud integration, and also to parse the dashboard id to get the cloud provider and service id when needed.
|
// This is used to generate unique dashboard ids for cloud integration, and also to parse the dashboard id to get the cloud provider and service id when needed.
|
||||||
func GetCloudIntegrationDashboardID(cloudProvider CloudProviderType, svcID, dashboardID string) string {
|
func GetCloudIntegrationDashboardID(cloudProvider CloudProviderType, svcID, dashboardID string) string {
|
||||||
return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider, svcID, dashboardID)
|
return fmt.Sprintf("cloud-integration--%s--%s--%s", cloudProvider.StringValue(), svcID, dashboardID)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ParseCloudIntegrationDashboardID parses a dashboard id generated by GetCloudIntegrationDashboardID
|
||||||
|
// into its constituent parts (cloudProvider, serviceID, dashboardID).
|
||||||
|
func ParseCloudIntegrationDashboardID(id string) (CloudProviderType, string, string, error) {
|
||||||
|
parts := strings.SplitN(id, "--", 4)
|
||||||
|
if len(parts) != 4 || parts[0] != "cloud-integration" {
|
||||||
|
return CloudProviderType{}, "", "", errors.New(errors.TypeNotFound, ErrCodeCloudIntegrationNotFound, "invalid cloud integration dashboard id")
|
||||||
|
}
|
||||||
|
provider, err := NewCloudProvider(parts[1])
|
||||||
|
if err != nil {
|
||||||
|
return CloudProviderType{}, "", "", err
|
||||||
|
}
|
||||||
|
return provider, parts[2], parts[3], nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetDashboardsFromAssets returns the list of dashboards for the cloud provider service from definition.
|
// GetDashboardsFromAssets returns the list of dashboards for the cloud provider service from definition.
|
||||||
@@ -189,9 +353,9 @@ func GetDashboardsFromAssets(
|
|||||||
dashboards := make([]*dashboardtypes.Dashboard, 0)
|
dashboards := make([]*dashboardtypes.Dashboard, 0)
|
||||||
|
|
||||||
for _, d := range assets.Dashboards {
|
for _, d := range assets.Dashboards {
|
||||||
author := fmt.Sprintf("%s-integration", cloudProvider)
|
author := fmt.Sprintf("%s-integration", cloudProvider.StringValue())
|
||||||
dashboards = append(dashboards, &dashboardtypes.Dashboard{
|
dashboards = append(dashboards, &dashboardtypes.Dashboard{
|
||||||
ID: GetCloudIntegrationDashboardID(cloudProvider, svcID, d.ID),
|
ID: d.ID,
|
||||||
Locked: true,
|
Locked: true,
|
||||||
OrgID: orgID,
|
OrgID: orgID,
|
||||||
Data: d.Definition,
|
Data: d.Definition,
|
||||||
@@ -208,3 +372,53 @@ func GetDashboardsFromAssets(
|
|||||||
|
|
||||||
return dashboards
|
return dashboards
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// awsOlderIntegrationConfig converts a ProviderIntegrationConfig into the legacy snake_case
|
||||||
|
// IntegrationConfig format consumed by older AWS agents. Returns nil if AWS config is absent.
|
||||||
|
func awsOlderIntegrationConfig(cfg *ProviderIntegrationConfig) *IntegrationConfig {
|
||||||
|
if cfg == nil || cfg.AWS == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
awsCfg := cfg.AWS
|
||||||
|
|
||||||
|
older := &IntegrationConfig{
|
||||||
|
EnabledRegions: awsCfg.EnabledRegions,
|
||||||
|
}
|
||||||
|
|
||||||
|
if awsCfg.TelemetryCollectionStrategy == nil {
|
||||||
|
return older
|
||||||
|
}
|
||||||
|
|
||||||
|
// Older agents expect a "provider" field and fully snake_case keys inside telemetry.
|
||||||
|
oldTelemetry := &OldAWSCollectionStrategy{
|
||||||
|
Provider: CloudProviderTypeAWS.StringValue(),
|
||||||
|
S3Buckets: awsCfg.TelemetryCollectionStrategy.S3Buckets,
|
||||||
|
}
|
||||||
|
|
||||||
|
if awsCfg.TelemetryCollectionStrategy.Metrics != nil {
|
||||||
|
// Convert camelCase cloudwatchMetricStreamFilters → snake_case cloudwatch_metric_stream_filters
|
||||||
|
oldMetrics := &OldAWSMetricsStrategy{}
|
||||||
|
for _, f := range awsCfg.TelemetryCollectionStrategy.Metrics.StreamFilters {
|
||||||
|
oldMetrics.StreamFilters = append(oldMetrics.StreamFilters, struct {
|
||||||
|
Namespace string `json:"Namespace"`
|
||||||
|
MetricNames []string `json:"MetricNames,omitempty"`
|
||||||
|
}{Namespace: f.Namespace, MetricNames: f.MetricNames})
|
||||||
|
}
|
||||||
|
oldTelemetry.Metrics = oldMetrics
|
||||||
|
}
|
||||||
|
|
||||||
|
if awsCfg.TelemetryCollectionStrategy.Logs != nil {
|
||||||
|
// Convert camelCase cloudwatchLogsSubscriptions → snake_case cloudwatch_logs_subscriptions
|
||||||
|
oldLogs := &OldAWSLogsStrategy{}
|
||||||
|
for _, s := range awsCfg.TelemetryCollectionStrategy.Logs.Subscriptions {
|
||||||
|
oldLogs.Subscriptions = append(oldLogs.Subscriptions, struct {
|
||||||
|
LogGroupNamePrefix string `json:"log_group_name_prefix"`
|
||||||
|
FilterPattern string `json:"filter_pattern"`
|
||||||
|
}{LogGroupNamePrefix: s.LogGroupNamePrefix, FilterPattern: s.FilterPattern})
|
||||||
|
}
|
||||||
|
oldTelemetry.Logs = oldLogs
|
||||||
|
}
|
||||||
|
|
||||||
|
older.Telemetry = oldTelemetry
|
||||||
|
return older
|
||||||
|
}
|
||||||
|
|||||||
@@ -38,6 +38,8 @@ type Store interface {
|
|||||||
|
|
||||||
// UpdateService updates an existing cloud integration service
|
// UpdateService updates an existing cloud integration service
|
||||||
UpdateService(ctx context.Context, service *StorableCloudIntegrationService) error
|
UpdateService(ctx context.Context, service *StorableCloudIntegrationService) error
|
||||||
|
|
||||||
|
RunInTx(context.Context, func(ctx context.Context) error) error
|
||||||
}
|
}
|
||||||
|
|
||||||
type ServiceDefinitionStore interface {
|
type ServiceDefinitionStore interface {
|
||||||
|
|||||||
@@ -7,11 +7,13 @@ type Source struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
SourceAudit = Source{valuer.NewString("audit")}
|
||||||
SourceMeter = Source{valuer.NewString("meter")}
|
SourceMeter = Source{valuer.NewString("meter")}
|
||||||
SourceUnspecified = Source{valuer.NewString("")}
|
SourceUnspecified = Source{valuer.NewString("")}
|
||||||
)
|
)
|
||||||
|
|
||||||
// Enum returns the acceptable values for Source.
|
// Enum returns the acceptable values for Source.
|
||||||
|
// TODO: Add SourceAudit once the frontend is ready for consumption.
|
||||||
func (Source) Enum() []any {
|
func (Source) Enum() []any {
|
||||||
return []any{
|
return []any{
|
||||||
SourceMeter,
|
SourceMeter,
|
||||||
|
|||||||
247
pkg/types/tracedetailtypes/waterfall.go
Normal file
247
pkg/types/tracedetailtypes/waterfall.go
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
package tracedetailtypes
|
||||||
|
|
||||||
|
import (
|
||||||
|
"encoding/json"
|
||||||
|
"maps"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/SigNoz/signoz/pkg/types/cachetypes"
|
||||||
|
)
|
||||||
|
|
||||||
|
// WaterfallRequest is the request body for the v3 waterfall API.
|
||||||
|
type WaterfallRequest struct {
|
||||||
|
SelectedSpanID string `json:"selectedSpanId"`
|
||||||
|
UncollapsedSpans []string `json:"uncollapsedSpans"`
|
||||||
|
Limit uint `json:"limit"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// WaterfallResponse is the response for the v3 waterfall API.
|
||||||
|
type WaterfallResponse struct {
|
||||||
|
StartTimestampMillis uint64 `json:"startTimestampMillis"`
|
||||||
|
EndTimestampMillis uint64 `json:"endTimestampMillis"`
|
||||||
|
DurationNano uint64 `json:"durationNano"`
|
||||||
|
RootServiceName string `json:"rootServiceName"`
|
||||||
|
RootServiceEntryPoint string `json:"rootServiceEntryPoint"`
|
||||||
|
TotalSpansCount uint64 `json:"totalSpansCount"`
|
||||||
|
TotalErrorSpansCount uint64 `json:"totalErrorSpansCount"`
|
||||||
|
ServiceNameToTotalDurationMap map[string]uint64 `json:"serviceNameToTotalDurationMap"`
|
||||||
|
Spans []*WaterfallSpan `json:"spans"`
|
||||||
|
HasMissingSpans bool `json:"hasMissingSpans"`
|
||||||
|
UncollapsedSpans []string `json:"uncollapsedSpans"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Event represents a span event.
|
||||||
|
type Event struct {
|
||||||
|
Name string `json:"name,omitempty"`
|
||||||
|
TimeUnixNano uint64 `json:"timeUnixNano,omitempty"`
|
||||||
|
AttributeMap map[string]any `json:"attributeMap,omitempty"`
|
||||||
|
IsError bool `json:"isError,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// WaterfallSpan represents the span in waterfall response,
|
||||||
|
// this uses snake_case keys for response as a special case since these
|
||||||
|
// keys can be directly used to query spans and client need to know the actual fields.
|
||||||
|
// This pattern should not be copied elsewhere.
|
||||||
|
type WaterfallSpan struct {
|
||||||
|
Attributes map[string]any `json:"attributes"`
|
||||||
|
DBName string `json:"db_name"`
|
||||||
|
DBOperation string `json:"db_operation"`
|
||||||
|
DurationNano uint64 `json:"duration_nano"`
|
||||||
|
Events []Event `json:"events"`
|
||||||
|
ExternalHTTPMethod string `json:"external_http_method"`
|
||||||
|
ExternalHTTPURL string `json:"external_http_url"`
|
||||||
|
Flags uint32 `json:"flags"`
|
||||||
|
HasError bool `json:"has_error"`
|
||||||
|
HTTPHost string `json:"http_host"`
|
||||||
|
HTTPMethod string `json:"http_method"`
|
||||||
|
HTTPURL string `json:"http_url"`
|
||||||
|
IsRemote string `json:"is_remote"`
|
||||||
|
Kind int32 `json:"kind"`
|
||||||
|
KindString string `json:"kind_string"`
|
||||||
|
Links string `json:"links"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
ParentSpanID string `json:"parent_span_id"`
|
||||||
|
Resource map[string]string `json:"resource"`
|
||||||
|
ResponseStatusCode string `json:"response_status_code"`
|
||||||
|
SpanID string `json:"span_id"`
|
||||||
|
StatusCode int32 `json:"status_code"`
|
||||||
|
StatusCodeString string `json:"status_code_string"`
|
||||||
|
StatusMessage string `json:"status_message"`
|
||||||
|
Timestamp string `json:"timestamp"`
|
||||||
|
TraceID string `json:"trace_id"`
|
||||||
|
TraceState string `json:"trace_state"`
|
||||||
|
|
||||||
|
// Tree structure fields
|
||||||
|
Children []*WaterfallSpan `json:"-"`
|
||||||
|
SubTreeNodeCount uint64 `json:"sub_tree_node_count"`
|
||||||
|
HasChildren bool `json:"has_children"`
|
||||||
|
Level uint64 `json:"level"`
|
||||||
|
|
||||||
|
// timeUnixNano is an internal field used for tree building and sorting.
|
||||||
|
// It is not serialized in the JSON response.
|
||||||
|
TimeUnixNano uint64 `json:"-"`
|
||||||
|
// serviceName is an internal field used for service time calculation.
|
||||||
|
ServiceName string `json:"-"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// CopyWithoutChildren creates a shallow copy and reset computed tree fields.
|
||||||
|
func (s *WaterfallSpan) CopyWithoutChildren(level uint64) *WaterfallSpan {
|
||||||
|
cp := *s
|
||||||
|
cp.Level = level
|
||||||
|
cp.HasChildren = len(s.Children) > 0
|
||||||
|
cp.Children = make([]*WaterfallSpan, 0)
|
||||||
|
cp.SubTreeNodeCount = 0
|
||||||
|
return &cp
|
||||||
|
}
|
||||||
|
|
||||||
|
// SpanModel is the ClickHouse scan struct for the v3 waterfall query.
|
||||||
|
type SpanModel struct {
|
||||||
|
TimeUnixNano time.Time `ch:"timestamp"`
|
||||||
|
DurationNano uint64 `ch:"duration_nano"`
|
||||||
|
SpanID string `ch:"span_id"`
|
||||||
|
TraceID string `ch:"trace_id"`
|
||||||
|
HasError bool `ch:"has_error"`
|
||||||
|
Kind int8 `ch:"kind"`
|
||||||
|
ServiceName string `ch:"resource_string_service$$name"`
|
||||||
|
Name string `ch:"name"`
|
||||||
|
References string `ch:"references"`
|
||||||
|
AttributesString map[string]string `ch:"attributes_string"`
|
||||||
|
AttributesNumber map[string]float64 `ch:"attributes_number"`
|
||||||
|
AttributesBool map[string]bool `ch:"attributes_bool"`
|
||||||
|
ResourcesString map[string]string `ch:"resources_string"`
|
||||||
|
Events []string `ch:"events"`
|
||||||
|
StatusMessage string `ch:"status_message"`
|
||||||
|
StatusCodeString string `ch:"status_code_string"`
|
||||||
|
SpanKind string `ch:"kind_string"`
|
||||||
|
ParentSpanID string `ch:"parent_span_id"`
|
||||||
|
Flags uint32 `ch:"flags"`
|
||||||
|
IsRemote string `ch:"is_remote"`
|
||||||
|
TraceState string `ch:"trace_state"`
|
||||||
|
StatusCode int32 `ch:"status_code"`
|
||||||
|
DBName string `ch:"db_name"`
|
||||||
|
DBOperation string `ch:"db_operation"`
|
||||||
|
HTTPMethod string `ch:"http_method"`
|
||||||
|
HTTPURL string `ch:"http_url"`
|
||||||
|
HTTPHost string `ch:"http_host"`
|
||||||
|
ExternalHTTPMethod string `ch:"external_http_method"`
|
||||||
|
ExternalHTTPURL string `ch:"external_http_url"`
|
||||||
|
ResponseStatusCode string `ch:"response_status_code"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ToSpan converts a SpanModel (ClickHouse scan result) into a Span for the waterfall response.
|
||||||
|
func (item *SpanModel) ToSpan() *WaterfallSpan {
|
||||||
|
// Merge attributes_string, attributes_number, attributes_bool preserving native types
|
||||||
|
attributes := make(map[string]any, len(item.AttributesString)+len(item.AttributesNumber)+len(item.AttributesBool))
|
||||||
|
for k, v := range item.AttributesString {
|
||||||
|
attributes[k] = v
|
||||||
|
}
|
||||||
|
for k, v := range item.AttributesNumber {
|
||||||
|
attributes[k] = v
|
||||||
|
}
|
||||||
|
for k, v := range item.AttributesBool {
|
||||||
|
attributes[k] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
resources := make(map[string]string)
|
||||||
|
maps.Copy(resources, item.ResourcesString)
|
||||||
|
|
||||||
|
events := make([]Event, 0, len(item.Events))
|
||||||
|
for _, eventStr := range item.Events {
|
||||||
|
var event Event
|
||||||
|
if err := json.Unmarshal([]byte(eventStr), &event); err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
events = append(events, event)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &WaterfallSpan{
|
||||||
|
Attributes: attributes,
|
||||||
|
DBName: item.DBName,
|
||||||
|
DBOperation: item.DBOperation,
|
||||||
|
DurationNano: item.DurationNano,
|
||||||
|
Events: events,
|
||||||
|
ExternalHTTPMethod: item.ExternalHTTPMethod,
|
||||||
|
ExternalHTTPURL: item.ExternalHTTPURL,
|
||||||
|
Flags: item.Flags,
|
||||||
|
HasError: item.HasError,
|
||||||
|
HTTPHost: item.HTTPHost,
|
||||||
|
HTTPMethod: item.HTTPMethod,
|
||||||
|
HTTPURL: item.HTTPURL,
|
||||||
|
IsRemote: item.IsRemote,
|
||||||
|
Kind: int32(item.Kind),
|
||||||
|
KindString: item.SpanKind,
|
||||||
|
Links: item.References,
|
||||||
|
Name: item.Name,
|
||||||
|
ParentSpanID: item.ParentSpanID,
|
||||||
|
Resource: resources,
|
||||||
|
ResponseStatusCode: item.ResponseStatusCode,
|
||||||
|
SpanID: item.SpanID,
|
||||||
|
StatusCode: item.StatusCode,
|
||||||
|
StatusCodeString: item.StatusCodeString,
|
||||||
|
StatusMessage: item.StatusMessage,
|
||||||
|
Timestamp: item.TimeUnixNano.Format(time.RFC3339Nano),
|
||||||
|
TraceID: item.TraceID,
|
||||||
|
TraceState: item.TraceState,
|
||||||
|
Children: make([]*WaterfallSpan, 0),
|
||||||
|
TimeUnixNano: uint64(item.TimeUnixNano.UnixNano()),
|
||||||
|
ServiceName: item.ServiceName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// TraceSummary is the ClickHouse scan struct for the trace_summary query.
|
||||||
|
type TraceSummary struct {
|
||||||
|
TraceID string `ch:"trace_id"`
|
||||||
|
Start time.Time `ch:"start"`
|
||||||
|
End time.Time `ch:"end"`
|
||||||
|
NumSpans uint64 `ch:"num_spans"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// OtelSpanRef is used for parsing the references/links JSON from ClickHouse.
|
||||||
|
type OtelSpanRef struct {
|
||||||
|
TraceId string `json:"traceId,omitempty"`
|
||||||
|
SpanId string `json:"spanId,omitempty"`
|
||||||
|
RefType string `json:"refType,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// WaterfallCache holds pre-processed trace data for caching.
|
||||||
|
type WaterfallCache struct {
|
||||||
|
StartTime uint64 `json:"startTime"`
|
||||||
|
EndTime uint64 `json:"endTime"`
|
||||||
|
DurationNano uint64 `json:"durationNano"`
|
||||||
|
TotalSpans uint64 `json:"totalSpans"`
|
||||||
|
TotalErrorSpans uint64 `json:"totalErrorSpans"`
|
||||||
|
ServiceNameToTotalDurationMap map[string]uint64 `json:"serviceNameToTotalDurationMap"`
|
||||||
|
SpanIDToSpanNodeMap map[string]*WaterfallSpan `json:"spanIdToSpanNodeMap"`
|
||||||
|
TraceRoots []*WaterfallSpan `json:"traceRoots"`
|
||||||
|
HasMissingSpans bool `json:"hasMissingSpans"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *WaterfallCache) Clone() cachetypes.Cacheable {
|
||||||
|
copyOfServiceNameToTotalDurationMap := make(map[string]uint64)
|
||||||
|
maps.Copy(copyOfServiceNameToTotalDurationMap, c.ServiceNameToTotalDurationMap)
|
||||||
|
|
||||||
|
copyOfSpanIDToSpanNodeMap := make(map[string]*WaterfallSpan)
|
||||||
|
maps.Copy(copyOfSpanIDToSpanNodeMap, c.SpanIDToSpanNodeMap)
|
||||||
|
|
||||||
|
copyOfTraceRoots := make([]*WaterfallSpan, len(c.TraceRoots))
|
||||||
|
copy(copyOfTraceRoots, c.TraceRoots)
|
||||||
|
return &WaterfallCache{
|
||||||
|
StartTime: c.StartTime,
|
||||||
|
EndTime: c.EndTime,
|
||||||
|
DurationNano: c.DurationNano,
|
||||||
|
TotalSpans: c.TotalSpans,
|
||||||
|
TotalErrorSpans: c.TotalErrorSpans,
|
||||||
|
ServiceNameToTotalDurationMap: copyOfServiceNameToTotalDurationMap,
|
||||||
|
SpanIDToSpanNodeMap: copyOfSpanIDToSpanNodeMap,
|
||||||
|
TraceRoots: copyOfTraceRoots,
|
||||||
|
HasMissingSpans: c.HasMissingSpans,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *WaterfallCache) MarshalBinary() (data []byte, err error) {
|
||||||
|
return json.Marshal(c)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *WaterfallCache) UnmarshalBinary(data []byte) error {
|
||||||
|
return json.Unmarshal(data, c)
|
||||||
|
}
|
||||||
@@ -12,6 +12,7 @@ pytest_plugins = [
|
|||||||
"fixtures.sqlite",
|
"fixtures.sqlite",
|
||||||
"fixtures.zookeeper",
|
"fixtures.zookeeper",
|
||||||
"fixtures.signoz",
|
"fixtures.signoz",
|
||||||
|
"fixtures.audit",
|
||||||
"fixtures.logs",
|
"fixtures.logs",
|
||||||
"fixtures.traces",
|
"fixtures.traces",
|
||||||
"fixtures.metrics",
|
"fixtures.metrics",
|
||||||
|
|||||||
404
tests/integration/fixtures/audit.py
Normal file
404
tests/integration/fixtures/audit.py
Normal file
@@ -0,0 +1,404 @@
|
|||||||
|
import datetime
|
||||||
|
import json
|
||||||
|
from abc import ABC
|
||||||
|
from typing import Any, Callable, Generator, List, Optional
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
import pytest
|
||||||
|
from ksuid import KsuidMs
|
||||||
|
|
||||||
|
from fixtures import types
|
||||||
|
from fixtures.fingerprint import LogsOrTracesFingerprint
|
||||||
|
|
||||||
|
|
||||||
|
class AuditResource(ABC):
|
||||||
|
labels: str
|
||||||
|
fingerprint: str
|
||||||
|
seen_at_ts_bucket_start: np.int64
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
labels: dict[str, str],
|
||||||
|
fingerprint: str,
|
||||||
|
seen_at_ts_bucket_start: np.int64,
|
||||||
|
) -> None:
|
||||||
|
self.labels = json.dumps(labels, separators=(",", ":"))
|
||||||
|
self.fingerprint = fingerprint
|
||||||
|
self.seen_at_ts_bucket_start = seen_at_ts_bucket_start
|
||||||
|
|
||||||
|
def np_arr(self) -> np.array:
|
||||||
|
return np.array(
|
||||||
|
[
|
||||||
|
self.labels,
|
||||||
|
self.fingerprint,
|
||||||
|
self.seen_at_ts_bucket_start,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AuditResourceOrAttributeKeys(ABC):
|
||||||
|
name: str
|
||||||
|
datatype: str
|
||||||
|
|
||||||
|
def __init__(self, name: str, datatype: str) -> None:
|
||||||
|
self.name = name
|
||||||
|
self.datatype = datatype
|
||||||
|
|
||||||
|
def np_arr(self) -> np.array:
|
||||||
|
return np.array([self.name, self.datatype])
|
||||||
|
|
||||||
|
|
||||||
|
class AuditTagAttributes(ABC):
|
||||||
|
unix_milli: np.int64
|
||||||
|
tag_key: str
|
||||||
|
tag_type: str
|
||||||
|
tag_data_type: str
|
||||||
|
string_value: str
|
||||||
|
int64_value: Optional[np.int64]
|
||||||
|
float64_value: Optional[np.float64]
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
timestamp: datetime.datetime,
|
||||||
|
tag_key: str,
|
||||||
|
tag_type: str,
|
||||||
|
tag_data_type: str,
|
||||||
|
string_value: Optional[str],
|
||||||
|
int64_value: Optional[np.int64],
|
||||||
|
float64_value: Optional[np.float64],
|
||||||
|
) -> None:
|
||||||
|
self.unix_milli = np.int64(int(timestamp.timestamp() * 1e3))
|
||||||
|
self.tag_key = tag_key
|
||||||
|
self.tag_type = tag_type
|
||||||
|
self.tag_data_type = tag_data_type
|
||||||
|
self.string_value = string_value or ""
|
||||||
|
self.int64_value = int64_value
|
||||||
|
self.float64_value = float64_value
|
||||||
|
|
||||||
|
def np_arr(self) -> np.array:
|
||||||
|
return np.array(
|
||||||
|
[
|
||||||
|
self.unix_milli,
|
||||||
|
self.tag_key,
|
||||||
|
self.tag_type,
|
||||||
|
self.tag_data_type,
|
||||||
|
self.string_value,
|
||||||
|
self.int64_value,
|
||||||
|
self.float64_value,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class AuditLog(ABC):
    """Represents a single audit log event in signoz_audit.

    Matches the ClickHouse DDL from the schema migration (ticket #1936):

    - Database: signoz_audit
    - Local table: logs
    - Distributed table: distributed_logs
    - No resources_string column (resource JSON only)
    - Has event_name column
    - 7 materialized columns auto-populated from attributes_string at INSERT time
    """

    ts_bucket_start: np.uint64
    resource_fingerprint: str
    timestamp: np.uint64
    observed_timestamp: np.uint64
    id: str
    trace_id: str
    span_id: str
    trace_flags: np.uint32
    severity_text: str
    severity_number: np.uint8
    body: str
    scope_name: str
    scope_version: str
    scope_string: dict[str, str]
    attributes_string: dict[str, str]
    attributes_number: dict[str, np.float64]
    attributes_bool: dict[str, bool]
    resource_json: dict[str, str]
    event_name: str

    resource: List[AuditResource]
    tag_attributes: List[AuditTagAttributes]
    resource_keys: List[AuditResourceOrAttributeKeys]
    attribute_keys: List[AuditResourceOrAttributeKeys]

    def __init__(
        self,
        timestamp: Optional[datetime.datetime] = None,
        resources: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        body: str = "",
        event_name: str = "",
        severity_text: str = "INFO",
        trace_id: str = "",
        span_id: str = "",
        trace_flags: np.uint32 = 0,
        scope_name: str = "signoz.audit",
        scope_version: str = "",
    ) -> None:
        """Build a fully populated audit event plus its side-table rows.

        `resources`/`attributes` previously used mutable default arguments
        (`{}`), which Python shares across calls; the `None` sentinel form
        is the safe equivalent and keeps callers unchanged.
        """
        if resources is None:
            resources = {}
        if attributes is None:
            attributes = {}
        if timestamp is None:
            timestamp = datetime.datetime.now()

        self.tag_attributes = []
        self.attribute_keys = []
        self.resource_keys = []

        # Nanosecond event time; observed == emitted for synthetic test data.
        self.timestamp = np.uint64(int(timestamp.timestamp() * 1e9))
        self.observed_timestamp = self.timestamp

        # 30-minute bucketing: bucket start is the enclosing :00 or :30 mark.
        bucket_minute = 0 if timestamp.minute < 30 else 30
        bucket_start = timestamp.replace(minute=bucket_minute, second=0, microsecond=0)
        self.ts_bucket_start = np.uint64(int(bucket_start.timestamp()))

        # KSUIDs embed the timestamp, so (timestamp, id) ordering is stable.
        self.id = str(KsuidMs(datetime=timestamp))

        self.trace_id = trace_id
        self.span_id = span_id
        self.trace_flags = trace_flags

        self.severity_text = severity_text
        # 9 == INFO; any other text is mapped to 17 (ERROR) as in the original.
        self.severity_number = np.uint8(9 if severity_text == "INFO" else 17)

        self.body = body
        self.event_name = event_name

        # Resources — JSON column only (no resources_string in audit DDL)
        self.resource_json = {k: str(v) for k, v in resources.items()}
        for key, value in self.resource_json.items():
            self._add_tag(timestamp, key, "resource", "string", string_value=str(value))

        self.resource_fingerprint = LogsOrTracesFingerprint(
            self.resource_json
        ).calculate()

        # Process attributes by type
        self.attributes_string = {}
        self.attributes_number = {}
        self.attributes_bool = {}

        for key, value in attributes.items():
            # bool must be tested before int: bool is a subclass of int.
            if isinstance(value, bool):
                self.attributes_bool[key] = value
                self._add_tag(timestamp, key, "tag", "bool")
            elif isinstance(value, int):
                self.attributes_number[key] = np.float64(value)
                self._add_tag(
                    timestamp, key, "tag", "int64", int64_value=np.int64(value)
                )
            elif isinstance(value, float):
                self.attributes_number[key] = np.float64(value)
                self._add_tag(
                    timestamp, key, "tag", "float64", float64_value=np.float64(value)
                )
            else:
                self.attributes_string[key] = str(value)
                self._add_tag(
                    timestamp, key, "tag", "string", string_value=str(value)
                )

        self.scope_name = scope_name
        self.scope_version = scope_version
        self.scope_string = {}

        self.resource = [
            AuditResource(
                labels=self.resource_json,
                fingerprint=self.resource_fingerprint,
                seen_at_ts_bucket_start=self.ts_bucket_start,
            )
        ]

    def _add_tag(
        self,
        timestamp: datetime.datetime,
        key: str,
        tag_type: str,
        datatype: str,
        string_value: Optional[str] = None,
        int64_value: Optional[np.int64] = None,
        float64_value: Optional[np.float64] = None,
    ) -> None:
        """Append one tag_attributes row plus the matching key-metadata entry.

        Replaces four near-identical copy-pasted append blocks from the
        original __init__. "resource" tags go to resource_keys; everything
        else goes to attribute_keys.
        """
        self.tag_attributes.append(
            AuditTagAttributes(
                timestamp=timestamp,
                tag_key=key,
                tag_type=tag_type,
                tag_data_type=datatype,
                string_value=string_value,
                int64_value=int64_value,
                float64_value=float64_value,
            )
        )
        keys = self.resource_keys if tag_type == "resource" else self.attribute_keys
        keys.append(AuditResourceOrAttributeKeys(name=key, datatype=datatype))

    def np_arr(self) -> np.ndarray:
        """Return the row as an object array in distributed_logs column order."""
        return np.array(
            [
                self.ts_bucket_start,
                self.resource_fingerprint,
                self.timestamp,
                self.observed_timestamp,
                self.id,
                self.trace_id,
                self.span_id,
                self.trace_flags,
                self.severity_text,
                self.severity_number,
                self.body,
                self.scope_name,
                self.scope_version,
                self.scope_string,
                self.attributes_string,
                self.attributes_number,
                self.attributes_bool,
                self.resource_json,
                self.event_name,
            ]
        )
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="insert_audit_logs", scope="function")
def insert_audit_logs(
    clickhouse: types.TestContainerClickhouse,
) -> Generator[Callable[[List[AuditLog]], None], Any, None]:
    """Yield a writer that inserts AuditLog rows (and all side tables) into
    signoz_audit, then truncate every audit table on teardown."""

    def _insert_audit_logs(logs: List[AuditLog]) -> None:
        # Resource rows (fingerprint → labels) flattened across all logs.
        resource_rows = [res for log in logs for res in log.resource]
        if resource_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_logs_resource",
                data=[res.np_arr() for res in resource_rows],
                column_names=[
                    "labels",
                    "fingerprint",
                    "seen_at_ts_bucket_start",
                ],
            )

        # Flattened tag_attributes rows.
        tag_rows = [tag for log in logs for tag in log.tag_attributes]
        if tag_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_tag_attributes",
                data=[tag.np_arr() for tag in tag_rows],
                column_names=[
                    "unix_milli",
                    "tag_key",
                    "tag_type",
                    "tag_data_type",
                    "string_value",
                    "int64_value",
                    "float64_value",
                ],
            )

        # Attribute-key metadata rows.
        attr_key_rows = [key for log in logs for key in log.attribute_keys]
        if attr_key_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_logs_attribute_keys",
                data=[key.np_arr() for key in attr_key_rows],
                column_names=["name", "datatype"],
            )

        # Resource-key metadata rows.
        res_key_rows = [key for log in logs for key in log.resource_keys]
        if res_key_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_logs_resource_keys",
                data=[key.np_arr() for key in res_key_rows],
                column_names=["name", "datatype"],
            )

        # Finally, the log rows themselves in distributed_logs column order.
        clickhouse.conn.insert(
            database="signoz_audit",
            table="distributed_logs",
            data=[log.np_arr() for log in logs],
            column_names=[
                "ts_bucket_start",
                "resource_fingerprint",
                "timestamp",
                "observed_timestamp",
                "id",
                "trace_id",
                "span_id",
                "trace_flags",
                "severity_text",
                "severity_number",
                "body",
                "scope_name",
                "scope_version",
                "scope_string",
                "attributes_string",
                "attributes_number",
                "attributes_bool",
                "resource",
                "event_name",
            ],
        )

    yield _insert_audit_logs

    # Teardown: truncate the *local* tables across the cluster so the next
    # test starts from an empty signoz_audit database.
    cluster = clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER"]
    for table in (
        "logs",
        "logs_resource",
        "tag_attributes",
        "logs_attribute_keys",
        "logs_resource_keys",
    ):
        clickhouse.conn.query(
            f"TRUNCATE TABLE signoz_audit.{table} ON CLUSTER '{cluster}' SYNC"
        )
|
||||||
@@ -38,6 +38,7 @@ class OrderBy:
|
|||||||
class BuilderQuery:
|
class BuilderQuery:
|
||||||
signal: str
|
signal: str
|
||||||
name: str = "A"
|
name: str = "A"
|
||||||
|
source: Optional[str] = None
|
||||||
limit: Optional[int] = None
|
limit: Optional[int] = None
|
||||||
filter_expression: Optional[str] = None
|
filter_expression: Optional[str] = None
|
||||||
select_fields: Optional[List[TelemetryFieldKey]] = None
|
select_fields: Optional[List[TelemetryFieldKey]] = None
|
||||||
@@ -48,6 +49,8 @@ class BuilderQuery:
|
|||||||
"signal": self.signal,
|
"signal": self.signal,
|
||||||
"name": self.name,
|
"name": self.name,
|
||||||
}
|
}
|
||||||
|
if self.source:
|
||||||
|
spec["source"] = self.source
|
||||||
if self.limit is not None:
|
if self.limit is not None:
|
||||||
spec["limit"] = self.limit
|
spec["limit"] = self.limit
|
||||||
if self.filter_expression:
|
if self.filter_expression:
|
||||||
@@ -55,7 +58,9 @@ class BuilderQuery:
|
|||||||
if self.select_fields:
|
if self.select_fields:
|
||||||
spec["selectFields"] = [f.to_dict() for f in self.select_fields]
|
spec["selectFields"] = [f.to_dict() for f in self.select_fields]
|
||||||
if self.order:
|
if self.order:
|
||||||
spec["order"] = [o.to_dict() for o in self.order]
|
spec["order"] = [
|
||||||
|
o.to_dict() if hasattr(o, "to_dict") else o for o in self.order
|
||||||
|
]
|
||||||
return {"type": "builder_query", "spec": spec}
|
return {"type": "builder_query", "spec": spec}
|
||||||
|
|
||||||
|
|
||||||
@@ -76,7 +81,9 @@ class TraceOperatorQuery:
|
|||||||
if self.limit is not None:
|
if self.limit is not None:
|
||||||
spec["limit"] = self.limit
|
spec["limit"] = self.limit
|
||||||
if self.order:
|
if self.order:
|
||||||
spec["order"] = [o.to_dict() for o in self.order]
|
spec["order"] = [
|
||||||
|
o.to_dict() if hasattr(o, "to_dict") else o for o in self.order
|
||||||
|
]
|
||||||
return {"type": "builder_trace_operator", "spec": spec}
|
return {"type": "builder_trace_operator", "spec": spec}
|
||||||
|
|
||||||
|
|
||||||
@@ -442,6 +449,7 @@ def build_scalar_query(
|
|||||||
signal: str,
|
signal: str,
|
||||||
aggregations: List[Dict],
|
aggregations: List[Dict],
|
||||||
*,
|
*,
|
||||||
|
source: Optional[str] = None,
|
||||||
group_by: Optional[List[Dict]] = None,
|
group_by: Optional[List[Dict]] = None,
|
||||||
order: Optional[List[Dict]] = None,
|
order: Optional[List[Dict]] = None,
|
||||||
limit: Optional[int] = None,
|
limit: Optional[int] = None,
|
||||||
@@ -458,6 +466,9 @@ def build_scalar_query(
|
|||||||
"aggregations": aggregations,
|
"aggregations": aggregations,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if source:
|
||||||
|
spec["source"] = source
|
||||||
|
|
||||||
if group_by:
|
if group_by:
|
||||||
spec["groupBy"] = group_by
|
spec["groupBy"] = group_by
|
||||||
|
|
||||||
|
|||||||
441
tests/integration/src/auditquerier/01_audit_logs.py
Normal file
441
tests/integration/src/auditquerier/01_audit_logs.py
Normal file
@@ -0,0 +1,441 @@
|
|||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from http import HTTPStatus
|
||||||
|
from typing import Callable, List
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from fixtures import types
|
||||||
|
from fixtures.audit import AuditLog
|
||||||
|
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
|
||||||
|
from fixtures.querier import (
|
||||||
|
BuilderQuery,
|
||||||
|
build_logs_aggregation,
|
||||||
|
build_order_by,
|
||||||
|
build_scalar_query,
|
||||||
|
make_query_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_audit_list_all(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
) -> None:
    """List audit events across multiple resource types — verify count, ordering, and fields."""
    # Seed three audit events at t-3s, t-2s, t-1s so ordering is deterministic.
    now = datetime.now(tz=timezone.utc)
    insert_audit_logs(
        [
            AuditLog(  # oldest: alert-rule created
                timestamp=now - timedelta(seconds=3),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "alert-rule",
                    "signoz.audit.resource.id": "alert-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-010",
                    "signoz.audit.principal.email": "ops@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "create",
                    "signoz.audit.outcome": "success",
                },
                body="ops@acme.com (user-010) created alert-rule (alert-001)",
                event_name="alert-rule.created",
                severity_text="INFO",
            ),
            AuditLog(  # middle: saved-view updated
                timestamp=now - timedelta(seconds=2),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "saved-view",
                    "signoz.audit.resource.id": "view-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-010",
                    "signoz.audit.principal.email": "ops@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "success",
                },
                body="ops@acme.com (user-010) updated saved-view (view-001)",
                event_name="saved-view.updated",
                severity_text="INFO",
            ),
            AuditLog(  # newest: user role changed
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "user",
                    "signoz.audit.resource.id": "user-020",
                },
                attributes={
                    "signoz.audit.principal.id": "user-010",
                    "signoz.audit.principal.email": "ops@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.action_category": "access_control",
                    "signoz.audit.outcome": "success",
                },
                body="ops@acme.com (user-010) updated user (user-020)",
                event_name="user.role.changed",
                severity_text="INFO",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Query with source="audit" so the builder targets the audit store.
    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            BuilderQuery(
                signal="logs",
                source="audit",
                limit=100,
                order=[build_order_by("timestamp"), build_order_by("id")],
            ).to_dict()
        ],
        request_type="raw",
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    rows = response.json()["data"]["data"]["results"][0]["rows"]
    assert len(rows) == 3

    # Most recent first
    assert rows[0]["data"]["event_name"] == "user.role.changed"
    assert rows[1]["data"]["event_name"] == "saved-view.updated"
    assert rows[2]["data"]["event_name"] == "alert-rule.created"

    # Verify event_name and body are present
    assert rows[0]["data"]["body"] == "ops@acme.com (user-010) updated user (user-020)"
    assert rows[0]["data"]["severity_text"] == "INFO"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
    "filter_expression,expected_count,expected_event_names",
    [
        pytest.param(
            "signoz.audit.principal.id = 'user-001'",
            3,
            {"session.login", "dashboard.updated", "dashboard.created"},
            id="filter_by_principal_id",
        ),
        pytest.param(
            "signoz.audit.outcome = 'failure'",
            1,
            {"dashboard.deleted"},
            id="filter_by_outcome_failure",
        ),
        pytest.param(
            "signoz.audit.resource.kind = 'dashboard'"
            " AND signoz.audit.resource.id = 'dash-001'",
            3,
            {"dashboard.deleted", "dashboard.updated", "dashboard.created"},
            id="filter_by_resource_kind_and_id",
        ),
        pytest.param(
            "signoz.audit.principal.type = 'service_account'",
            1,
            {"serviceaccount.apikey.created"},
            id="filter_by_principal_type",
        ),
        pytest.param(
            "signoz.audit.resource.kind = 'dashboard'"
            " AND signoz.audit.action = 'delete'",
            1,
            {"dashboard.deleted"},
            id="filter_by_resource_kind_and_action",
        ),
    ],
)
def test_audit_filter(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
    filter_expression: str,
    expected_count: int,
    expected_event_names: set,
) -> None:
    """Parametrized audit filter tests covering the documented query patterns."""
    # Five events spanning two principals, a service account, failures and
    # successes — enough variety that each parametrized filter selects a
    # distinct, non-trivial subset.
    now = datetime.now(tz=timezone.utc)
    insert_audit_logs(
        [
            AuditLog(  # user-001 creates dash-001 (success)
                timestamp=now - timedelta(seconds=5),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-001",
                    "signoz.audit.principal.email": "alice@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "create",
                    "signoz.audit.action_category": "configuration_change",
                    "signoz.audit.outcome": "success",
                },
                body="alice@acme.com created dashboard",
                event_name="dashboard.created",
            ),
            AuditLog(  # user-001 updates dash-001 (success)
                timestamp=now - timedelta(seconds=4),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-001",
                    "signoz.audit.principal.email": "alice@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.action_category": "configuration_change",
                    "signoz.audit.outcome": "success",
                },
                body="alice@acme.com updated dashboard",
                event_name="dashboard.updated",
            ),
            AuditLog(  # user-002 fails to delete dash-001 (forbidden)
                timestamp=now - timedelta(seconds=3),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-002",
                    "signoz.audit.principal.email": "viewer@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "delete",
                    "signoz.audit.action_category": "configuration_change",
                    "signoz.audit.outcome": "failure",
                    "signoz.audit.error.type": "forbidden",
                    "signoz.audit.error.code": "authz_forbidden",
                },
                body="viewer@acme.com failed to delete dashboard",
                event_name="dashboard.deleted",
                severity_text="ERROR",
            ),
            AuditLog(  # service account creates an API key
                timestamp=now - timedelta(seconds=2),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "serviceaccount",
                    "signoz.audit.resource.id": "sa-001",
                },
                attributes={
                    "signoz.audit.principal.id": "sa-001",
                    "signoz.audit.principal.email": "",
                    "signoz.audit.principal.type": "service_account",
                    "signoz.audit.action": "create",
                    "signoz.audit.action_category": "access_control",
                    "signoz.audit.outcome": "success",
                },
                body="sa-001 created serviceaccount",
                event_name="serviceaccount.apikey.created",
            ),
            AuditLog(  # user-001 login session
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "session",
                    "signoz.audit.resource.id": "*",
                },
                attributes={
                    "signoz.audit.principal.id": "user-001",
                    "signoz.audit.principal.email": "alice@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "login",
                    "signoz.audit.action_category": "access_control",
                    "signoz.audit.outcome": "success",
                },
                body="alice@acme.com login session",
                event_name="session.login",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            BuilderQuery(
                signal="logs",
                source="audit",
                limit=100,
                filter_expression=filter_expression,
                order=[build_order_by("timestamp"), build_order_by("id")],
            ).to_dict()
        ],
        request_type="raw",
    )

    assert response.status_code == HTTPStatus.OK

    rows = response.json()["data"]["data"]["results"][0]["rows"]
    assert len(rows) == expected_count

    # Compare as a set: count is asserted above, ordering is not under test here.
    actual_event_names = {row["data"]["event_name"] for row in rows}
    assert actual_event_names == expected_event_names
|
||||||
|
|
||||||
|
|
||||||
|
def test_audit_scalar_count_failures(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
) -> None:
    """Alert query — count multiple failures from different principals."""
    # Two failures and one success; only the failures should be counted.
    now = datetime.now(tz=timezone.utc)
    insert_audit_logs(
        [
            AuditLog(  # failure #1 (user-050, dashboard delete)
                timestamp=now - timedelta(seconds=3),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-100",
                },
                attributes={
                    "signoz.audit.principal.id": "user-050",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "delete",
                    "signoz.audit.outcome": "failure",
                },
                body="user-050 failed to delete dashboard",
                event_name="dashboard.deleted",
                severity_text="ERROR",
            ),
            AuditLog(  # failure #2 (user-060, alert-rule update)
                timestamp=now - timedelta(seconds=2),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "alert-rule",
                    "signoz.audit.resource.id": "alert-200",
                },
                attributes={
                    "signoz.audit.principal.id": "user-060",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "failure",
                },
                body="user-060 failed to update alert-rule",
                event_name="alert-rule.updated",
                severity_text="ERROR",
            ),
            AuditLog(  # success — must not be counted
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-100",
                },
                attributes={
                    "signoz.audit.principal.id": "user-050",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "success",
                },
                body="user-050 updated dashboard",
                event_name="dashboard.updated",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Scalar (aggregation) request: count() over the failure filter.
    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            build_scalar_query(
                name="A",
                signal="logs",
                source="audit",
                aggregations=[build_logs_aggregation("count()")],
                filter_expression="signoz.audit.outcome = 'failure'",
            )
        ],
        request_type="scalar",
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"

    scalar_data = response.json()["data"]["data"]["results"][0].get("data", [])
    assert len(scalar_data) == 1
    assert scalar_data[0][0] == 2
|
||||||
|
|
||||||
|
|
||||||
|
def test_audit_does_not_leak_into_logs(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
) -> None:
    """A single audit event in signoz_audit must not appear in regular log queries."""
    now = datetime.now(tz=timezone.utc)
    insert_audit_logs(
        [
            AuditLog(
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "organization",
                    "signoz.audit.resource.id": "org-999",
                },
                attributes={
                    "signoz.audit.principal.id": "user-admin",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "success",
                },
                body="user-admin updated organization (org-999)",
                event_name="organization.updated",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Deliberately query logs WITHOUT source="audit": audit rows must not show up.
    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            BuilderQuery(
                signal="logs",
                limit=100,
                order=[build_order_by("timestamp"), build_order_by("id")],
            ).to_dict()
        ],
        request_type="raw",
    )

    assert response.status_code == HTTPStatus.OK

    rows = response.json()["data"]["data"]["results"][0].get("rows") or []

    # BUG FIX: the original filter tested whether the substring "signoz.audit"
    # occurred inside the *value* of the "signoz.audit.action" attribute
    # ("update", "create", ...), which is never true — so audit_bodies was
    # always empty and the assertion could never fail, making the test
    # vacuous. Detect leakage by the presence of the attribute *key* instead.
    audit_bodies = [
        row["data"]["body"]
        for row in rows
        if "signoz.audit.action" in row["data"].get("attributes_string", {})
    ]
    assert len(audit_bodies) == 0
|
||||||
Reference in New Issue
Block a user