Compare commits
20 Commits
refactor/c ... asset-migr
| Author | SHA1 | Date |
|---|---|---|
| | 49a0769532 | |
| | b7eac58f61 | |
| | 3c561a22f9 | |
| | 15d29604f6 | |
| | d677973d56 | |
| | b5b89bb678 | |
| | b5eab118dd | |
| | 98d0bdfe49 | |
| | a71006b662 | |
| | 769e36ec84 | |
| | 363734054f | |
| | 726948db9e | |
| | 13249b5e69 | |
| | fec24c2cc3 | |
| | 0d3226f363 | |
| | 419bd60a41 | |
| | b6b689902d | |
| | 65402ca367 | |
| | f71d5bf8f1 | |
| | 5abfd0732a | |
.github/workflows/integrationci.yaml

```diff
@@ -55,6 +55,8 @@ jobs:
         sqlstore-provider:
           - postgres
           - sqlite
+        sqlite-mode:
+          - wal
         clickhouse-version:
           - 25.5.6
           - 25.12.5
@@ -102,6 +104,7 @@ jobs:
             --basetemp=./tmp/ \
             src/${{matrix.src}} \
             --sqlstore-provider ${{matrix.sqlstore-provider}} \
+            --sqlite-mode ${{matrix.sqlite-mode}} \
             --postgres-version ${{matrix.postgres-version}} \
             --clickhouse-version ${{matrix.clickhouse-version}} \
             --schema-migrator-version ${{matrix.schema-migrator-version}}
```
```diff
@@ -17,15 +17,11 @@ import (
 	"github.com/SigNoz/signoz/pkg/factory"
 	"github.com/SigNoz/signoz/pkg/gateway"
 	"github.com/SigNoz/signoz/pkg/gateway/noopgateway"
 	"github.com/SigNoz/signoz/pkg/global"
 	"github.com/SigNoz/signoz/pkg/licensing"
 	"github.com/SigNoz/signoz/pkg/licensing/nooplicensing"
-	"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
-	"github.com/SigNoz/signoz/pkg/modules/cloudintegration/implcloudintegration"
 	"github.com/SigNoz/signoz/pkg/modules/dashboard"
 	"github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
 	"github.com/SigNoz/signoz/pkg/modules/organization"
 	"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
 	"github.com/SigNoz/signoz/pkg/querier"
 	"github.com/SigNoz/signoz/pkg/query-service/app"
 	"github.com/SigNoz/signoz/pkg/queryparser"
@@ -33,7 +29,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/sqlschema"
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
 	"github.com/SigNoz/signoz/pkg/version"
 	"github.com/SigNoz/signoz/pkg/zeus"
 	"github.com/SigNoz/signoz/pkg/zeus/noopzeus"
@@ -101,9 +96,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
 		func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
 			return querier.NewHandler(ps, q, a)
 		},
-		func(_ cloudintegrationtypes.Store, _ global.Global, _ zeus.Zeus, _ gateway.Gateway, _ licensing.Licensing, _ serviceaccount.Module, _ cloudintegration.Config) (cloudintegration.Module, error) {
-			return implcloudintegration.NewModule(), nil
-		},
 	)
 	if err != nil {
 		logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
```
```diff
@@ -16,8 +16,6 @@ import (
 	"github.com/SigNoz/signoz/ee/gateway/httpgateway"
 	enterpriselicensing "github.com/SigNoz/signoz/ee/licensing"
 	"github.com/SigNoz/signoz/ee/licensing/httplicensing"
-	"github.com/SigNoz/signoz/ee/modules/cloudintegration/implcloudintegration"
-	"github.com/SigNoz/signoz/ee/modules/cloudintegration/implcloudintegration/implcloudprovider"
 	"github.com/SigNoz/signoz/ee/modules/dashboard/impldashboard"
 	eequerier "github.com/SigNoz/signoz/ee/querier"
 	enterpriseapp "github.com/SigNoz/signoz/ee/query-service/app"
@@ -31,14 +29,10 @@ import (
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/factory"
 	"github.com/SigNoz/signoz/pkg/gateway"
 	"github.com/SigNoz/signoz/pkg/global"
 	"github.com/SigNoz/signoz/pkg/licensing"
-	"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
-	pkgcloudintegration "github.com/SigNoz/signoz/pkg/modules/cloudintegration/implcloudintegration"
 	"github.com/SigNoz/signoz/pkg/modules/dashboard"
 	pkgimpldashboard "github.com/SigNoz/signoz/pkg/modules/dashboard/impldashboard"
 	"github.com/SigNoz/signoz/pkg/modules/organization"
 	"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
 	"github.com/SigNoz/signoz/pkg/querier"
 	"github.com/SigNoz/signoz/pkg/queryparser"
 	"github.com/SigNoz/signoz/pkg/signoz"
@@ -46,7 +40,6 @@ import (
 	"github.com/SigNoz/signoz/pkg/sqlstore"
 	"github.com/SigNoz/signoz/pkg/sqlstore/sqlstorehook"
 	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
 	"github.com/SigNoz/signoz/pkg/version"
 	"github.com/SigNoz/signoz/pkg/zeus"
 )
@@ -132,6 +125,7 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
 				return nil, err
 			}
 			return openfgaauthz.NewProviderFactory(sqlstore, openfgaschema.NewSchema().Get(ctx), openfgaDataStore, licensing, dashboardModule), nil
 
 		},
 		func(store sqlstore.SQLStore, settings factory.ProviderSettings, analytics analytics.Analytics, orgGetter organization.Getter, queryParser queryparser.QueryParser, querier querier.Querier, licensing licensing.Licensing) dashboard.Module {
 			return impldashboard.NewModule(pkgimpldashboard.NewStore(store), settings, analytics, orgGetter, queryParser, querier, licensing)
@@ -143,21 +137,8 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
 			communityHandler := querier.NewHandler(ps, q, a)
 			return eequerier.NewHandler(ps, q, communityHandler)
 		},
-		func(store cloudintegrationtypes.Store, global global.Global, zeus zeus.Zeus, gateway gateway.Gateway, licensing licensing.Licensing, serviceAccount serviceaccount.Module, config cloudintegration.Config) (cloudintegration.Module, error) {
-			defStore := pkgcloudintegration.NewServiceDefinitionStore()
-			awsCloudProviderModule, err := implcloudprovider.NewAWSCloudProvider(defStore)
-			if err != nil {
-				return nil, err
-			}
-			azureCloudProviderModule := implcloudprovider.NewAzureCloudProvider()
-			cloudProvidersMap := map[cloudintegrationtypes.CloudProviderType]cloudintegration.CloudProviderModule{
-				cloudintegrationtypes.CloudProviderTypeAWS:   awsCloudProviderModule,
-				cloudintegrationtypes.CloudProviderTypeAzure: azureCloudProviderModule,
-			}
-
-			return implcloudintegration.NewModule(store, global, zeus, gateway, licensing, serviceAccount, cloudProvidersMap, config)
-		},
 	)
 
 	if err != nil {
 		logger.ErrorContext(ctx, "failed to create signoz", errors.Attr(err))
 		return err
```
```diff
@@ -86,7 +86,7 @@ sqlstore:
     # The path to the SQLite database file.
     path: /var/lib/signoz/signoz.db
     # The journal mode for the sqlite database. Supported values: delete, wal.
-    mode: delete
+    mode: wal
     # The timeout for the sqlite database to wait for a lock.
    busy_timeout: 10s
    # The default transaction locking behavior. Supported values: deferred, immediate, exclusive.
@@ -364,10 +364,3 @@ serviceaccount:
   analytics:
     # toggle service account analytics
     enabled: true
-
-##################### Cloud Integration #####################
-cloudintegration:
-  # cloud integration agent configuration
-  agent:
-    # The version of the cloud integration agent.
-    version: v0.0.8
```
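For readers mapping the YAML above onto driver settings, here is a minimal Go sketch, assuming the mattn/go-sqlite3 driver (its `_journal_mode`, `_busy_timeout`, and `_txlock` DSN parameters are conventions of that driver, not something this PR introduces), that opens the database with the new WAL default and verifies that the mode actually took effect:

```go
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver
)

func main() {
	// Mirror the sqlstore config: WAL journal mode and a 10s busy timeout
	// (the driver takes milliseconds). Path is the one from the config.
	dsn := "file:/var/lib/signoz/signoz.db?_journal_mode=WAL&_busy_timeout=10000"

	db, err := sql.Open("sqlite3", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Read the journal mode back: SQLite silently falls back to another
	// mode if the underlying file cannot support WAL.
	var mode string
	if err := db.QueryRow("PRAGMA journal_mode;").Scan(&mode); err != nil {
		log.Fatal(err)
	}
	fmt.Println("journal_mode =", mode) // expect "wal"
}
```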
```diff
@@ -327,27 +327,6 @@ components:
         nullable: true
         type: array
       type: object
-    AuthtypesStorableRole:
-      properties:
-        createdAt:
-          format: date-time
-          type: string
-        description:
-          type: string
-        id:
-          type: string
-        name:
-          type: string
-        orgId:
-          type: string
-        type:
-          type: string
-        updatedAt:
-          format: date-time
-          type: string
-      required:
-        - id
-      type: object
     AuthtypesTransaction:
       properties:
         object:
@@ -371,7 +350,7 @@ components:
         id:
           type: string
         role:
-          $ref: '#/components/schemas/AuthtypesStorableRole'
+          $ref: '#/components/schemas/AuthtypesRole'
         roleId:
           type: string
         updatedAt:
@@ -381,6 +360,11 @@ components:
           type: string
       required:
         - id
+        - userId
+        - roleId
+        - createdAt
+        - updatedAt
+        - role
       type: object
     AuthtypesUserWithRoles:
       properties:
@@ -421,11 +405,11 @@ components:
       type: object
     CloudintegrationtypesAWSCollectionStrategy:
       properties:
-        logs:
+        aws_logs:
           $ref: '#/components/schemas/CloudintegrationtypesAWSLogsStrategy'
-        metrics:
+        aws_metrics:
           $ref: '#/components/schemas/CloudintegrationtypesAWSMetricsStrategy'
-        s3Buckets:
+        s3_buckets:
           additionalProperties:
             items:
               type: string
@@ -465,12 +449,12 @@ components:
       type: object
     CloudintegrationtypesAWSLogsStrategy:
       properties:
-        cloudwatchLogsSubscriptions:
+        cloudwatch_logs_subscriptions:
           items:
             properties:
-              filterPattern:
+              filter_pattern:
                 type: string
-              logGroupNamePrefix:
+              log_group_name_prefix:
                 type: string
             type: object
           nullable: true
@@ -478,7 +462,7 @@ components:
       type: object
     CloudintegrationtypesAWSMetricsStrategy:
       properties:
-        cloudwatchMetricStreamFilters:
+        cloudwatch_metric_stream_filters:
           items:
             properties:
               MetricNames:
@@ -502,7 +486,7 @@ components:
       properties:
         enabled:
           type: boolean
-        s3Buckets:
+        s3_buckets:
           additionalProperties:
             items:
               type: string
@@ -577,26 +561,6 @@ components:
         nullable: true
         type: array
       type: object
-    CloudintegrationtypesCloudIntegrationService:
-      nullable: true
-      properties:
-        cloudIntegrationId:
-          type: string
-        config:
-          $ref: '#/components/schemas/CloudintegrationtypesServiceConfig'
-        createdAt:
-          format: date-time
-          type: string
-        id:
-          type: string
-        type:
-          $ref: '#/components/schemas/CloudintegrationtypesServiceID'
-        updatedAt:
-          format: date-time
-          type: string
-      required:
-        - id
-      type: object
     CloudintegrationtypesCollectedLogAttribute:
       properties:
         name:
@@ -632,16 +596,6 @@ components:
         - aws
       type: object
-    CloudintegrationtypesConnectionArtifactRequest:
-      properties:
-        config:
-          $ref: '#/components/schemas/CloudintegrationtypesConnectionArtifactRequestConfig'
-        credentials:
-          $ref: '#/components/schemas/CloudintegrationtypesSignozCredentials'
-      required:
-        - config
-        - credentials
-      type: object
     CloudintegrationtypesConnectionArtifactRequestConfig:
       properties:
         aws:
           $ref: '#/components/schemas/CloudintegrationtypesAWSConnectionArtifactRequest'
@@ -740,54 +694,11 @@ components:
             type: string
           type: array
         telemetry:
-          $ref: '#/components/schemas/CloudintegrationtypesOldAWSCollectionStrategy'
+          $ref: '#/components/schemas/CloudintegrationtypesAWSCollectionStrategy'
       required:
         - enabled_regions
         - telemetry
       type: object
-    CloudintegrationtypesOldAWSCollectionStrategy:
-      properties:
-        aws_logs:
-          $ref: '#/components/schemas/CloudintegrationtypesOldAWSLogsStrategy'
-        aws_metrics:
-          $ref: '#/components/schemas/CloudintegrationtypesOldAWSMetricsStrategy'
-        provider:
-          type: string
-        s3_buckets:
-          additionalProperties:
-            items:
-              type: string
-            type: array
-          type: object
-      type: object
-    CloudintegrationtypesOldAWSLogsStrategy:
-      properties:
-        cloudwatch_logs_subscriptions:
-          items:
-            properties:
-              filter_pattern:
-                type: string
-              log_group_name_prefix:
-                type: string
-            type: object
-          nullable: true
-          type: array
-      type: object
-    CloudintegrationtypesOldAWSMetricsStrategy:
-      properties:
-        cloudwatch_metric_stream_filters:
-          items:
-            properties:
-              MetricNames:
-                items:
-                  type: string
-                type: array
-              Namespace:
-                type: string
-            type: object
-          nullable: true
-          type: array
-      type: object
     CloudintegrationtypesPostableAgentCheckInRequest:
       properties:
         account_id:
@@ -816,8 +727,6 @@ components:
       properties:
         assets:
           $ref: '#/components/schemas/CloudintegrationtypesAssets'
-        cloudIntegrationService:
-          $ref: '#/components/schemas/CloudintegrationtypesCloudIntegrationService'
         dataCollected:
           $ref: '#/components/schemas/CloudintegrationtypesDataCollected'
         icon:
@@ -826,7 +735,9 @@ components:
           type: string
         overview:
           type: string
-        supportedSignals:
+        serviceConfig:
+          $ref: '#/components/schemas/CloudintegrationtypesServiceConfig'
+        supported_signals:
           $ref: '#/components/schemas/CloudintegrationtypesSupportedSignals'
         telemetryCollectionStrategy:
           $ref: '#/components/schemas/CloudintegrationtypesCollectionStrategy'
@@ -838,10 +749,9 @@ components:
         - icon
         - overview
         - assets
-        - supportedSignals
+        - supported_signals
         - dataCollected
         - telemetryCollectionStrategy
-        - cloudIntegrationService
       type: object
     CloudintegrationtypesServiceConfig:
       properties:
@@ -850,22 +760,6 @@ components:
       required:
         - aws
       type: object
-    CloudintegrationtypesServiceID:
-      enum:
-        - alb
-        - api-gateway
-        - dynamodb
-        - ec2
-        - ecs
-        - eks
-        - elasticache
-        - lambda
-        - msk
-        - rds
-        - s3sync
-        - sns
-        - sqs
-      type: string
     CloudintegrationtypesServiceMetadata:
       properties:
         enabled:
@@ -882,22 +776,6 @@ components:
         - icon
         - enabled
       type: object
-    CloudintegrationtypesSignozCredentials:
-      properties:
-        ingestionKey:
-          type: string
-        ingestionUrl:
-          type: string
-        sigNozApiKey:
-          type: string
-        sigNozApiURL:
-          type: string
-      required:
-        - sigNozApiURL
-        - sigNozApiKey
-        - ingestionUrl
-        - ingestionKey
-      type: object
     CloudintegrationtypesSupportedSignals:
       properties:
         logs:
@@ -2419,15 +2297,6 @@ components:
         - status
         - error
       type: object
-    RulestatehistorytypesAlertState:
-      enum:
-        - inactive
-        - pending
-        - recovering
-        - firing
-        - nodata
-        - disabled
-      type: string
     RulestatehistorytypesGettableRuleStateHistory:
       properties:
         fingerprint:
@@ -2439,15 +2308,15 @@ components:
           nullable: true
           type: array
         overallState:
-          $ref: '#/components/schemas/RulestatehistorytypesAlertState'
+          $ref: '#/components/schemas/RuletypesAlertState'
         overallStateChanged:
           type: boolean
-        ruleID:
+        ruleId:
           type: string
         ruleName:
           type: string
         state:
-          $ref: '#/components/schemas/RulestatehistorytypesAlertState'
+          $ref: '#/components/schemas/RuletypesAlertState'
         stateChanged:
           type: boolean
         unixMilli:
@@ -2457,7 +2326,7 @@ components:
           format: double
           type: number
       required:
-        - ruleID
+        - ruleId
         - ruleName
         - overallState
         - overallStateChanged
@@ -2547,12 +2416,21 @@ components:
           format: int64
           type: integer
         state:
-          $ref: '#/components/schemas/RulestatehistorytypesAlertState'
+          $ref: '#/components/schemas/RuletypesAlertState'
       required:
         - state
         - start
         - end
       type: object
+    RuletypesAlertState:
+      enum:
+        - inactive
+        - pending
+        - recovering
+        - firing
+        - nodata
+        - disabled
+      type: string
     ServiceaccounttypesGettableFactorAPIKey:
       properties:
         createdAt:
@@ -3516,61 +3394,6 @@ paths:
       summary: Update account
       tags:
         - cloudintegration
-  /api/v1/cloud_integrations/{cloud_provider}/accounts/{id}/services/{service_id}:
-    put:
-      deprecated: false
-      description: This endpoint updates a service for the specified cloud provider
-      operationId: UpdateService
-      parameters:
-        - in: path
-          name: cloud_provider
-          required: true
-          schema:
-            type: string
-        - in: path
-          name: id
-          required: true
-          schema:
-            type: string
-        - in: path
-          name: service_id
-          required: true
-          schema:
-            type: string
-      requestBody:
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/CloudintegrationtypesUpdatableService'
-      responses:
-        "204":
-          description: No Content
-        "401":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Unauthorized
-        "403":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Forbidden
-        "500":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Internal Server Error
-      security:
-        - api_key:
-            - ADMIN
-        - tokenizer:
-            - ADMIN
-      summary: Update service
-      tags:
-        - cloudintegration
   /api/v1/cloud_integrations/{cloud_provider}/accounts/check_in:
     post:
       deprecated: false
@@ -3628,59 +3451,6 @@ paths:
       summary: Agent check-in
       tags:
         - cloudintegration
-  /api/v1/cloud_integrations/{cloud_provider}/credentials:
-    get:
-      deprecated: false
-      description: This endpoint retrieves the connection credentials required for
-        integration
-      operationId: GetConnectionCredentials
-      parameters:
-        - in: path
-          name: cloud_provider
-          required: true
-          schema:
-            type: string
-      responses:
-        "200":
-          content:
-            application/json:
-              schema:
-                properties:
-                  data:
-                    $ref: '#/components/schemas/CloudintegrationtypesSignozCredentials'
-                  status:
-                    type: string
-                required:
-                  - status
-                  - data
-                type: object
-          description: OK
-        "401":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Unauthorized
-        "403":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Forbidden
-        "500":
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/RenderErrorResponse'
-          description: Internal Server Error
-      security:
-        - api_key:
-            - ADMIN
-        - tokenizer:
-            - ADMIN
-      summary: Get connection credentials
-      tags:
-        - cloudintegration
   /api/v1/cloud_integrations/{cloud_provider}/services:
     get:
       deprecated: false
@@ -3791,6 +3561,55 @@ paths:
       summary: Get service
       tags:
         - cloudintegration
+    put:
+      deprecated: false
+      description: This endpoint updates a service for the specified cloud provider
+      operationId: UpdateService
+      parameters:
+        - in: path
+          name: cloud_provider
+          required: true
+          schema:
+            type: string
+        - in: path
+          name: service_id
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/CloudintegrationtypesUpdatableService'
+      responses:
+        "204":
+          description: No Content
+        "401":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Unauthorized
+        "403":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Forbidden
+        "500":
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/RenderErrorResponse'
+          description: Internal Server Error
+      security:
+        - api_key:
+            - ADMIN
+        - tokenizer:
+            - ADMIN
+      summary: Update service
+      tags:
+        - cloudintegration
   /api/v1/complete/google:
     get:
       deprecated: false
@@ -8634,7 +8453,7 @@ paths:
         - in: query
           name: state
          schema:
-            $ref: '#/components/schemas/RulestatehistorytypesAlertState'
+            $ref: '#/components/schemas/RuletypesAlertState'
         - in: query
           name: filterExpression
           schema:
```
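The `ruleID` → `ruleId` rename and the consolidation onto `RuletypesAlertState` are breaking for hand-written clients. A sketch of the client-side impact in Go — the struct below is hypothetical, not generated from the actual spec:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// GettableRuleStateHistory mirrors the renamed wire fields; a hand-rolled
// illustration only, not SigNoz's generated client type.
type GettableRuleStateHistory struct {
	RuleID   string `json:"ruleId"` // was `json:"ruleID"` before this change
	RuleName string `json:"ruleName"`
	State    string `json:"state"` // one of the RuletypesAlertState enum values
}

func main() {
	payload := []byte(`{"ruleId":"rule-1","ruleName":"cpu-high","state":"firing"}`)
	var h GettableRuleStateHistory
	if err := json.Unmarshal(payload, &h); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", h)
}
```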
````diff
@@ -193,6 +193,7 @@ uv run pytest --basetemp=./tmp/ -vv --reuse src/passwordauthn/01_register.py::te
 Tests can be configured using pytest options:
 
 - `--sqlstore-provider` - Choose database provider (default: postgres)
+- `--sqlite-mode` - SQLite journal mode: `delete` or `wal` (default: delete). Only relevant when `--sqlstore-provider=sqlite`.
 - `--postgres-version` - PostgreSQL version (default: 15)
 - `--clickhouse-version` - ClickHouse version (default: 25.5.6)
 - `--zookeeper-version` - Zookeeper version (default: 3.7.1)
@@ -202,7 +203,6 @@ Example:
 uv run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postgres-version=14 src/auth/
 ```
-
 
 ## What should I remember?
 
 - **Always use the `--reuse` flag** when setting up the environment to keep containers running
@@ -213,3 +213,4 @@ uv run pytest --basetemp=./tmp/ -vv --reuse --sqlstore-provider=postgres --postg
 - **Use descriptive test names** that clearly indicate what is being tested
 - **Leverage fixtures** for common setup and authentication
 - **Test both success and failure scenarios** to ensure robust functionality
+- **`--sqlite-mode=wal` does not work on macOS.** The integration test environment runs SigNoz inside a Linux container with the SQLite database file mounted from the macOS host. WAL mode requires shared memory between connections, and connections crossing the VM boundary (macOS host ↔ Linux container) cannot share the WAL index, resulting in `SQLITE_IOERR_SHORT_READ`. WAL mode is tested in CI on Linux only.
````
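To make the shared-memory constraint in that last bullet concrete: WAL mode keeps its index in a `*-shm` sidecar file that every connection maps into memory, and a macOS-host-to-Linux-container file mount cannot provide one coherent mapping. A small sketch (again assuming the mattn/go-sqlite3 driver; `demo.db` is a throwaway name) that makes the sidecar files visible:

```go
package main

import (
	"database/sql"
	"fmt"
	"log"
	"os"

	_ "github.com/mattn/go-sqlite3"
)

func main() {
	db, err := sql.Open("sqlite3", "file:demo.db?_journal_mode=WAL")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Any write forces SQLite to materialize the WAL sidecar files.
	if _, err := db.Exec(`CREATE TABLE IF NOT EXISTS t (id INTEGER)`); err != nil {
		log.Fatal(err)
	}

	// demo.db-wal holds not-yet-checkpointed writes; demo.db-shm is the
	// wal-index, coordinated via mmap'd shared memory. Readers and writers
	// must see the *same* mapping, which a VM file mount cannot guarantee.
	for _, f := range []string{"demo.db", "demo.db-wal", "demo.db-shm"} {
		if st, err := os.Stat(f); err == nil {
			fmt.Printf("%s: %d bytes\n", f, st.Size())
		}
	}
}
```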
```diff
@@ -32,7 +32,7 @@ func (s Seasonality) IsValid() bool {
 }
 
 type AnomaliesRequest struct {
-	Params      qbtypes.QueryRangeRequest
+	Params      *qbtypes.QueryRangeRequest
 	Seasonality Seasonality
 }
@@ -81,7 +81,7 @@ type anomalyQueryParams struct {
 	Past3SeasonQuery qbtypes.QueryRangeRequest
 }
 
-func prepareAnomalyQueryParams(req qbtypes.QueryRangeRequest, seasonality Seasonality) *anomalyQueryParams {
+func prepareAnomalyQueryParams(req *qbtypes.QueryRangeRequest, seasonality Seasonality) *anomalyQueryParams {
 	start := req.Start
 	end := req.End
```
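A note on the `Params *qbtypes.QueryRangeRequest` change above: the request is now shared by pointer through the call chain instead of being copied at each hop. The sketch below uses a hypothetical `Request` struct, not SigNoz's actual type, purely to illustrate the semantics:

```go
package main

import "fmt"

// Request stands in for a large query-range payload; hypothetical type.
type Request struct {
	Start, End int64
	Queries    []string
}

// byValue receives a copy: the whole struct is duplicated on every call,
// and mutations are invisible to the caller.
func byValue(r Request) { r.Start = 0 }

// byPointer shares one instance: cheap to hand down a chain like
// QueryRange -> handleAnomalyQuery -> prepareAnomalyQueryParams, and any
// normalization performed along the way is seen by the caller.
func byPointer(r *Request) { r.Start = 0 }

func main() {
	req := Request{Start: 100, End: 200}
	byValue(req)
	fmt.Println(req.Start) // 100: only the copy was mutated
	byPointer(&req)
	fmt.Println(req.Start) // 0: the shared instance was mutated
}
```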
```diff
@@ -1,132 +0,0 @@
package implcloudprovider

import (
	"context"
	"fmt"
	"net/url"
	"sort"

	"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
	"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
)

type awscloudprovider struct {
	serviceDefinitions cloudintegrationtypes.ServiceDefinitionStore
}

func NewAWSCloudProvider(defStore cloudintegrationtypes.ServiceDefinitionStore) (cloudintegration.CloudProviderModule, error) {
	return &awscloudprovider{serviceDefinitions: defStore}, nil
}

func (provider *awscloudprovider) GetConnectionArtifact(ctx context.Context, account *cloudintegrationtypes.Account, req *cloudintegrationtypes.ConnectionArtifactRequest) (*cloudintegrationtypes.ConnectionArtifact, error) {
	baseURL := fmt.Sprintf(cloudintegrationtypes.CloudFormationQuickCreateBaseURL.StringValue(), req.Config.Aws.DeploymentRegion)
	u, _ := url.Parse(baseURL)

	q := u.Query()
	q.Set("region", req.Config.Aws.DeploymentRegion)
	u.Fragment = "/stacks/quickcreate"

	u.RawQuery = q.Encode()

	q = u.Query()
	q.Set("stackName", cloudintegrationtypes.AgentCloudFormationBaseStackName.StringValue())
	q.Set("templateURL", fmt.Sprintf(cloudintegrationtypes.AgentCloudFormationTemplateS3Path.StringValue(), req.Config.AgentVersion))
	q.Set("param_SigNozIntegrationAgentVersion", req.Config.AgentVersion)
	q.Set("param_SigNozApiUrl", req.Credentials.SigNozAPIURL)
	q.Set("param_SigNozApiKey", req.Credentials.SigNozAPIKey)
	q.Set("param_SigNozAccountId", account.ID.StringValue())
	q.Set("param_IngestionUrl", req.Credentials.IngestionURL)
	q.Set("param_IngestionKey", req.Credentials.IngestionKey)

	return &cloudintegrationtypes.ConnectionArtifact{
		Aws: &cloudintegrationtypes.AWSConnectionArtifact{
			ConnectionURL: u.String() + "?&" + q.Encode(), // this format is required by AWS
		},
	}, nil
}

func (provider *awscloudprovider) ListServiceDefinitions(ctx context.Context) ([]*cloudintegrationtypes.ServiceDefinition, error) {
	return provider.serviceDefinitions.List(ctx, cloudintegrationtypes.CloudProviderTypeAWS)
}

func (provider *awscloudprovider) GetServiceDefinition(ctx context.Context, serviceID cloudintegrationtypes.ServiceID) (*cloudintegrationtypes.ServiceDefinition, error) {
	serviceDef, err := provider.serviceDefinitions.Get(ctx, cloudintegrationtypes.CloudProviderTypeAWS, serviceID)
	if err != nil {
		return nil, err
	}

	// override cloud integration dashboard id
	for index, dashboard := range serviceDef.Assets.Dashboards {
		serviceDef.Assets.Dashboards[index].ID = cloudintegrationtypes.GetCloudIntegrationDashboardID(cloudintegrationtypes.CloudProviderTypeAWS, serviceID.StringValue(), dashboard.ID)
	}

	return serviceDef, nil
}

func (provider *awscloudprovider) BuildIntegrationConfig(
	ctx context.Context,
	account *cloudintegrationtypes.Account,
	services []*cloudintegrationtypes.StorableCloudIntegrationService,
) (*cloudintegrationtypes.ProviderIntegrationConfig, error) {
	// Sort services for deterministic output
	sort.Slice(services, func(i, j int) bool {
		return services[i].Type.StringValue() < services[j].Type.StringValue()
	})

	compiledMetrics := new(cloudintegrationtypes.AWSMetricsStrategy)
	compiledLogs := new(cloudintegrationtypes.AWSLogsStrategy)
	var compiledS3Buckets map[string][]string

	for _, storedSvc := range services {
		svcCfg, err := cloudintegrationtypes.NewServiceConfigFromJSON(cloudintegrationtypes.CloudProviderTypeAWS, storedSvc.Config)
		if err != nil {
			return nil, err
		}

		svcDef, err := provider.GetServiceDefinition(ctx, storedSvc.Type)
		if err != nil {
			return nil, err
		}

		strategy := svcDef.Strategy.AWS
		logsEnabled := cloudintegrationtypes.IsLogsEnabled(cloudintegrationtypes.CloudProviderTypeAWS, svcCfg)

		// S3Sync: logs come directly from configured S3 buckets, not CloudWatch subscriptions
		if storedSvc.Type == cloudintegrationtypes.AWSServiceS3Sync {
			if logsEnabled && svcCfg.AWS.Logs.S3Buckets != nil {
				compiledS3Buckets = svcCfg.AWS.Logs.S3Buckets
			}
			// no need to go ahead as the code block specifically checks for the S3Sync service
			continue
		}

		if logsEnabled && strategy.Logs != nil {
			compiledLogs.Subscriptions = append(compiledLogs.Subscriptions, strategy.Logs.Subscriptions...)
		}

		metricsEnabled := cloudintegrationtypes.IsMetricsEnabled(cloudintegrationtypes.CloudProviderTypeAWS, svcCfg)

		if metricsEnabled && strategy.Metrics != nil {
			compiledMetrics.StreamFilters = append(compiledMetrics.StreamFilters, strategy.Metrics.StreamFilters...)
		}
	}

	awsTelemetry := new(cloudintegrationtypes.AWSCollectionStrategy)

	if len(compiledMetrics.StreamFilters) > 0 {
		awsTelemetry.Metrics = compiledMetrics
	}
	if len(compiledLogs.Subscriptions) > 0 {
		awsTelemetry.Logs = compiledLogs
	}
	if compiledS3Buckets != nil {
		awsTelemetry.S3Buckets = compiledS3Buckets
	}

	return &cloudintegrationtypes.ProviderIntegrationConfig{
		AWS: &cloudintegrationtypes.AWSIntegrationConfig{
			EnabledRegions: account.Config.AWS.Regions,
			Telemetry:      awsTelemetry,
		},
	}, nil
}
```
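One detail in the removed `GetConnectionArtifact` deserves a gloss: CloudFormation quick-create links carry their stack parameters *after* the `#/stacks/quickcreate` fragment, while Go's `net/url` always renders `RawQuery` before the fragment — hence the manual `u.String() + "?&" + q.Encode()` append. A minimal sketch of the shape, with placeholder parameter values:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	u, _ := url.Parse("https://console.aws.amazon.com/cloudformation/home")
	u.RawQuery = url.Values{"region": {"us-east-1"}}.Encode()
	u.Fragment = "/stacks/quickcreate"

	// net/url renders ...home?region=us-east-1#/stacks/quickcreate — the
	// fragment always comes last, so the stack parameters cannot be
	// attached via RawQuery.
	fmt.Println(u.String())

	// Hence the manual append: the parameters AWS reads live after the
	// fragment. These values are placeholders, not real endpoints.
	q := url.Values{}
	q.Set("stackName", "signoz-integration")
	q.Set("templateURL", "https://example.com/template.yaml")
	fmt.Println(u.String() + "?&" + q.Encode())
}
```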
```diff
@@ -1,34 +0,0 @@
package implcloudprovider

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
	"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
)

type azurecloudprovider struct{}

func NewAzureCloudProvider() cloudintegration.CloudProviderModule {
	return &azurecloudprovider{}
}

func (provider *azurecloudprovider) GetConnectionArtifact(ctx context.Context, account *cloudintegrationtypes.Account, req *cloudintegrationtypes.ConnectionArtifactRequest) (*cloudintegrationtypes.ConnectionArtifact, error) {
	panic("implement me")
}

func (provider *azurecloudprovider) ListServiceDefinitions(ctx context.Context) ([]*cloudintegrationtypes.ServiceDefinition, error) {
	panic("implement me")
}

func (provider *azurecloudprovider) GetServiceDefinition(ctx context.Context, serviceID cloudintegrationtypes.ServiceID) (*cloudintegrationtypes.ServiceDefinition, error) {
	panic("implement me")
}

func (provider *azurecloudprovider) BuildIntegrationConfig(
	ctx context.Context,
	account *cloudintegrationtypes.Account,
	services []*cloudintegrationtypes.StorableCloudIntegrationService,
) (*cloudintegrationtypes.ProviderIntegrationConfig, error) {
	panic("implement me")
}
```
```diff
@@ -1,513 +0,0 @@
package implcloudintegration

import (
	"context"
	"fmt"
	"sort"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/gateway"
	"github.com/SigNoz/signoz/pkg/global"
	"github.com/SigNoz/signoz/pkg/licensing"
	"github.com/SigNoz/signoz/pkg/modules/cloudintegration"
	"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/cloudintegrationtypes"
	"github.com/SigNoz/signoz/pkg/types/dashboardtypes"
	"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
	"github.com/SigNoz/signoz/pkg/types/zeustypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/SigNoz/signoz/pkg/zeus"
)

type module struct {
	store             cloudintegrationtypes.Store
	gateway           gateway.Gateway
	zeus              zeus.Zeus
	licensing         licensing.Licensing
	global            global.Global
	serviceAccount    serviceaccount.Module
	cloudProvidersMap map[cloudintegrationtypes.CloudProviderType]cloudintegration.CloudProviderModule
	config            cloudintegration.Config
}

func NewModule(
	store cloudintegrationtypes.Store,
	global global.Global,
	zeus zeus.Zeus,
	gateway gateway.Gateway,
	licensing licensing.Licensing,
	serviceAccount serviceaccount.Module,
	cloudProvidersMap map[cloudintegrationtypes.CloudProviderType]cloudintegration.CloudProviderModule,
	config cloudintegration.Config,
) (cloudintegration.Module, error) {
	return &module{
		store:             store,
		global:            global,
		zeus:              zeus,
		gateway:           gateway,
		licensing:         licensing,
		serviceAccount:    serviceAccount,
		cloudProvidersMap: cloudProvidersMap,
		config:            config,
	}, nil
}

// GetConnectionCredentials returns credentials required to generate connection artifact. eg. apiKey, ingestionKey etc.
// It will return creds it can deduce and return empty value for others.
func (module *module) GetConnectionCredentials(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) (*cloudintegrationtypes.SignozCredentials, error) {
	// get license to get the deployment details
	license, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	var ingestionURL string

	globalConfig := module.global.GetConfig(ctx)
	if globalConfig != nil {
		ingestionURL = globalConfig.IngestionURL
	}

	// get deployment details from zeus, ignore error
	respBytes, _ := module.zeus.GetDeployment(ctx, license.Key)

	var signozAPIURL string

	if len(respBytes) > 0 {
		// parse deployment details, ignore error, if client is asking api url every time check for possible error
		deployment, _ := zeustypes.NewGettableDeployment(respBytes)
		if deployment != nil {
			signozAPIURL = deployment.SignozAPIUrl
		}
	}

	// ignore error
	apiKey, _ := module.getOrCreateAPIKey(ctx, orgID, provider)

	// ignore error
	ingestionKey, _ := module.getOrCreateIngestionKey(ctx, orgID, provider)

	return &cloudintegrationtypes.SignozCredentials{
		SigNozAPIURL: signozAPIURL,
		SigNozAPIKey: apiKey,
		IngestionURL: ingestionURL,
		IngestionKey: ingestionKey,
	}, nil
}

func (module *module) CreateAccount(ctx context.Context, account *cloudintegrationtypes.Account) error {
	_, err := module.licensing.GetActive(ctx, account.OrgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storableCloudIntegration, err := cloudintegrationtypes.NewStorableCloudIntegration(account)
	if err != nil {
		return err
	}

	return module.store.CreateAccount(ctx, storableCloudIntegration)
}

func (module *module) GetConnectionArtifact(ctx context.Context, account *cloudintegrationtypes.Account, req *cloudintegrationtypes.ConnectionArtifactRequest) (*cloudintegrationtypes.ConnectionArtifact, error) {
	cloudProviderModule, err := module.getCloudProvider(account.Provider)
	if err != nil {
		return nil, err
	}

	req.Config.AddAgentVersion(module.config.Agent.Version)

	return cloudProviderModule.GetConnectionArtifact(ctx, account, req)
}

func (module *module) GetAccount(ctx context.Context, orgID valuer.UUID, accountID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) (*cloudintegrationtypes.Account, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storableAccount, err := module.store.GetAccountByID(ctx, orgID, accountID, provider)
	if err != nil {
		return nil, err
	}

	return cloudintegrationtypes.NewAccountFromStorable(storableAccount)
}

// ListAccounts return only agent connected accounts.
func (module *module) ListAccounts(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) ([]*cloudintegrationtypes.Account, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storableAccounts, err := module.store.ListConnectedAccounts(ctx, orgID, provider)
	if err != nil {
		return nil, err
	}

	return cloudintegrationtypes.NewAccountsFromStorables(storableAccounts)
}

func (module *module) AgentCheckIn(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, req *cloudintegrationtypes.AgentCheckInRequest) (*cloudintegrationtypes.AgentCheckInResponse, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	connectedAccount, err := module.store.GetConnectedAccount(ctx, orgID, provider, req.ProviderAccountID)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return nil, err
	}

	// If a different integration is already connected to this provider account ID, reject the check-in.
	// Allow re-check-in from the same integration (e.g. agent restarting).
	if connectedAccount != nil && connectedAccount.ID != req.CloudIntegrationID {
		errMessage := fmt.Sprintf("provider account id %s is already connected to cloud integration id %s", req.ProviderAccountID, connectedAccount.ID)
		return nil, errors.New(errors.TypeAlreadyExists, cloudintegrationtypes.ErrCodeCloudIntegrationAlreadyConnected, errMessage)
	}

	account, err := module.store.GetAccountByID(ctx, orgID, req.CloudIntegrationID, provider)
	if err != nil {
		return nil, err
	}

	// update account with cloud provider account id and agent report (heartbeat)
	account.Update(&req.ProviderAccountID, cloudintegrationtypes.NewAgentReport(req.Data))

	err = module.store.UpdateAccount(ctx, account)
	if err != nil {
		return nil, err
	}

	// If account has been removed (disconnected), return a minimal response with empty integration config.
	// The agent doesn't act on config for removed accounts.
	if account.RemovedAt != nil {
		return &cloudintegrationtypes.AgentCheckInResponse{
			CloudIntegrationID: account.ID.StringValue(),
			ProviderAccountID:  req.ProviderAccountID,
			IntegrationConfig:  new(cloudintegrationtypes.ProviderIntegrationConfig),
			RemovedAt:          account.RemovedAt,
		}, nil
	}

	// Get account as domain object for config access (enabled regions, etc.)
	domainAccount, err := cloudintegrationtypes.NewAccountFromStorable(account)
	if err != nil {
		return nil, err
	}

	cloudProvider, err := module.getCloudProvider(provider)
	if err != nil {
		return nil, err
	}

	storedServices, err := module.store.ListServices(ctx, req.CloudIntegrationID)
	if err != nil {
		return nil, err
	}

	// Delegate integration config building entirely to the provider module
	integrationConfig, err := cloudProvider.BuildIntegrationConfig(ctx, domainAccount, storedServices)
	if err != nil {
		return nil, err
	}

	return &cloudintegrationtypes.AgentCheckInResponse{
		CloudIntegrationID: account.ID.StringValue(),
		ProviderAccountID:  req.ProviderAccountID,
		IntegrationConfig:  integrationConfig,
		RemovedAt:          account.RemovedAt,
	}, nil
}

func (module *module) UpdateAccount(ctx context.Context, account *cloudintegrationtypes.Account) error {
	_, err := module.licensing.GetActive(ctx, account.OrgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	storableAccount, err := cloudintegrationtypes.NewStorableCloudIntegration(account)
	if err != nil {
		return err
	}

	return module.store.UpdateAccount(ctx, storableAccount)
}

func (module *module) DisconnectAccount(ctx context.Context, orgID valuer.UUID, accountID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) error {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return module.store.RemoveAccount(ctx, orgID, accountID, provider)
}

func (module *module) ListServicesMetadata(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType, integrationID *valuer.UUID) ([]*cloudintegrationtypes.ServiceMetadata, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	cloudProvider, err := module.getCloudProvider(provider)
	if err != nil {
		return nil, err
	}

	serviceDefinitions, err := cloudProvider.ListServiceDefinitions(ctx)
	if err != nil {
		return nil, err
	}

	enabledServiceIDs := map[string]bool{}
	if integrationID != nil {
		storedServices, err := module.store.ListServices(ctx, *integrationID)
		if err != nil {
			return nil, err
		}

		for _, svc := range storedServices {
			serviceConfig, err := cloudintegrationtypes.NewServiceConfigFromJSON(provider, svc.Config)
			if err != nil {
				return nil, err
			}

			if cloudintegrationtypes.IsServiceEnabled(provider, serviceConfig) {
				enabledServiceIDs[svc.Type.StringValue()] = true
			}
		}
	}

	resp := make([]*cloudintegrationtypes.ServiceMetadata, 0, len(serviceDefinitions))
	for _, serviceDefinition := range serviceDefinitions {
		resp = append(resp, cloudintegrationtypes.NewServiceMetadata(*serviceDefinition, enabledServiceIDs[serviceDefinition.ID]))
	}

	return resp, nil
}

func (module *module) GetService(ctx context.Context, orgID valuer.UUID, integrationID *valuer.UUID, serviceID cloudintegrationtypes.ServiceID, provider cloudintegrationtypes.CloudProviderType) (*cloudintegrationtypes.Service, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	cloudProvider, err := module.getCloudProvider(provider)
	if err != nil {
		return nil, err
	}

	serviceDefinition, err := cloudProvider.GetServiceDefinition(ctx, serviceID)
	if err != nil {
		return nil, err
	}

	var integrationService *cloudintegrationtypes.CloudIntegrationService

	if integrationID != nil {
		storedService, err := module.store.GetServiceByServiceID(ctx, *integrationID, serviceID)
		if err != nil && !errors.Ast(err, errors.TypeNotFound) {
			return nil, err
		}
		if storedService != nil {
			serviceConfig, err := cloudintegrationtypes.NewServiceConfigFromJSON(provider, storedService.Config)
			if err != nil {
				return nil, err
			}

			integrationService = cloudintegrationtypes.NewCloudIntegrationServiceFromStorable(storedService, serviceConfig)
		}
	}

	return cloudintegrationtypes.NewService(*serviceDefinition, integrationService), nil
}

func (module *module) CreateService(ctx context.Context, orgID valuer.UUID, service *cloudintegrationtypes.CloudIntegrationService, provider cloudintegrationtypes.CloudProviderType) error {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	cloudProvider, err := module.getCloudProvider(provider)
	if err != nil {
		return err
	}

	serviceDefinition, err := cloudProvider.GetServiceDefinition(ctx, service.Type)
	if err != nil {
		return err
	}

	configJSON, err := service.Config.ToJSON(provider, service.Type, &serviceDefinition.SupportedSignals)
	if err != nil {
		return err
	}

	return module.store.CreateService(ctx, cloudintegrationtypes.NewStorableCloudIntegrationService(service, string(configJSON)))
}

func (module *module) UpdateService(ctx context.Context, orgID valuer.UUID, integrationService *cloudintegrationtypes.CloudIntegrationService, provider cloudintegrationtypes.CloudProviderType) error {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	cloudProvider, err := module.getCloudProvider(provider)
	if err != nil {
		return err
	}

	serviceDefinition, err := cloudProvider.GetServiceDefinition(ctx, integrationService.Type)
	if err != nil {
		return err
	}

	configJSON, err := integrationService.Config.ToJSON(provider, integrationService.Type, &serviceDefinition.SupportedSignals)
	if err != nil {
		return err
	}

	storableService := cloudintegrationtypes.NewStorableCloudIntegrationService(integrationService, string(configJSON))

	return module.store.UpdateService(ctx, storableService)
}

func (module *module) GetDashboardByID(ctx context.Context, orgID valuer.UUID, id string) (*dashboardtypes.Dashboard, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	_, _, _, err = cloudintegrationtypes.ParseCloudIntegrationDashboardID(id)
	if err != nil {
		return nil, err
	}

	allDashboards, err := module.listDashboards(ctx, orgID)
	if err != nil {
		return nil, err
	}

	for _, d := range allDashboards {
		if d.ID == id {
			return d, nil
		}
	}

	return nil, errors.New(errors.TypeNotFound, cloudintegrationtypes.ErrCodeCloudIntegrationNotFound, "cloud integration dashboard not found")
}

func (module *module) ListDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
	_, err := module.licensing.GetActive(ctx, orgID)
	if err != nil {
		return nil, errors.New(errors.TypeLicenseUnavailable, errors.CodeLicenseUnavailable, "a valid license is not available").WithAdditional("this feature requires a valid license").WithAdditional(err.Error())
	}

	return module.listDashboards(ctx, orgID)
}

func (module *module) getCloudProvider(provider cloudintegrationtypes.CloudProviderType) (cloudintegration.CloudProviderModule, error) {
	if cloudProviderModule, ok := module.cloudProvidersMap[provider]; ok {
		return cloudProviderModule, nil
	}

	return nil, errors.NewInvalidInputf(cloudintegrationtypes.ErrCodeCloudProviderInvalidInput, "invalid cloud provider: %s", provider.StringValue())
}

func (module *module) getOrCreateIngestionKey(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) (string, error) {
	keyName := cloudintegrationtypes.NewIngestionKeyName(provider)

	result, err := module.gateway.SearchIngestionKeysByName(ctx, orgID, keyName, 1, 10)
	if err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't search ingestion keys")
	}

	// ideally there should be only one key per cloud integration provider
	if len(result.Keys) > 0 {
		return result.Keys[0].Value, nil
	}

	createdIngestionKey, err := module.gateway.CreateIngestionKey(ctx, orgID, keyName, []string{"integration"}, time.Time{})
	if err != nil {
		return "", errors.WrapInternalf(err, errors.CodeInternal, "couldn't create ingestion key")
	}

	return createdIngestionKey.Value, nil
}

func (module *module) getOrCreateAPIKey(ctx context.Context, orgID valuer.UUID, provider cloudintegrationtypes.CloudProviderType) (string, error) {
	domain := module.serviceAccount.Config().Email.Domain
	serviceAccount := serviceaccounttypes.NewServiceAccount("integration", domain, serviceaccounttypes.ServiceAccountStatusActive, orgID)
	serviceAccount, err := module.serviceAccount.GetOrCreate(ctx, orgID, serviceAccount)
	if err != nil {
		return "", err
	}
	err = module.serviceAccount.SetRoleByName(ctx, orgID, serviceAccount.ID, authtypes.SigNozViewerRoleName)
	if err != nil {
		return "", err
	}

	factorAPIKey, err := serviceAccount.NewFactorAPIKey(provider.StringValue(), 0)
	if err != nil {
		return "", err
	}

	factorAPIKey, err = module.serviceAccount.GetOrCreateFactorAPIKey(ctx, factorAPIKey)
	if err != nil {
		return "", err
	}
	return factorAPIKey.Key, nil
}

func (module *module) listDashboards(ctx context.Context, orgID valuer.UUID) ([]*dashboardtypes.Dashboard, error) {
	var allDashboards []*dashboardtypes.Dashboard

	for provider := range module.cloudProvidersMap {
		cloudProvider, err := module.getCloudProvider(provider)
		if err != nil {
			return nil, err
		}

		connectedAccounts, err := module.store.ListConnectedAccounts(ctx, orgID, provider)
		if err != nil {
			return nil, err
		}

		for _, storableAccount := range connectedAccounts {
			storedServices, err := module.store.ListServices(ctx, storableAccount.ID)
			if err != nil {
				return nil, err
			}

			for _, storedSvc := range storedServices {
				serviceConfig, err := cloudintegrationtypes.NewServiceConfigFromJSON(provider, storedSvc.Config)
				if err != nil || !cloudintegrationtypes.IsMetricsEnabled(provider, serviceConfig) {
					continue
				}

				svcDef, err := cloudProvider.GetServiceDefinition(ctx, storedSvc.Type)
				if err != nil || svcDef == nil {
					continue
				}

				dashboards := cloudintegrationtypes.GetDashboardsFromAssets(
					storedSvc.Type.StringValue(),
					orgID,
					provider,
					storableAccount.CreatedAt,
					svcDef.Assets,
				)
				allDashboards = append(allDashboards, dashboards...)
			}
		}
	}

	sort.Slice(allDashboards, func(i, j int) bool {
		return allDashboards[i].ID < allDashboards[j].ID
	})

	return allDashboards, nil
}
```
```diff
@@ -65,7 +65,7 @@ func (h *handler) QueryRange(rw http.ResponseWriter, req *http.Request) {
 	}
 
 	if anomalyQuery, ok := queryRangeRequest.IsAnomalyRequest(); ok {
-		anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, queryRangeRequest)
+		anomalies, err := h.handleAnomalyQuery(ctx, orgID, anomalyQuery, &queryRangeRequest)
 		if err != nil {
 			render.Error(rw, errors.NewInternalf(errors.CodeInternal, "failed to get anomalies: %v", err))
 			return
@@ -149,7 +149,7 @@ func (h *handler) createAnomalyProvider(seasonality anomalyV2.Seasonality) anoma
 	}
 }
 
-func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
+func (h *handler) handleAnomalyQuery(ctx context.Context, orgID valuer.UUID, anomalyQuery *qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation], queryRangeRequest *qbtypes.QueryRangeRequest) (*anomalyV2.AnomaliesResponse, error) {
 	seasonality := extractSeasonality(anomalyQuery)
 	provider := h.createAnomalyProvider(seasonality)
```
```diff
@@ -49,7 +49,6 @@ import (
 	opAmpModel "github.com/SigNoz/signoz/pkg/query-service/app/opamp/model"
 	baseconst "github.com/SigNoz/signoz/pkg/query-service/constants"
 	"github.com/SigNoz/signoz/pkg/query-service/healthcheck"
-	baseint "github.com/SigNoz/signoz/pkg/query-service/interfaces"
 	baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
 	"github.com/SigNoz/signoz/pkg/query-service/utils"
 )
@@ -99,7 +98,6 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
 	)
 
 	rm, err := makeRulesManager(
-		reader,
 		signoz.Cache,
 		signoz.Alertmanager,
 		signoz.SQLStore,
@@ -345,7 +343,7 @@ func (s *Server) Stop(ctx context.Context) error {
 	return nil
 }
 
-func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, metadataStore telemetrytypes.MetadataStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, ruleStateHistoryModule rulestatehistory.Module, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
+func makeRulesManager(cache cache.Cache, alertmanager alertmanager.Alertmanager, sqlstore sqlstore.SQLStore, telemetryStore telemetrystore.TelemetryStore, metadataStore telemetrytypes.MetadataStore, prometheus prometheus.Prometheus, orgGetter organization.Getter, ruleStateHistoryModule rulestatehistory.Module, querier querier.Querier, providerSettings factory.ProviderSettings, queryParser queryparser.QueryParser) (*baserules.Manager, error) {
 	ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
 	maintenanceStore := sqlrulestore.NewMaintenanceStore(sqlstore)
 	// create manager opts
@@ -354,7 +352,6 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
 		MetadataStore: metadataStore,
 		Prometheus:    prometheus,
 		Context:       context.Background(),
-		Reader:        ch,
 		Querier:       querier,
 		Logger:        providerSettings.Logger,
 		Cache:         cache,
@@ -365,7 +362,7 @@ func makeRulesManager(ch baseint.Reader, cache cache.Cache, alertmanager alertma
 		OrgGetter:              orgGetter,
 		RuleStore:              ruleStore,
 		MaintenanceStore:       maintenanceStore,
-		SqlStore:               sqlstore,
+		SQLStore:               sqlstore,
 		QueryParser:            queryParser,
 		RuleStateHistoryModule: ruleStateHistoryModule,
 	}
```
@@ -5,58 +5,34 @@ import (
"encoding/json"
"fmt"
"log/slog"
"math"
"strings"
"sync"
"time"

"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/query-service/common"
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/transition"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/types/rulestatehistorytypes"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"

querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"

"github.com/SigNoz/signoz/pkg/units"

baserules "github.com/SigNoz/signoz/pkg/query-service/rules"

querierV5 "github.com/SigNoz/signoz/pkg/querier"

anomalyV2 "github.com/SigNoz/signoz/ee/anomaly"
"github.com/SigNoz/signoz/ee/anomaly"

qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

const (
RuleTypeAnomaly = "anomaly_rule"
)

type AnomalyRule struct {
*baserules.BaseRule

mtx sync.Mutex

reader interfaces.Reader
// querier is used for alerts migrated after the introduction of new query builder
querier querier.Querier

// querierV2 is used for alerts created after the introduction of new metrics query builder
querierV2 interfaces.Querier

// querierV5 is used for alerts migrated after the introduction of new query builder
querierV5 querierV5.Querier

provider anomaly.Provider
providerV2 anomalyV2.Provider
provider anomaly.Provider

version string
logger *slog.Logger
@@ -70,18 +46,16 @@ func NewAnomalyRule(
id string,
orgID valuer.UUID,
p *ruletypes.PostableRule,
reader interfaces.Reader,
querierV5 querierV5.Querier,
querier querier.Querier,
logger *slog.Logger,
cache cache.Cache,
opts ...baserules.RuleOption,
) (*AnomalyRule, error) {

logger.Info("creating new AnomalyRule", "rule_id", id)
logger.Info("creating new AnomalyRule", slog.String("rule.id", id))

opts = append(opts, baserules.WithLogger(logger))

baseRule, err := baserules.NewBaseRule(id, orgID, p, reader, opts...)
baseRule, err := baserules.NewBaseRule(id, orgID, p, opts...)
if err != nil {
return nil, err
}
@@ -101,93 +75,38 @@ func NewAnomalyRule(
t.seasonality = anomaly.SeasonalityDaily
}

logger.Info("using seasonality", "seasonality", t.seasonality.String())
logger.Info("using seasonality", slog.String("rule.id", id), slog.String("rule.seasonality", t.seasonality.StringValue()))

querierOptsV2 := querierV2.QuerierOptions{
Reader: reader,
Cache: cache,
KeyGenerator: queryBuilder.NewKeyGenerator(),
}

t.querierV2 = querierV2.NewQuerier(querierOptsV2)
t.reader = reader
if t.seasonality == anomaly.SeasonalityHourly {
t.provider = anomaly.NewHourlyProvider(
anomaly.WithCache[*anomaly.HourlyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.HourlyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.HourlyProvider](reader),
anomaly.WithQuerier[*anomaly.HourlyProvider](querier),
anomaly.WithLogger[*anomaly.HourlyProvider](logger),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.provider = anomaly.NewDailyProvider(
anomaly.WithCache[*anomaly.DailyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.DailyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.DailyProvider](reader),
anomaly.WithQuerier[*anomaly.DailyProvider](querier),
anomaly.WithLogger[*anomaly.DailyProvider](logger),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.provider = anomaly.NewWeeklyProvider(
anomaly.WithCache[*anomaly.WeeklyProvider](cache),
anomaly.WithKeyGenerator[*anomaly.WeeklyProvider](queryBuilder.NewKeyGenerator()),
anomaly.WithReader[*anomaly.WeeklyProvider](reader),
anomaly.WithQuerier[*anomaly.WeeklyProvider](querier),
anomaly.WithLogger[*anomaly.WeeklyProvider](logger),
)
}

if t.seasonality == anomaly.SeasonalityHourly {
t.providerV2 = anomalyV2.NewHourlyProvider(
anomalyV2.WithQuerier[*anomalyV2.HourlyProvider](querierV5),
anomalyV2.WithLogger[*anomalyV2.HourlyProvider](logger),
)
} else if t.seasonality == anomaly.SeasonalityDaily {
t.providerV2 = anomalyV2.NewDailyProvider(
anomalyV2.WithQuerier[*anomalyV2.DailyProvider](querierV5),
anomalyV2.WithLogger[*anomalyV2.DailyProvider](logger),
)
} else if t.seasonality == anomaly.SeasonalityWeekly {
t.providerV2 = anomalyV2.NewWeeklyProvider(
anomalyV2.WithQuerier[*anomalyV2.WeeklyProvider](querierV5),
anomalyV2.WithLogger[*anomalyV2.WeeklyProvider](logger),
)
}

t.querierV5 = querierV5
t.querier = querier
t.version = p.Version
t.logger = logger
return &t, nil
}

func (r *AnomalyRule) Type() ruletypes.RuleType {
return RuleTypeAnomaly
return ruletypes.RuleTypeAnomaly
}

func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) (*v3.QueryRangeParamsV3, error) {
func (r *AnomalyRule) prepareQueryRange(ctx context.Context, ts time.Time) *qbtypes.QueryRangeRequest {

r.logger.InfoContext(
ctx, "prepare query range request v4", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds(),
)

st, en := r.Timestamps(ts)
start := st.UnixMilli()
end := en.UnixMilli()

compositeQuery := r.Condition().CompositeQuery

if compositeQuery.PanelType != v3.PanelTypeGraph {
compositeQuery.PanelType = v3.PanelTypeGraph
}

// default mode
return &v3.QueryRangeParamsV3{
Start: start,
End: end,
Step: int64(math.Max(float64(common.MinAllowedStepInterval(start, end)), 60)),
CompositeQuery: compositeQuery,
Variables: make(map[string]interface{}, 0),
NoCache: false,
}, nil
}

func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*qbtypes.QueryRangeRequest, error) {

r.logger.InfoContext(ctx, "prepare query range request v5", "ts", ts.UnixMilli(), "eval_window", r.EvalWindow().Milliseconds(), "eval_delay", r.EvalDelay().Milliseconds())
r.logger.InfoContext(ctx, "prepare query range request", slog.String("rule.id", r.ID()), slog.Int64("ts", ts.UnixMilli()), slog.Int64("eval.window_ms", r.EvalWindow().Milliseconds()), slog.Int64("eval.delay_ms", r.EvalDelay().Milliseconds()))

startTs, endTs := r.Timestamps(ts)
start, end := startTs.UnixMilli(), endTs.UnixMilli()
@@ -203,25 +122,14 @@ func (r *AnomalyRule) prepareQueryRangeV5(ctx context.Context, ts time.Time) (*q
}
req.CompositeQuery.Queries = make([]qbtypes.QueryEnvelope, len(r.Condition().CompositeQuery.Queries))
copy(req.CompositeQuery.Queries, r.Condition().CompositeQuery.Queries)
return req, nil
}

func (r *AnomalyRule) GetSelectedQuery() string {
return r.Condition().GetSelectedQueryName()
return req
}

func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {

params, err := r.prepareQueryRange(ctx, ts)
if err != nil {
return nil, err
}
err = r.PopulateTemporality(ctx, orgID, params)
if err != nil {
return nil, fmt.Errorf("internal error while setting temporality")
}
params := r.prepareQueryRange(ctx, ts)

anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.GetAnomaliesRequest{
anomalies, err := r.provider.GetAnomalies(ctx, orgID, &anomaly.AnomaliesRequest{
Params: params,
Seasonality: r.seasonality,
})
@@ -229,87 +137,43 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
return nil, err
}

var queryResult *v3.Result
var queryResult *qbtypes.TimeSeriesData
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
if result.QueryName == r.SelectedQuery(ctx) {
queryResult = result
break
}
}

hasData := len(queryResult.AnomalyScores) > 0
if queryResult == nil {
r.logger.WarnContext(ctx, "nil qb result", slog.String("rule.id", r.ID()), slog.Int64("ts", ts.UnixMilli()))
return ruletypes.Vector{}, nil
}

hasData := len(queryResult.Aggregations) > 0 &&
queryResult.Aggregations[0] != nil &&
len(queryResult.Aggregations[0].AnomalyScores) > 0

if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
} else if !hasData {
r.logger.WarnContext(ctx, "no anomaly result", slog.String("rule.id", r.ID()))
return ruletypes.Vector{}, nil
}

var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))

for _, series := range queryResult.AnomalyScores {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
if err != nil {
return nil, err
}
resultVector = append(resultVector, results...)
}
return resultVector, nil
}

func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID, ts time.Time) (ruletypes.Vector, error) {

params, err := r.prepareQueryRangeV5(ctx, ts)
if err != nil {
return nil, err
}

anomalies, err := r.providerV2.GetAnomalies(ctx, orgID, &anomalyV2.AnomaliesRequest{
Params: *params,
Seasonality: anomalyV2.Seasonality{String: valuer.NewString(r.seasonality.String())},
})
if err != nil {
return nil, err
}

var qbResult *qbtypes.TimeSeriesData
for _, result := range anomalies.Results {
if result.QueryName == r.GetSelectedQuery() {
qbResult = result
break
}
}

if qbResult == nil {
r.logger.WarnContext(ctx, "nil qb result", "ts", ts.UnixMilli())
}

queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)

hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}

var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
r.logger.InfoContext(ctx, "anomaly scores", "scores", string(scoresJSON))
scoresJSON, _ := json.Marshal(queryResult.Aggregations[0].AnomalyScores)
// TODO(srikanthccv): this could be noisy but we do this to answer false alert requests
r.logger.InfoContext(ctx, "anomaly scores", slog.String("rule.id", r.ID()), slog.String("anomaly.scores", string(scoresJSON)))

// Filter out new series if newGroupEvalDelay is configured
seriesToProcess := queryResult.AnomalyScores
seriesToProcess := queryResult.Aggregations[0].AnomalyScores
if r.ShouldSkipNewGroups() {
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, seriesToProcess)
// In case of error we log the error and continue with the original series
if filterErr != nil {
r.logger.ErrorContext(ctx, "Error filtering new series, ", errors.Attr(filterErr), "rule_name", r.Name())
r.logger.ErrorContext(ctx, "error filtering new series", slog.String("rule.id", r.ID()), errors.Attr(filterErr))
} else {
seriesToProcess = filteredSeries
}
@@ -317,10 +181,10 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,

for _, series := range seriesToProcess {
if !r.Condition().ShouldEval(series) {
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", "ruleid", r.ID(), "numPoints", len(series.Points), "requiredPoints", r.Condition().RequiredNumPoints)
r.logger.InfoContext(ctx, "not enough data points to evaluate series, skipping", slog.String("rule.id", r.ID()), slog.Int("series.num_points", len(series.Values)), slog.Int("series.required_points", r.Condition().RequiredNumPoints))
continue
}
results, err := r.Threshold.Eval(*series, r.Unit(), ruletypes.EvalData{
results, err := r.Threshold.Eval(series, r.Unit(), ruletypes.EvalData{
ActiveAlerts: r.ActiveAlertsLabelFP(),
SendUnmatched: r.ShouldSendUnmatched(),
})
@@ -341,13 +205,9 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
var res ruletypes.Vector
var err error

if r.version == "v5" {
r.logger.InfoContext(ctx, "running v5 query")
res, err = r.buildAndRunQueryV5(ctx, r.OrgID(), ts)
} else {
r.logger.InfoContext(ctx, "running v4 query")
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)
}
r.logger.InfoContext(ctx, "running query", slog.String("rule.id", r.ID()))
res, err = r.buildAndRunQuery(ctx, r.OrgID(), ts)

if err != nil {
return 0, err
}
@@ -371,7 +231,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
}
value := valueFormatter.Format(smpl.V, r.Unit())
threshold := valueFormatter.Format(smpl.Target, smpl.TargetUnit)
r.logger.DebugContext(ctx, "Alert template data for rule", "rule_name", r.Name(), "formatter", valueFormatter.Name(), "value", value, "threshold", threshold)
r.logger.DebugContext(ctx, "alert template data for rule", slog.String("rule.id", r.ID()), slog.String("formatter.name", valueFormatter.Name()), slog.String("alert.value", value), slog.String("alert.threshold", threshold))

tmplData := ruletypes.AlertTemplateData(l, value, threshold)
// Inject some convenience variables that are easier to remember for users
@@ -386,35 +246,34 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
defs+text,
"__alert_"+r.Name(),
tmplData,
times.Time(timestamp.FromTime(ts)),
nil,
)
result, err := tmpl.Expand()
if err != nil {
result = fmt.Sprintf("<error expanding template: %s>", err)
r.logger.ErrorContext(ctx, "Expanding alert template failed", errors.Attr(err), "data", tmplData, "rule_name", r.Name())
r.logger.ErrorContext(ctx, "expanding alert template failed", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.template_data", tmplData))
}
return result
}

lb := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel)
resultLabels := labels.NewBuilder(smpl.Metric).Del(labels.MetricNameLabel).Del(labels.TemporalityLabel).Labels()
lb := ruletypes.NewBuilder(smpl.Metric...).Del(ruletypes.MetricNameLabel).Del(ruletypes.TemporalityLabel)
resultLabels := ruletypes.NewBuilder(smpl.Metric...).Del(ruletypes.MetricNameLabel).Del(ruletypes.TemporalityLabel).Labels()

for name, value := range r.Labels().Map() {
lb.Set(name, expand(value))
}

lb.Set(labels.AlertNameLabel, r.Name())
lb.Set(labels.AlertRuleIdLabel, r.ID())
lb.Set(labels.RuleSourceLabel, r.GeneratorURL())
lb.Set(ruletypes.AlertNameLabel, r.Name())
lb.Set(ruletypes.AlertRuleIDLabel, r.ID())
lb.Set(ruletypes.RuleSourceLabel, r.GeneratorURL())

annotations := make(labels.Labels, 0, len(r.Annotations().Map()))
annotations := make(ruletypes.Labels, 0, len(r.Annotations().Map()))
for name, value := range r.Annotations().Map() {
annotations = append(annotations, labels.Label{Name: name, Value: expand(value)})
annotations = append(annotations, ruletypes.Label{Name: name, Value: expand(value)})
}
if smpl.IsMissing {
lb.Set(labels.AlertNameLabel, "[No data] "+r.Name())
lb.Set(labels.NoDataLabel, "true")
lb.Set(ruletypes.AlertNameLabel, "[No data] "+r.Name())
lb.Set(ruletypes.NoDataLabel, "true")
}

lbs := lb.Labels()
@@ -422,17 +281,17 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
resultFPs[h] = struct{}{}

if _, ok := alerts[h]; ok {
r.logger.ErrorContext(ctx, "the alert query returns duplicate records", "rule_id", r.ID(), "alert", alerts[h])
err = fmt.Errorf("duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
r.logger.ErrorContext(ctx, "the alert query returns duplicate records", slog.String("rule.id", r.ID()), slog.Any("alert", alerts[h]))
err = errors.NewInternalf(errors.CodeInternal, "duplicate alert found, vector contains metrics with the same labelset after applying alert labels")
return 0, err
}

alerts[h] = &ruletypes.Alert{
Labels: lbs,
QueryResultLables: resultLabels,
QueryResultLabels: resultLabels,
Annotations: annotations,
ActiveAt: ts,
State: model.StatePending,
State: ruletypes.StatePending,
Value: smpl.V,
GeneratorURL: r.GeneratorURL(),
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
@@ -441,12 +300,12 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
}
}

r.logger.InfoContext(ctx, "number of alerts found", "rule_name", r.Name(), "alerts_count", len(alerts))
r.logger.InfoContext(ctx, "number of alerts found", slog.String("rule.id", r.ID()), slog.Int("alert.count", len(alerts)))
// alerts[h] is ready, add or update active list now
for h, a := range alerts {
// Check whether we already have alerting state for the identifying label set.
// Update the last value and annotations if so, create a new alert entry otherwise.
if alert, ok := r.Active[h]; ok && alert.State != model.StateInactive {
if alert, ok := r.Active[h]; ok && alert.State != ruletypes.StateInactive {

alert.Value = a.Value
alert.Annotations = a.Annotations
@@ -462,76 +321,76 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
r.Active[h] = a
}

itemsToAdd := []model.RuleStateHistory{}
itemsToAdd := []rulestatehistorytypes.RuleStateHistory{}

// Check if any pending alerts should be removed or fire now. Write out alert timeseries.
for fp, a := range r.Active {
labelsJSON, err := json.Marshal(a.QueryResultLables)
labelsJSON, err := json.Marshal(a.QueryResultLabels)
if err != nil {
r.logger.ErrorContext(ctx, "error marshaling labels", errors.Attr(err), "labels", a.Labels)
r.logger.ErrorContext(ctx, "error marshaling labels", slog.String("rule.id", r.ID()), errors.Attr(err), slog.Any("alert.labels", a.Labels))
}
if _, ok := resultFPs[fp]; !ok {
// If the alert was previously firing, keep it around for a given
// retention time so it is reported as resolved to the AlertManager.
if a.State == model.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
if a.State == ruletypes.StatePending || (!a.ResolvedAt.IsZero() && ts.Sub(a.ResolvedAt) > ruletypes.ResolvedRetention) {
delete(r.Active, fp)
}
if a.State != model.StateInactive {
a.State = model.StateInactive
if a.State != ruletypes.StateInactive {
a.State = ruletypes.StateInactive
a.ResolvedAt = ts
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: model.StateInactive,
State: ruletypes.StateInactive,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Labels: rulestatehistorytypes.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLabels.Hash(),
Value: a.Value,
})
}
continue
}

if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
a.State = model.StateFiring
if a.State == ruletypes.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
a.State = ruletypes.StateFiring
a.FiredAt = ts
state := model.StateFiring
state := ruletypes.StateFiring
if a.Missing {
state = model.StateNoData
state = ruletypes.StateNoData
}
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Labels: rulestatehistorytypes.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLabels.Hash(),
Value: a.Value,
})
}

// We need to change firing alert to recovering if the returned sample meets recovery threshold
changeFiringToRecovering := a.State == model.StateFiring && a.IsRecovering
changeFiringToRecovering := a.State == ruletypes.StateFiring && a.IsRecovering
// We need to change recovering alerts to firing if the returned sample meets target threshold
changeRecoveringToFiring := a.State == model.StateRecovering && !a.IsRecovering && !a.Missing
changeRecoveringToFiring := a.State == ruletypes.StateRecovering && !a.IsRecovering && !a.Missing
// in any of the above case we need to update the status of alert
if changeFiringToRecovering || changeRecoveringToFiring {
state := model.StateRecovering
state := ruletypes.StateRecovering
if changeRecoveringToFiring {
state = model.StateFiring
state = ruletypes.StateFiring
}
a.State = state
r.logger.DebugContext(ctx, "converting alert state", "name", r.Name(), "state", state)
itemsToAdd = append(itemsToAdd, model.RuleStateHistory{
r.logger.DebugContext(ctx, "converting alert state", slog.String("rule.id", r.ID()), slog.Any("alert.state", state))
itemsToAdd = append(itemsToAdd, rulestatehistorytypes.RuleStateHistory{
RuleID: r.ID(),
RuleName: r.Name(),
State: state,
StateChanged: true,
UnixMilli: ts.UnixMilli(),
Labels: model.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLables.Hash(),
Labels: rulestatehistorytypes.LabelsString(labelsJSON),
Fingerprint: a.QueryResultLabels.Hash(),
Value: a.Value,
})
}
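The logging rewrites in this file replace loose key-value pairs (e.g. "rule_id", id) with typed slog attributes and namespaced keys (slog.String("rule.id", id)). A self-contained sketch of the pattern, using only the Go standard library:

package main

import (
	"log/slog"
	"os"
)

func main() {
	logger := slog.New(slog.NewJSONHandler(os.Stdout, nil))

	// Loose arguments: keys are bare strings, easy to mistype or misalign.
	logger.Info("creating new AnomalyRule", "rule_id", "rule-1")

	// Typed attributes with dotted, namespaced keys, as in the diff above:
	// each pair is an explicit slog.Attr with a declared value type.
	logger.Info("creating new AnomalyRule", slog.String("rule.id", "rule-1"))
	logger.Info("prepare query range request",
		slog.String("rule.id", "rule-1"),
		slog.Int64("eval.window_ms", 300000),
	)
}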
@@ -2,21 +2,19 @@ package rules

import (
"context"
"log/slog"
"testing"
"time"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"

"github.com/SigNoz/signoz/ee/anomaly"
)

// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
@@ -24,13 +22,13 @@ import (
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
responses []*anomaly.AnomaliesResponse
callCount int
}

func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.AnomaliesRequest) (*anomaly.AnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
return &anomaly.AnomaliesResponse{Results: []*qbtypes.TimeSeriesData{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
@@ -49,45 +47,46 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
postableRule := ruletypes.PostableRule{
AlertName: "Test anomaly no data",
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
RuleType: ruletypes.RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
Expression: "A",
DataSource: v3.DataSourceMetrics,
Temporality: v3.Unspecified,
CompareOperator: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
Target: &target,
CompositeQuery: &ruletypes.AlertCompositeQuery{
QueryType: ruletypes.QueryTypeBuilder,
Queries: []qbtypes.QueryEnvelope{{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "A",
Signal: telemetrytypes.SignalMetrics,
},
},
}},
},
SelectedQuery: "A",
Seasonality: "daily",
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{
Name: "Test anomaly no data",
TargetValue: &target,
MatchType: ruletypes.AtleastOnce,
CompareOp: ruletypes.ValueIsAbove,
Name: "Test anomaly no data",
TargetValue: &target,
MatchType: ruletypes.AtleastOnce,
CompareOperator: ruletypes.ValueIsAbove,
}},
},
},
}

responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomaly.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{{
AnomalyScores: []*qbtypes.TimeSeries{},
}},
},
},
}
@@ -115,23 +114,17 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
t.Run(c.description, func(t *testing.T) {
postableRule.RuleCondition.AlertOnAbsent = c.alertOnAbsent

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
options := clickhouseReader.NewOptions("primaryNamespace")
reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)

rule, err := NewAnomalyRule(
"test-anomaly-rule",
valuer.GenerateUUID(),
&postableRule,
reader,
nil,
logger,
nil,
)
require.NoError(t, err)

rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
responses: []*anomaly.AnomaliesResponse{responseNoData},
}

alertsFound, err := rule.Eval(context.Background(), evalTime)
@@ -156,46 +149,47 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
postableRule := ruletypes.PostableRule{
AlertName: "Test anomaly no data with AbsentFor",
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
RuleType: ruletypes.RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: evalWindow,
Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
AlertOnAbsent: true,
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
Expression: "A",
DataSource: v3.DataSourceMetrics,
Temporality: v3.Unspecified,
CompareOperator: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
AlertOnAbsent: true,
Target: &target,
CompositeQuery: &ruletypes.AlertCompositeQuery{
QueryType: ruletypes.QueryTypeBuilder,
Queries: []qbtypes.QueryEnvelope{{
Type: qbtypes.QueryTypeBuilder,
Spec: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Name: "A",
Signal: telemetrytypes.SignalMetrics,
},
},
}},
},
SelectedQuery: "A",
Seasonality: "daily",
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{
Name: "Test anomaly no data with AbsentFor",
TargetValue: &target,
MatchType: ruletypes.AtleastOnce,
CompareOp: ruletypes.ValueIsAbove,
Name: "Test anomaly no data with AbsentFor",
TargetValue: &target,
MatchType: ruletypes.AtleastOnce,
CompareOperator: ruletypes.ValueIsAbove,
}},
},
},
}

responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseNoData := &anomaly.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
QueryName: "A",
Aggregations: []*qbtypes.AggregationBucket{{
AnomalyScores: []*qbtypes.TimeSeries{},
}},
},
},
}
@@ -229,32 +223,35 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)

responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
responseWithData := &anomaly.AnomaliesResponse{
Results: []*qbtypes.TimeSeriesData{
{
QueryName: "A",
AnomalyScores: []*v3.Series{
{
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
Aggregations: []*qbtypes.AggregationBucket{{
AnomalyScores: []*qbtypes.TimeSeries{
{
Labels: []*qbtypes.Label{
{
Key: telemetrytypes.TelemetryFieldKey{Name: "Test"},
Value: "labels",
},
},
Values: []*qbtypes.TimeSeriesValue{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
},
}},
},
},
}

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
options := clickhouseReader.NewOptions("primaryNamespace")
reader := clickhouseReader.NewReader(slog.Default(), nil, telemetryStore, nil, "", time.Second, nil, nil, options)

rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, nil, logger)
require.NoError(t, err)

rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
responses: []*anomaly.AnomaliesResponse{responseWithData, responseNoData},
}

alertsFound1, err := rule.Eval(context.Background(), t1)
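The tests above drive the rule through a fixed sequence of canned provider responses (data first, then no data). A minimal standalone sketch of the same sequential-mock pattern, with hypothetical stand-in types rather than the SigNoz ones:

package main

import "fmt"

// response stands in for anomaly.AnomaliesResponse.
type response struct{ name string }

// seqProvider returns canned responses in order and an empty default once
// they are exhausted, mirroring mockAnomalyProvider above.
type seqProvider struct {
	responses []*response
	callCount int
}

func (p *seqProvider) get() *response {
	if p.callCount >= len(p.responses) {
		return &response{name: "empty"}
	}
	resp := p.responses[p.callCount]
	p.callCount++
	return resp
}

func main() {
	p := &seqProvider{responses: []*response{{"with-data"}, {"no-data"}}}
	fmt.Println(p.get().name) // with-data
	fmt.Println(p.get().name) // no-data
	fmt.Println(p.get().name) // empty: canned responses are exhausted
}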
@@ -11,9 +11,7 @@ import (
"github.com/google/uuid"

"github.com/SigNoz/signoz/pkg/errors"
basemodel "github.com/SigNoz/signoz/pkg/query-service/model"
baserules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -23,7 +21,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
rules := make([]baserules.Rule, 0)
var task baserules.Task

ruleId := baserules.RuleIdFromTaskName(opts.TaskName)
ruleID := baserules.RuleIDFromTaskName(opts.TaskName)
evaluation, err := opts.Rule.Evaluation.GetEvaluation()
if err != nil {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "evaluation is invalid: %v", err)
@@ -32,10 +30,9 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
if opts.Rule.RuleType == ruletypes.RuleTypeThreshold {
// create a threshold rule
tr, err := baserules.NewThresholdRule(
ruleId,
ruleID,
opts.OrgID,
opts.Rule,
opts.Reader,
opts.Querier,
opts.Logger,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
@@ -58,11 +55,10 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)

// create promql rule
pr, err := baserules.NewPromRule(
ruleId,
ruleID,
opts.OrgID,
opts.Rule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.Prometheus,
baserules.WithSQLStore(opts.SQLStore),
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
@@ -82,13 +78,11 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
} else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
ar, err := NewAnomalyRule(
ruleId,
ruleID,
opts.OrgID,
opts.Rule,
opts.Reader,
opts.Querier,
opts.Logger,
opts.Cache,
baserules.WithEvalDelay(opts.ManagerOpts.EvalDelay),
baserules.WithSQLStore(opts.SQLStore),
baserules.WithQueryParser(opts.ManagerOpts.QueryParser),
@@ -105,7 +99,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
}

return task, nil
@@ -113,12 +107,12 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)

// TestNotification prepares a dummy rule for given rule parameters and
// sends a test notification. returns alert count and error (if any)
func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.ApiError) {
func TestNotification(opts baserules.PrepareTestRuleOptions) (int, error) {

ctx := context.Background()

if opts.Rule == nil {
return 0, basemodel.BadRequest(fmt.Errorf("rule is required"))
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "rule is required")
}

parsedRule := opts.Rule
@@ -138,15 +132,14 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
if parsedRule.RuleType == ruletypes.RuleTypeThreshold {

// add special labels for test alerts
parsedRule.Labels[labels.RuleSourceLabel] = ""
parsedRule.Labels[labels.AlertRuleIdLabel] = ""
parsedRule.Labels[ruletypes.RuleSourceLabel] = ""
parsedRule.Labels[ruletypes.AlertRuleIDLabel] = ""

// create a threshold rule
rule, err = baserules.NewThresholdRule(
alertname,
opts.OrgID,
parsedRule,
opts.Reader,
opts.Querier,
opts.Logger,
baserules.WithSendAlways(),
@@ -158,7 +151,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap

if err != nil {
slog.Error("failed to prepare a new threshold rule for test", "name", alertname, errors.Attr(err))
return 0, basemodel.BadRequest(err)
return 0, err
}

} else if parsedRule.RuleType == ruletypes.RuleTypeProm {
@@ -169,7 +162,6 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
opts.OrgID,
parsedRule,
opts.Logger,
opts.Reader,
opts.ManagerOpts.Prometheus,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
@@ -180,7 +172,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap

if err != nil {
slog.Error("failed to prepare a new promql rule for test", "name", alertname, errors.Attr(err))
return 0, basemodel.BadRequest(err)
return 0, err
}
} else if parsedRule.RuleType == ruletypes.RuleTypeAnomaly {
// create anomaly rule
@@ -188,10 +180,8 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
alertname,
opts.OrgID,
parsedRule,
opts.Reader,
opts.Querier,
opts.Logger,
opts.Cache,
baserules.WithSendAlways(),
baserules.WithSendUnmatched(),
baserules.WithSQLStore(opts.SQLStore),
@@ -200,10 +190,10 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
)
if err != nil {
slog.Error("failed to prepare a new anomaly rule for test", "name", alertname, errors.Attr(err))
return 0, basemodel.BadRequest(err)
return 0, err
}
} else {
return 0, basemodel.BadRequest(fmt.Errorf("failed to derive ruletype with given information"))
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to derive ruletype with given information")
}

// set timestamp to current utc time
@@ -212,7 +202,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
alertsFound, err := rule.Eval(ctx, ts)
if err != nil {
slog.Error("evaluating rule failed", "rule", rule.Name(), errors.Attr(err))
return 0, basemodel.InternalError(fmt.Errorf("rule evaluation failed"))
return 0, err
}
rule.SendAlerts(ctx, ts, 0, time.Minute, opts.NotifyFunc)
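TestNotification now returns a plain error built from the repo's errors package instead of *basemodel.ApiError, so callers can use ordinary err != nil checks. A hedged sketch of the new style, using only the constructors that appear verbatim in this diff (their exact signatures live in github.com/SigNoz/signoz/pkg/errors):

package rules

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
)

// validateTestRule is a hypothetical helper, not part of the diff: it only
// illustrates returning coded errors instead of basemodel.BadRequest wrappers.
func validateTestRule(rule *ruletypes.PostableRule) error {
	if rule == nil {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "rule is required")
	}
	return nil
}

Callers treat the result as a regular error, which is what lets the manager tests below replace require.Nil(t, apiErr) with require.Nil(t, err).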
@@ -114,11 +114,8 @@ func TestManager_TestNotification_SendUnmatched_ThresholdRule(t *testing.T) {
},
})

count, apiErr := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
if apiErr != nil {
t.Logf("TestNotification error: %v, type: %s", apiErr.Err, apiErr.Typ)
}
require.Nil(t, apiErr)
count, err := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
require.Nil(t, err)
assert.Equal(t, tc.ExpectAlerts, count)

if tc.ExpectAlerts > 0 {
@@ -268,11 +265,8 @@ func TestManager_TestNotification_SendUnmatched_PromRule(t *testing.T) {
},
})

count, apiErr := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
if apiErr != nil {
t.Logf("TestNotification error: %v, type: %s", apiErr.Err, apiErr.Typ)
}
require.Nil(t, apiErr)
count, err := mgr.TestNotification(context.Background(), orgID, string(ruleBytes))
require.Nil(t, err)
assert.Equal(t, tc.ExpectAlerts, count)

if tc.ExpectAlerts > 0 {
@@ -68,8 +68,8 @@
"@signozhq/toggle-group": "0.0.1",
"@signozhq/tooltip": "0.0.2",
"@signozhq/ui": "0.0.5",
"@tanstack/react-table": "8.20.6",
"@tanstack/react-virtual": "3.11.2",
"@tanstack/react-table": "8.21.3",
"@tanstack/react-virtual": "3.13.22",
"@uiw/codemirror-theme-copilot": "4.23.11",
"@uiw/codemirror-theme-github": "4.24.1",
"@uiw/react-codemirror": "4.23.10",
@@ -1,9 +1,8 @@
import { ReactChild, useCallback, useEffect, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { matchPath, Redirect, useLocation } from 'react-router-dom';
import getLocalStorageApi from 'api/browser/localstorage/get';
import setLocalStorageApi from 'api/browser/localstorage/set';
import getAll from 'api/v1/user/get';
import { useListUsers } from 'api/generated/services/users';
import { FeatureKeys } from 'constants/features';
import { LOCALSTORAGE } from 'constants/localStorage';
import { ORG_PREFERENCES } from 'constants/orgPreferences';
@@ -12,12 +11,9 @@ import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import history from 'lib/history';
import { isEmpty } from 'lodash-es';
import { useAppContext } from 'providers/App/App';
import { SuccessResponseV2 } from 'types/api';
import APIError from 'types/api/error';
import { LicensePlatform, LicenseState } from 'types/api/licensesV3/getActive';
import { OrgPreference } from 'types/api/preferences/preference';
import { Organization } from 'types/api/user/getOrganization';
import { UserResponse } from 'types/api/user/getUser';
import { USER_ROLES } from 'types/roles';
import { routePermission } from 'utils/permission';

@@ -63,18 +59,10 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {

const [orgData, setOrgData] = useState<Organization | undefined>(undefined);

const { data: usersData, isFetching: isFetchingUsers } = useQuery<
SuccessResponseV2<UserResponse[]> | undefined,
APIError
>({
queryFn: () => {
if (orgData && orgData.id !== undefined) {
return getAll();
}
return undefined;
const { data: usersData, isFetching: isFetchingUsers } = useListUsers({
query: {
enabled: !isEmpty(orgData) && user.role === 'ADMIN',
},
queryKey: ['getOrgUser'],
enabled: !isEmpty(orgData) && user.role === 'ADMIN',
});

const checkFirstTimeUser = useCallback((): boolean => {
@@ -67,9 +67,12 @@ jest.mock('hooks/useGetTenantLicense', () => ({

// Mock react-query for users fetch
let mockUsersData: { email: string }[] = [];
jest.mock('api/v1/user/get', () => ({
__esModule: true,
default: jest.fn(() => Promise.resolve({ data: mockUsersData })),
jest.mock('api/generated/services/users', () => ({
...jest.requireActual('api/generated/services/users'),
useListUsers: jest.fn(() => ({
data: { data: mockUsersData },
isFetching: false,
})),
}));

const queryClient = new QueryClient({
@@ -18,7 +18,7 @@ import AppLayout from 'container/AppLayout';
import Hex from 'crypto-js/enc-hex';
import HmacSHA256 from 'crypto-js/hmac-sha256';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
@@ -212,6 +212,12 @@ function App(): JSX.Element {
activeLicenseFetchError,
]);

const isDarkMode = useIsDarkMode();

useEffect(() => {
window.Pylon?.('setTheme', isDarkMode ? 'dark' : 'light');
}, [isDarkMode]);

useEffect(() => {
if (
pathname === ROUTES.ONBOARDING ||
@@ -33,8 +33,6 @@ import type {
|
||||
DisconnectAccountPathParameters,
|
||||
GetAccount200,
|
||||
GetAccountPathParameters,
|
||||
GetConnectionCredentials200,
|
||||
GetConnectionCredentialsPathParameters,
|
||||
GetService200,
|
||||
GetServicePathParameters,
|
||||
ListAccounts200,
|
||||
@@ -630,103 +628,6 @@ export const useUpdateAccount = <
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint updates a service for the specified cloud provider
|
||||
* @summary Update service
|
||||
*/
|
||||
export const updateService = (
|
||||
{ cloudProvider, id, serviceId }: UpdateServicePathParameters,
|
||||
cloudintegrationtypesUpdatableServiceDTO: BodyType<CloudintegrationtypesUpdatableServiceDTO>,
|
||||
) => {
|
||||
return GeneratedAPIInstance<void>({
|
||||
url: `/api/v1/cloud_integrations/${cloudProvider}/accounts/${id}/services/${serviceId}`,
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
data: cloudintegrationtypesUpdatableServiceDTO,
|
||||
});
|
||||
};
|
||||
|
||||
export const getUpdateServiceMutationOptions = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateService>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServicePathParameters;
|
||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateService>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServicePathParameters;
|
||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationKey = ['updateService'];
|
||||
const { mutation: mutationOptions } = options
|
||||
? options.mutation &&
|
||||
'mutationKey' in options.mutation &&
|
||||
options.mutation.mutationKey
|
||||
? options
|
||||
: { ...options, mutation: { ...options.mutation, mutationKey } }
|
||||
: { mutation: { mutationKey } };
|
||||
|
||||
const mutationFn: MutationFunction<
|
||||
Awaited<ReturnType<typeof updateService>>,
|
||||
{
|
||||
pathParams: UpdateServicePathParameters;
|
||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||
}
|
||||
> = (props) => {
|
||||
const { pathParams, data } = props ?? {};
|
||||
|
||||
return updateService(pathParams, data);
|
||||
};
|
||||
|
||||
return { mutationFn, ...mutationOptions };
|
||||
};
|
||||
|
||||
export type UpdateServiceMutationResult = NonNullable<
|
||||
Awaited<ReturnType<typeof updateService>>
|
||||
>;
|
||||
export type UpdateServiceMutationBody = BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||
export type UpdateServiceMutationError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Update service
|
||||
*/
|
||||
export const useUpdateService = <
|
||||
TError = ErrorType<RenderErrorResponseDTO>,
|
||||
TContext = unknown
|
||||
>(options?: {
|
||||
mutation?: UseMutationOptions<
|
||||
Awaited<ReturnType<typeof updateService>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServicePathParameters;
|
||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||
},
|
||||
TContext
|
||||
>;
|
||||
}): UseMutationResult<
|
||||
Awaited<ReturnType<typeof updateService>>,
|
||||
TError,
|
||||
{
|
||||
pathParams: UpdateServicePathParameters;
|
||||
data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
|
||||
},
|
||||
TContext
|
||||
> => {
|
||||
const mutationOptions = getUpdateServiceMutationOptions(options);
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint is called by the deployed agent to check in
|
||||
* @summary Agent check-in
|
||||
@@ -826,114 +727,6 @@ export const useAgentCheckIn = <
|
||||
|
||||
return useMutation(mutationOptions);
|
||||
};
|
||||
/**
|
||||
* This endpoint retrieves the connection credentials required for integration
|
||||
* @summary Get connection credentials
|
||||
*/
|
||||
export const getConnectionCredentials = (
|
||||
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||
signal?: AbortSignal,
|
||||
) => {
|
||||
return GeneratedAPIInstance<GetConnectionCredentials200>({
|
||||
url: `/api/v1/cloud_integrations/${cloudProvider}/credentials`,
|
||||
method: 'GET',
|
||||
signal,
|
||||
});
|
||||
};
|
||||
|
||||
export const getGetConnectionCredentialsQueryKey = ({
|
||||
cloudProvider,
|
||||
}: GetConnectionCredentialsPathParameters) => {
|
||||
return [`/api/v1/cloud_integrations/${cloudProvider}/credentials`] as const;
|
||||
};
|
||||
|
||||
export const getGetConnectionCredentialsQueryOptions = <
|
||||
TData = Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
) => {
|
||||
const { query: queryOptions } = options ?? {};
|
||||
|
||||
const queryKey =
|
||||
queryOptions?.queryKey ??
|
||||
getGetConnectionCredentialsQueryKey({ cloudProvider });
|
||||
|
||||
const queryFn: QueryFunction<
|
||||
Awaited<ReturnType<typeof getConnectionCredentials>>
|
||||
> = ({ signal }) => getConnectionCredentials({ cloudProvider }, signal);
|
||||
|
||||
return {
|
||||
queryKey,
|
||||
queryFn,
|
||||
enabled: !!cloudProvider,
|
||||
...queryOptions,
|
||||
} as UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||
TError,
|
||||
TData
|
||||
> & { queryKey: QueryKey };
|
||||
};
|
||||
|
||||
export type GetConnectionCredentialsQueryResult = NonNullable<
|
||||
Awaited<ReturnType<typeof getConnectionCredentials>>
|
||||
>;
|
||||
export type GetConnectionCredentialsQueryError = ErrorType<RenderErrorResponseDTO>;
|
||||
|
||||
/**
|
||||
* @summary Get connection credentials
|
||||
*/
|
||||
|
||||
export function useGetConnectionCredentials<
|
||||
TData = Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||
TError = ErrorType<RenderErrorResponseDTO>
|
||||
>(
|
||||
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||
options?: {
|
||||
query?: UseQueryOptions<
|
||||
Awaited<ReturnType<typeof getConnectionCredentials>>,
|
||||
TError,
|
||||
TData
|
||||
>;
|
||||
},
|
||||
): UseQueryResult<TData, TError> & { queryKey: QueryKey } {
|
||||
const queryOptions = getGetConnectionCredentialsQueryOptions(
|
||||
{ cloudProvider },
|
||||
options,
|
||||
);
|
||||
|
||||
const query = useQuery(queryOptions) as UseQueryResult<TData, TError> & {
|
||||
queryKey: QueryKey;
|
||||
};
|
||||
|
||||
query.queryKey = queryOptions.queryKey;
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
/**
|
||||
* @summary Get connection credentials
|
||||
*/
|
||||
export const invalidateGetConnectionCredentials = async (
|
||||
queryClient: QueryClient,
|
||||
{ cloudProvider }: GetConnectionCredentialsPathParameters,
|
||||
options?: InvalidateOptions,
|
||||
): Promise<QueryClient> => {
|
||||
await queryClient.invalidateQueries(
|
||||
{ queryKey: getGetConnectionCredentialsQueryKey({ cloudProvider }) },
|
||||
options,
|
||||
);
|
||||
|
||||
return queryClient;
|
||||
};
|
||||
|
||||
/**
|
||||
* This endpoint lists the services metadata for the specified cloud provider
|
||||
* @summary List services metadata
|
||||
@@ -1148,3 +941,101 @@ export const invalidateGetService = async (
|
||||
|
||||
return queryClient;
|
||||
};
+
+/**
+ * This endpoint updates a service for the specified cloud provider
+ * @summary Update service
+ */
+export const updateService = (
+  { cloudProvider, serviceId }: UpdateServicePathParameters,
+  cloudintegrationtypesUpdatableServiceDTO: BodyType<CloudintegrationtypesUpdatableServiceDTO>,
+) => {
+  return GeneratedAPIInstance<void>({
+    url: `/api/v1/cloud_integrations/${cloudProvider}/services/${serviceId}`,
+    method: 'PUT',
+    headers: { 'Content-Type': 'application/json' },
+    data: cloudintegrationtypesUpdatableServiceDTO,
+  });
+};
+
+export const getUpdateServiceMutationOptions = <
+  TError = ErrorType<RenderErrorResponseDTO>,
+  TContext = unknown
+>(options?: {
+  mutation?: UseMutationOptions<
+    Awaited<ReturnType<typeof updateService>>,
+    TError,
+    {
+      pathParams: UpdateServicePathParameters;
+      data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
+    },
+    TContext
+  >;
+}): UseMutationOptions<
+  Awaited<ReturnType<typeof updateService>>,
+  TError,
+  {
+    pathParams: UpdateServicePathParameters;
+    data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
+  },
+  TContext
+> => {
+  const mutationKey = ['updateService'];
+  const { mutation: mutationOptions } = options
+    ? options.mutation &&
+      'mutationKey' in options.mutation &&
+      options.mutation.mutationKey
+      ? options
+      : { ...options, mutation: { ...options.mutation, mutationKey } }
+    : { mutation: { mutationKey } };
+
+  const mutationFn: MutationFunction<
+    Awaited<ReturnType<typeof updateService>>,
+    {
+      pathParams: UpdateServicePathParameters;
+      data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
+    }
+  > = (props) => {
+    const { pathParams, data } = props ?? {};
+
+    return updateService(pathParams, data);
+  };
+
+  return { mutationFn, ...mutationOptions };
+};
+
+export type UpdateServiceMutationResult = NonNullable<
+  Awaited<ReturnType<typeof updateService>>
+>;
+export type UpdateServiceMutationBody = BodyType<CloudintegrationtypesUpdatableServiceDTO>;
+export type UpdateServiceMutationError = ErrorType<RenderErrorResponseDTO>;
+
+/**
+ * @summary Update service
+ */
+export const useUpdateService = <
+  TError = ErrorType<RenderErrorResponseDTO>,
+  TContext = unknown
+>(options?: {
+  mutation?: UseMutationOptions<
+    Awaited<ReturnType<typeof updateService>>,
+    TError,
+    {
+      pathParams: UpdateServicePathParameters;
+      data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
+    },
+    TContext
+  >;
+}): UseMutationResult<
+  Awaited<ReturnType<typeof updateService>>,
+  TError,
+  {
+    pathParams: UpdateServicePathParameters;
+    data: BodyType<CloudintegrationtypesUpdatableServiceDTO>;
+  },
+  TContext
+> => {
+  const mutationOptions = getUpdateServiceMutationOptions(options);
+
+  return useMutation(mutationOptions);
+};
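A usage sketch for the hook above (not part of the generated file): the mutation takes a single variables object combining `pathParams` and the request body. The `'aws'` / `'rds'` identifiers are illustrative only.

```typescript
// Hypothetical consumer; the DTO value is assumed to be assembled elsewhere.
function useSaveServiceConfig(): (body: UpdateServiceMutationBody) => void {
	const { mutate } = useUpdateService();

	return (body: UpdateServiceMutationBody): void =>
		mutate({
			pathParams: { cloudProvider: 'aws', serviceId: 'rds' },
			data: body,
		});
}
```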
@@ -425,39 +425,6 @@ export interface AuthtypesSessionContextDTO {
  orgs?: AuthtypesOrgSessionContextDTO[] | null;
}

-export interface AuthtypesStorableRoleDTO {
-  /**
-   * @type string
-   * @format date-time
-   */
-  createdAt?: Date;
-  /**
-   * @type string
-   */
-  description?: string;
-  /**
-   * @type string
-   */
-  id: string;
-  /**
-   * @type string
-   */
-  name?: string;
-  /**
-   * @type string
-   */
-  orgId?: string;
-  /**
-   * @type string
-   */
-  type?: string;
-  /**
-   * @type string
-   * @format date-time
-   */
-  updatedAt?: Date;
-}
-
export interface AuthtypesTransactionDTO {
  object: AuthtypesObjectDTO;
  /**
@@ -475,25 +442,25 @@ export interface AuthtypesUserRoleDTO {
   * @type string
   * @format date-time
   */
-  createdAt?: Date;
+  createdAt: Date;
  /**
   * @type string
   */
  id: string;
-  role?: AuthtypesStorableRoleDTO;
+  role: AuthtypesRoleDTO;
  /**
   * @type string
   */
-  roleId?: string;
+  roleId: string;
  /**
   * @type string
   * @format date-time
   */
-  updatedAt?: Date;
+  updatedAt: Date;
  /**
   * @type string
   */
-  userId?: string;
+  userId: string;
}

export interface AuthtypesUserWithRolesDTO {
@@ -550,12 +517,12 @@ export type CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets = {
};

export interface CloudintegrationtypesAWSCollectionStrategyDTO {
-  logs?: CloudintegrationtypesAWSLogsStrategyDTO;
-  metrics?: CloudintegrationtypesAWSMetricsStrategyDTO;
+  aws_logs?: CloudintegrationtypesAWSLogsStrategyDTO;
+  aws_metrics?: CloudintegrationtypesAWSMetricsStrategyDTO;
  /**
   * @type object
   */
-  s3Buckets?: CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets;
+  s3_buckets?: CloudintegrationtypesAWSCollectionStrategyDTOS3Buckets;
}

export interface CloudintegrationtypesAWSConnectionArtifactDTO {
@@ -588,11 +555,11 @@ export type CloudintegrationtypesAWSLogsStrategyDTOCloudwatchLogsSubscriptionsIt
  /**
   * @type string
   */
-  filterPattern?: string;
+  filter_pattern?: string;
  /**
   * @type string
   */
-  logGroupNamePrefix?: string;
+  log_group_name_prefix?: string;
};

export interface CloudintegrationtypesAWSLogsStrategyDTO {
@@ -600,7 +567,7 @@ export interface CloudintegrationtypesAWSLogsStrategyDTO {
   * @type array
   * @nullable true
   */
-  cloudwatchLogsSubscriptions?:
+  cloudwatch_logs_subscriptions?:
    | CloudintegrationtypesAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem[]
    | null;
}
@@ -621,7 +588,7 @@ export interface CloudintegrationtypesAWSMetricsStrategyDTO {
   * @type array
   * @nullable true
   */
-  cloudwatchMetricStreamFilters?:
+  cloudwatch_metric_stream_filters?:
    | CloudintegrationtypesAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem[]
    | null;
}
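For reference, a sketch of a collection-strategy value under the renamed snake_case fields (all values illustrative, not taken from the codebase; the metric-stream filter item shape is assumed to mirror the Old* DTO further below):

```typescript
// Illustrative only; field names follow the renamed DTOs above.
const strategy: CloudintegrationtypesAWSCollectionStrategyDTO = {
	aws_logs: {
		cloudwatch_logs_subscriptions: [
			{ log_group_name_prefix: '/aws/lambda/', filter_pattern: '' },
		],
	},
	aws_metrics: {
		// Filter item shape assumed; 'AWS/RDS' is a placeholder namespace.
		cloudwatch_metric_stream_filters: [{ Namespace: 'AWS/RDS' }],
	},
	s3_buckets: { 'us-east-1': ['example-log-bucket'] },
};
```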
@@ -643,7 +610,7 @@ export interface CloudintegrationtypesAWSServiceLogsConfigDTO {
  /**
   * @type object
   */
-  s3Buckets?: CloudintegrationtypesAWSServiceLogsConfigDTOS3Buckets;
+  s3_buckets?: CloudintegrationtypesAWSServiceLogsConfigDTOS3Buckets;
}

export interface CloudintegrationtypesAWSServiceMetricsConfigDTO {
@@ -726,32 +693,6 @@ export interface CloudintegrationtypesAssetsDTO {
  dashboards?: CloudintegrationtypesDashboardDTO[] | null;
}

-/**
- * @nullable
- */
-export type CloudintegrationtypesCloudIntegrationServiceDTO = {
-  /**
-   * @type string
-   */
-  cloudIntegrationId?: string;
-  config?: CloudintegrationtypesServiceConfigDTO;
-  /**
-   * @type string
-   * @format date-time
-   */
-  createdAt?: Date;
-  /**
-   * @type string
-   */
-  id: string;
-  type?: CloudintegrationtypesServiceIDDTO;
-  /**
-   * @type string
-   * @format date-time
-   */
-  updatedAt?: Date;
-} | null;
-
export interface CloudintegrationtypesCollectedLogAttributeDTO {
  /**
   * @type string
@@ -795,11 +736,6 @@ export interface CloudintegrationtypesConnectionArtifactDTO {
}

-export interface CloudintegrationtypesConnectionArtifactRequestDTO {
-  config: CloudintegrationtypesConnectionArtifactRequestConfigDTO;
-  credentials: CloudintegrationtypesSignozCredentialsDTO;
-}
-
export interface CloudintegrationtypesConnectionArtifactRequestConfigDTO {
  aws: CloudintegrationtypesAWSConnectionArtifactRequestDTO;
}

@@ -895,68 +831,9 @@ export type CloudintegrationtypesIntegrationConfigDTO = {
   * @type array
   */
  enabled_regions: string[];
-  telemetry: CloudintegrationtypesOldAWSCollectionStrategyDTO;
+  telemetry: CloudintegrationtypesAWSCollectionStrategyDTO;
} | null;

-export type CloudintegrationtypesOldAWSCollectionStrategyDTOS3Buckets = {
-  [key: string]: string[];
-};
-
-export interface CloudintegrationtypesOldAWSCollectionStrategyDTO {
-  aws_logs?: CloudintegrationtypesOldAWSLogsStrategyDTO;
-  aws_metrics?: CloudintegrationtypesOldAWSMetricsStrategyDTO;
-  /**
-   * @type string
-   */
-  provider?: string;
-  /**
-   * @type object
-   */
-  s3_buckets?: CloudintegrationtypesOldAWSCollectionStrategyDTOS3Buckets;
-}
-
-export type CloudintegrationtypesOldAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem = {
-  /**
-   * @type string
-   */
-  filter_pattern?: string;
-  /**
-   * @type string
-   */
-  log_group_name_prefix?: string;
-};
-
-export interface CloudintegrationtypesOldAWSLogsStrategyDTO {
-  /**
-   * @type array
-   * @nullable true
-   */
-  cloudwatch_logs_subscriptions?:
-    | CloudintegrationtypesOldAWSLogsStrategyDTOCloudwatchLogsSubscriptionsItem[]
-    | null;
-}
-
-export type CloudintegrationtypesOldAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem = {
-  /**
-   * @type array
-   */
-  MetricNames?: string[];
-  /**
-   * @type string
-   */
-  Namespace?: string;
-};
-
-export interface CloudintegrationtypesOldAWSMetricsStrategyDTO {
-  /**
-   * @type array
-   * @nullable true
-   */
-  cloudwatch_metric_stream_filters?:
-    | CloudintegrationtypesOldAWSMetricsStrategyDTOCloudwatchMetricStreamFiltersItem[]
-    | null;
-}
-
/**
 * @nullable
 */
@@ -994,7 +871,6 @@ export interface CloudintegrationtypesProviderIntegrationConfigDTO {

export interface CloudintegrationtypesServiceDTO {
  assets: CloudintegrationtypesAssetsDTO;
-  cloudIntegrationService: CloudintegrationtypesCloudIntegrationServiceDTO;
  dataCollected: CloudintegrationtypesDataCollectedDTO;
  /**
   * @type string
@@ -1008,7 +884,8 @@ export interface CloudintegrationtypesServiceDTO {
   * @type string
   */
  overview: string;
-  supportedSignals: CloudintegrationtypesSupportedSignalsDTO;
+  serviceConfig?: CloudintegrationtypesServiceConfigDTO;
+  supported_signals: CloudintegrationtypesSupportedSignalsDTO;
  telemetryCollectionStrategy: CloudintegrationtypesCollectionStrategyDTO;
  /**
   * @type string
@@ -1020,21 +897,6 @@ export interface CloudintegrationtypesServiceConfigDTO {
  aws: CloudintegrationtypesAWSServiceConfigDTO;
}

-export enum CloudintegrationtypesServiceIDDTO {
-  alb = 'alb',
-  'api-gateway' = 'api-gateway',
-  dynamodb = 'dynamodb',
-  ec2 = 'ec2',
-  ecs = 'ecs',
-  eks = 'eks',
-  elasticache = 'elasticache',
-  lambda = 'lambda',
-  msk = 'msk',
-  rds = 'rds',
-  s3sync = 's3sync',
-  sns = 'sns',
-  sqs = 'sqs',
-}
export interface CloudintegrationtypesServiceMetadataDTO {
  /**
   * @type boolean
@@ -1054,25 +916,6 @@ export interface CloudintegrationtypesServiceMetadataDTO {
  title: string;
}

-export interface CloudintegrationtypesSignozCredentialsDTO {
-  /**
-   * @type string
-   */
-  ingestionKey: string;
-  /**
-   * @type string
-   */
-  ingestionUrl: string;
-  /**
-   * @type string
-   */
-  sigNozApiKey: string;
-  /**
-   * @type string
-   */
-  sigNozApiURL: string;
-}
-
export interface CloudintegrationtypesSupportedSignalsDTO {
  /**
   * @type boolean
@@ -2834,14 +2677,6 @@ export interface RenderErrorResponseDTO {
  status: string;
}

-export enum RulestatehistorytypesAlertStateDTO {
-  inactive = 'inactive',
-  pending = 'pending',
-  recovering = 'recovering',
-  firing = 'firing',
-  nodata = 'nodata',
-  disabled = 'disabled',
-}
export interface RulestatehistorytypesGettableRuleStateHistoryDTO {
  /**
   * @type integer
@@ -2853,7 +2688,7 @@ export interface RulestatehistorytypesGettableRuleStateHistoryDTO {
   * @nullable true
   */
  labels: Querybuildertypesv5LabelDTO[] | null;
-  overallState: RulestatehistorytypesAlertStateDTO;
+  overallState: RuletypesAlertStateDTO;
  /**
   * @type boolean
   */
@@ -2861,12 +2696,12 @@ export interface RulestatehistorytypesGettableRuleStateHistoryDTO {
  /**
   * @type string
   */
-  ruleID: string;
+  ruleId: string;
  /**
   * @type string
   */
  ruleName: string;
-  state: RulestatehistorytypesAlertStateDTO;
+  state: RuletypesAlertStateDTO;
  /**
   * @type boolean
   */
@@ -2964,9 +2799,17 @@ export interface RulestatehistorytypesGettableRuleStateWindowDTO {
   * @format int64
   */
  start: number;
-  state: RulestatehistorytypesAlertStateDTO;
+  state: RuletypesAlertStateDTO;
}

+export enum RuletypesAlertStateDTO {
+  inactive = 'inactive',
+  pending = 'pending',
+  recovering = 'recovering',
+  firing = 'firing',
+  nodata = 'nodata',
+  disabled = 'disabled',
+}
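A small sketch of switching exhaustively on the consolidated enum (the returned colour names are placeholders, not from the codebase):

```typescript
// Illustrative mapping from alert state to a display colour.
function alertStateColor(state: RuletypesAlertStateDTO): string {
	switch (state) {
		case RuletypesAlertStateDTO.firing:
			return 'red';
		case RuletypesAlertStateDTO.pending:
		case RuletypesAlertStateDTO.recovering:
			return 'orange';
		case RuletypesAlertStateDTO.nodata:
			return 'gray';
		case RuletypesAlertStateDTO.inactive:
		case RuletypesAlertStateDTO.disabled:
		default:
			return 'green';
	}
}
```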
export interface ServiceaccounttypesGettableFactorAPIKeyDTO {
  /**
   * @type string
@@ -3656,11 +3499,6 @@ export type UpdateAccountPathParameters = {
  cloudProvider: string;
  id: string;
};
-export type UpdateServicePathParameters = {
-  cloudProvider: string;
-  id: string;
-  serviceId: string;
-};
export type AgentCheckInPathParameters = {
  cloudProvider: string;
};
@@ -3672,17 +3510,6 @@ export type AgentCheckIn200 = {
  status: string;
};

-export type GetConnectionCredentialsPathParameters = {
-  cloudProvider: string;
-};
-export type GetConnectionCredentials200 = {
-  data: CloudintegrationtypesSignozCredentialsDTO;
-  /**
-   * @type string
-   */
-  status: string;
-};
-
export type ListServicesMetadataPathParameters = {
  cloudProvider: string;
};
@@ -3706,6 +3533,10 @@ export type GetService200 = {
  status: string;
};

+export type UpdateServicePathParameters = {
+  cloudProvider: string;
+  serviceId: string;
+};
export type CreateSessionByGoogleCallback303 = {
  data: AuthtypesGettableTokenDTO;
  /**
@@ -4749,7 +4580,7 @@ export type GetRuleHistoryTimelineParams = {
  /**
   * @description undefined
   */
-  state?: RulestatehistorytypesAlertStateDTO;
+  state?: RuletypesAlertStateDTO;
  /**
   * @type string
   * @description undefined
@@ -1,26 +0,0 @@
-import { ApiV2Instance as axios } from 'api';
-import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
-import { AxiosError } from 'axios';
-import { ErrorResponse, SuccessResponse } from 'types/api';
-import { PayloadProps, Props } from 'types/api/user/editOrg';
-
-const editOrg = async (
-  props: Props,
-): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
-  try {
-    const response = await axios.put(`/orgs/me`, {
-      displayName: props.displayName,
-    });
-
-    return {
-      statusCode: 204,
-      error: null,
-      message: response.data.status,
-      payload: response.data,
-    };
-  } catch (error) {
-    return ErrorResponseHandler(error as AxiosError);
-  }
-};
-
-export default editOrg;
@@ -1,28 +0,0 @@
-import axios from 'api';
-import { ErrorResponseHandler } from 'api/ErrorResponseHandler';
-import { AxiosError } from 'axios';
-import { ErrorResponse, SuccessResponse } from 'types/api';
-import { PayloadProps } from 'types/api/user/getOrganization';
-
-const getOrganization = async (
-  token?: string,
-): Promise<SuccessResponse<PayloadProps> | ErrorResponse> => {
-  try {
-    const response = await axios.get(`/org`, {
-      headers: {
-        Authorization: `bearer ${token}`,
-      },
-    });
-
-    return {
-      statusCode: 200,
-      error: null,
-      message: response.data.status,
-      payload: response.data,
-    };
-  } catch (error) {
-    return ErrorResponseHandler(error as AxiosError);
-  }
-};
-
-export default getOrganization;
@@ -1,21 +0,0 @@
-import axios from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { UserResponse } from 'types/api/user/getUser';
-import { PayloadProps } from 'types/api/user/getUsers';
-
-const getAll = async (): Promise<SuccessResponseV2<UserResponse[]>> => {
-  try {
-    const response = await axios.get<PayloadProps>(`/user`);
-
-    return {
-      httpStatusCode: response.status,
-      data: response.data.data,
-    };
-  } catch (error) {
-    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-  }
-};
-
-export default getAll;
@@ -1,22 +0,0 @@
-import axios from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { PayloadProps, Props, UserResponse } from 'types/api/user/getUser';
-
-const getUser = async (
-  props: Props,
-): Promise<SuccessResponseV2<UserResponse>> => {
-  try {
-    const response = await axios.get<PayloadProps>(`/user/${props.userId}`);
-
-    return {
-      httpStatusCode: response.status,
-      data: response.data.data,
-    };
-  } catch (error) {
-    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-  }
-};
-
-export default getUser;
@@ -1,23 +0,0 @@
-import axios from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { Props } from 'types/api/user/editUser';
-
-const update = async (props: Props): Promise<SuccessResponseV2<null>> => {
-  try {
-    const response = await axios.put(`/user/${props.userId}`, {
-      displayName: props.displayName,
-      role: props.role,
-    });
-
-    return {
-      httpStatusCode: response.status,
-      data: null,
-    };
-  } catch (error) {
-    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-  }
-};
-
-export default update;
@@ -1,20 +0,0 @@
-import axios from 'api';
-import { ErrorResponseHandlerV2 } from 'api/ErrorResponseHandlerV2';
-import { AxiosError } from 'axios';
-import { ErrorV2Resp, SuccessResponseV2 } from 'types/api';
-import { PayloadProps, UserResponse } from 'types/api/user/getUser';
-
-const get = async (): Promise<SuccessResponseV2<UserResponse>> => {
-  try {
-    const response = await axios.get<PayloadProps>(`/user/me`);
-
-    return {
-      httpStatusCode: response.status,
-      data: response.data.data,
-    };
-  } catch (error) {
-    ErrorResponseHandlerV2(error as AxiosError<ErrorV2Resp>);
-  }
-};
-
-export default get;
(Binary image assets: 72 files listed, each with identical Before/After sizes — content unchanged, consistent with the assets being relocated in this compare.)