Compare commits


1 Commit

Author | SHA1 | Message | Date
Swapnil Nakade | 824c2c53a7 | feat: adding tests for cloudintegration api | 2026-02-09 02:35:15 +05:30
12 changed files with 1701 additions and 1728 deletions

View File

@@ -3354,62 +3354,6 @@ paths:
summary: Rotate session
tags:
- sessions
/api/v5/query_range:
post:
deprecated: false
description: Execute a composite query over a time range. Supports builder queries
(traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse
SQL.
operationId: QueryRangeV5
requestBody:
content:
application/json:
schema:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
responses:
"200":
content:
application/json:
schema:
properties:
data:
$ref: '#/components/schemas/Querybuildertypesv5QueryRangeResponse'
status:
type: string
type: object
description: OK
"400":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Bad Request
"401":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Unauthorized
"403":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Forbidden
"500":
content:
application/json:
schema:
$ref: '#/components/schemas/RenderErrorResponse'
description: Internal Server Error
security:
- api_key:
- VIEWER
- tokenizer:
- VIEWER
summary: Query range
tags:
- query
components:
schemas:
AuthtypesAttributeMapping:
@@ -3984,9 +3928,19 @@ components:
isMonotonic:
type: boolean
temporality:
$ref: '#/components/schemas/MetrictypesTemporality'
enum:
- delta
- cumulative
- unspecified
type: string
type:
$ref: '#/components/schemas/MetrictypesType'
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
unit:
type: string
required:
@@ -4009,7 +3963,13 @@ components:
minimum: 0
type: integer
type:
$ref: '#/components/schemas/MetrictypesType'
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
unit:
type: string
required:
@@ -4070,11 +4030,6 @@ components:
- percentage
- totalValue
type: object
MetricsexplorertypesTreemapMode:
enum:
- timeseries
- samples
type: string
MetricsexplorertypesTreemapRequest:
properties:
end:
@@ -4085,7 +4040,10 @@ components:
limit:
type: integer
mode:
$ref: '#/components/schemas/MetricsexplorertypesTreemapMode'
enum:
- timeseries
- samples
type: string
start:
format: int64
type: integer
@@ -4120,9 +4078,19 @@ components:
metricName:
type: string
temporality:
$ref: '#/components/schemas/MetrictypesTemporality'
enum:
- delta
- cumulative
- unspecified
type: string
type:
$ref: '#/components/schemas/MetrictypesType'
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
unit:
type: string
required:
@@ -4133,20 +4101,6 @@ components:
- temporality
- isMonotonic
type: object
MetrictypesTemporality:
enum:
- delta
- cumulative
- unspecified
type: string
MetrictypesType:
enum:
- gauge
- sum
- histogram
- summary
- exponentialhistogram
type: string
PreferencetypesPreference:
properties:
allowedScopes:
@@ -4196,101 +4150,7 @@ components:
type:
type: string
type: object
Querybuildertypesv5AggregationBucket:
properties:
alias:
type: string
anomalyScores:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
index:
type: integer
lowerBoundSeries:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
meta:
properties:
unit:
type: string
type: object
predictedSeries:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
series:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
nullable: true
type: array
upperBoundSeries:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
type: array
type: object
Querybuildertypesv5Bucket:
properties:
step:
format: double
type: number
type: object
Querybuildertypesv5ClickHouseQuery:
properties:
disabled:
type: boolean
legend:
type: string
name:
type: string
query:
type: string
type: object
Querybuildertypesv5ColumnDescriptor:
properties:
aggregationIndex:
format: int64
type: integer
columnType:
$ref: '#/components/schemas/Querybuildertypesv5ColumnType'
description:
type: string
fieldContext:
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
meta:
properties:
unit:
type: string
type: object
name:
type: string
queryName:
type: string
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object
Querybuildertypesv5ColumnType:
enum:
- group
- aggregation
type: string
Querybuildertypesv5CompositeQuery:
description: Composite query containing one or more query envelopes. Each query
envelope specifies its type and corresponding spec.
properties:
queries:
items:
$ref: '#/components/schemas/Querybuildertypesv5QueryEnvelope'
nullable: true
type: array
type: object
Querybuildertypesv5ExecStats:
description: Execution statistics for the query, including rows scanned, bytes
scanned, and duration.
properties:
bytesScanned:
minimum: 0
@@ -4312,109 +4172,10 @@ components:
expression:
type: string
type: object
Querybuildertypesv5FormatOptions:
properties:
fillGaps:
type: boolean
formatTableResultForUI:
type: boolean
type: object
Querybuildertypesv5Function:
properties:
args:
items:
$ref: '#/components/schemas/Querybuildertypesv5FunctionArg'
type: array
name:
$ref: '#/components/schemas/Querybuildertypesv5FunctionName'
type: object
Querybuildertypesv5FunctionArg:
properties:
name:
type: string
value: {}
type: object
Querybuildertypesv5FunctionName:
enum:
- cutoffmin
- cutoffmax
- clampmin
- clampmax
- absolute
- runningdiff
- log2
- log10
- cumulativesum
- ewma3
- ewma5
- ewma7
- median3
- median5
- median7
- timeshift
- anomaly
- fillzero
type: string
Querybuildertypesv5GroupByKey:
properties:
description:
type: string
fieldContext:
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
name:
type: string
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object
Querybuildertypesv5Having:
properties:
expression:
type: string
type: object
Querybuildertypesv5Label:
properties:
key:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
value: {}
type: object
Querybuildertypesv5LimitBy:
properties:
keys:
items:
type: string
nullable: true
type: array
value:
type: string
type: object
Querybuildertypesv5LogAggregation:
properties:
alias:
type: string
expression:
type: string
type: object
Querybuildertypesv5MetricAggregation:
properties:
metricName:
type: string
reduceTo:
$ref: '#/components/schemas/Querybuildertypesv5ReduceTo'
spaceAggregation:
type: string
temporality:
type: string
timeAggregation:
type: string
type: object
Querybuildertypesv5OrderBy:
properties:
direction:
$ref: '#/components/schemas/Querybuildertypesv5OrderDirection'
type: string
key:
$ref: '#/components/schemas/Querybuildertypesv5OrderByKey'
type: object
@@ -4423,404 +4184,34 @@ components:
description:
type: string
fieldContext:
$ref: '#/components/schemas/TelemetrytypesFieldContext'
type: string
fieldDataType:
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
type: string
name:
type: string
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
type: string
unit:
type: string
type: object
Querybuildertypesv5OrderDirection:
enum:
- asc
- desc
type: string
Querybuildertypesv5PromQuery:
properties:
disabled:
type: boolean
legend:
type: string
name:
type: string
query:
type: string
stats:
type: boolean
step:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderFormula:
properties:
disabled:
type: boolean
expression:
type: string
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
name:
type: string
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
type: object
Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5LogAggregation:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5LogAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
secondaryAggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
type: array
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
source:
$ref: '#/components/schemas/TelemetrytypesSource'
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5MetricAggregation:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5MetricAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
secondaryAggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
type: array
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
source:
$ref: '#/components/schemas/TelemetrytypesSource'
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5TraceAggregation:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5TraceAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
secondaryAggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
type: array
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
source:
$ref: '#/components/schemas/TelemetrytypesSource'
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryBuilderTraceOperator:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5TraceAggregation'
type: array
cursor:
type: string
disabled:
type: boolean
expression:
type: string
filter:
$ref: '#/components/schemas/Querybuildertypesv5Filter'
functions:
items:
$ref: '#/components/schemas/Querybuildertypesv5Function'
type: array
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
having:
$ref: '#/components/schemas/Querybuildertypesv5Having'
legend:
type: string
limit:
type: integer
name:
type: string
offset:
type: integer
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
returnSpansFrom:
type: string
selectFields:
items:
$ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
type: array
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5QueryData:
oneOf:
- $ref: '#/components/schemas/Querybuildertypesv5TimeSeriesData'
- $ref: '#/components/schemas/Querybuildertypesv5ScalarData'
- $ref: '#/components/schemas/Querybuildertypesv5RawData'
properties:
results:
items: {}
nullable: true
type: array
type: object
Querybuildertypesv5QueryEnvelope:
oneOf:
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderTrace'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderLog'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderMetric'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeFormula'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeTraceOperator'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopePromQL'
- $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeClickHouseSQL'
properties:
spec: {}
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeBuilderLog:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5LogAggregation'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeBuilderMetric:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5MetricAggregation'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeBuilderTrace:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5TraceAggregation'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeClickHouseSQL:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5ClickHouseQuery'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeFormula:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderFormula'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopePromQL:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5PromQuery'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryEnvelopeTraceOperator:
properties:
spec:
$ref: '#/components/schemas/Querybuildertypesv5QueryBuilderTraceOperator'
type:
$ref: '#/components/schemas/Querybuildertypesv5QueryType'
type: object
Querybuildertypesv5QueryRangeRequest:
description: Request body for the v5 query range endpoint. Supports builder
queries (traces, logs, metrics), formulas, joins, trace operators, PromQL,
and ClickHouse SQL queries.
example:
compositeQuery:
queries:
- spec:
aggregations:
- alias: span_count
expression: count()
filter:
expression: service.name = 'frontend'
groupBy:
- fieldContext: resource
name: service.name
limit: 10
name: A
order:
- direction: desc
key:
name: span_count
signal: traces
stepInterval: 60s
type: builder_query
end: 1.6409988e+12
requestType: time_series
schemaVersion: v1
start: 1.6409952e+12
properties:
compositeQuery:
$ref: '#/components/schemas/Querybuildertypesv5CompositeQuery'
end:
minimum: 0
type: integer
formatOptions:
$ref: '#/components/schemas/Querybuildertypesv5FormatOptions'
noCache:
type: boolean
requestType:
$ref: '#/components/schemas/Querybuildertypesv5RequestType'
schemaVersion:
type: string
start:
minimum: 0
type: integer
variables:
additionalProperties:
$ref: '#/components/schemas/Querybuildertypesv5VariableItem'
type: object
type: object
Querybuildertypesv5QueryRangeResponse:
description: 'Response from the v5 query range endpoint. The data.results array
contains typed results depending on the requestType: TimeSeriesData for time_series,
ScalarData for scalar, or RawData for raw requests.'
properties:
data:
$ref: '#/components/schemas/Querybuildertypesv5QueryData'
meta:
$ref: '#/components/schemas/Querybuildertypesv5ExecStats'
type:
$ref: '#/components/schemas/Querybuildertypesv5RequestType'
type: string
warning:
$ref: '#/components/schemas/Querybuildertypesv5QueryWarnData'
type: object
Querybuildertypesv5QueryType:
enum:
- builder_query
- builder_formula
- builder_trace_operator
- clickhouse_sql
- promql
type: string
Querybuildertypesv5QueryWarnData:
properties:
message:
@@ -4837,153 +4228,6 @@ components:
message:
type: string
type: object
Querybuildertypesv5RawData:
properties:
nextCursor:
type: string
queryName:
type: string
rows:
items:
$ref: '#/components/schemas/Querybuildertypesv5RawRow'
nullable: true
type: array
type: object
Querybuildertypesv5RawRow:
properties:
data:
additionalProperties: {}
nullable: true
type: object
timestamp:
format: date-time
type: string
type: object
Querybuildertypesv5ReduceTo:
enum:
- sum
- count
- avg
- min
- max
- last
- median
type: string
Querybuildertypesv5RequestType:
enum:
- scalar
- time_series
- raw
- raw_stream
- trace
type: string
Querybuildertypesv5ScalarData:
properties:
columns:
items:
$ref: '#/components/schemas/Querybuildertypesv5ColumnDescriptor'
nullable: true
type: array
data:
items:
items: {}
type: array
nullable: true
type: array
queryName:
type: string
type: object
Querybuildertypesv5SecondaryAggregation:
properties:
alias:
type: string
expression:
type: string
groupBy:
items:
$ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
type: array
limit:
type: integer
limitBy:
$ref: '#/components/schemas/Querybuildertypesv5LimitBy'
order:
items:
$ref: '#/components/schemas/Querybuildertypesv5OrderBy'
type: array
stepInterval:
$ref: '#/components/schemas/Querybuildertypesv5Step'
type: object
Querybuildertypesv5Step:
description: Step interval. Accepts a Go duration string (e.g., "60s", "1m",
"1h") or a number representing seconds (e.g., 60).
oneOf:
- description: Duration string (e.g., "60s", "5m", "1h").
example: 60s
type: string
- description: Duration in seconds.
example: 60
type: number
Querybuildertypesv5TimeSeries:
properties:
labels:
items:
$ref: '#/components/schemas/Querybuildertypesv5Label'
type: array
values:
items:
$ref: '#/components/schemas/Querybuildertypesv5TimeSeriesValue'
nullable: true
type: array
type: object
Querybuildertypesv5TimeSeriesData:
properties:
aggregations:
items:
$ref: '#/components/schemas/Querybuildertypesv5AggregationBucket'
nullable: true
type: array
queryName:
type: string
type: object
Querybuildertypesv5TimeSeriesValue:
properties:
bucket:
$ref: '#/components/schemas/Querybuildertypesv5Bucket'
partial:
type: boolean
timestamp:
format: int64
type: integer
value:
format: double
type: number
values:
items:
format: double
type: number
type: array
type: object
Querybuildertypesv5TraceAggregation:
properties:
alias:
type: string
expression:
type: string
type: object
Querybuildertypesv5VariableItem:
properties:
type:
$ref: '#/components/schemas/Querybuildertypesv5VariableType'
value: {}
type: object
Querybuildertypesv5VariableType:
enum:
- query
- dynamic
- custom
- text
type: string
RenderErrorResponse:
properties:
error:
@@ -5010,48 +4254,6 @@ components:
format: date-time
type: string
type: object
TelemetrytypesFieldContext:
enum:
- metric
- log
- span
- resource
- attribute
- body
type: string
TelemetrytypesFieldDataType:
enum:
- string
- bool
- float64
- int64
- number
type: string
TelemetrytypesSignal:
enum:
- traces
- logs
- metrics
type: string
TelemetrytypesSource:
enum:
- meter
type: string
TelemetrytypesTelemetryFieldKey:
properties:
description:
type: string
fieldContext:
$ref: '#/components/schemas/TelemetrytypesFieldContext'
fieldDataType:
$ref: '#/components/schemas/TelemetrytypesFieldDataType'
name:
type: string
signal:
$ref: '#/components/schemas/TelemetrytypesSignal'
unit:
type: string
type: object
TypesChangePasswordRequest:
properties:
newPassword:
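
Note: this hunk removes the generated /api/v5/query_range documentation from the spec, while the router diff below keeps the route itself as a plain HandleFunc. For orientation, a minimal sketch of calling the endpoint, built from the example payload embedded in the removed spec; the localhost address and the SIGNOZ-API-KEY header name are assumptions not shown in this diff.

import requests

# Example payload taken from the removed spec's QueryRangeRequest example:
# count spans grouped by service.name, ordered by count, top 10.
payload = {
    "schemaVersion": "v1",
    "start": 1640995200000,
    "end": 1640998800000,
    "requestType": "time_series",
    "compositeQuery": {
        "queries": [
            {
                "type": "builder_query",
                "spec": {
                    "name": "A",
                    "signal": "traces",
                    "aggregations": [{"expression": "count()", "alias": "span_count"}],
                    "stepInterval": "60s",
                    "filter": {"expression": "service.name = 'frontend'"},
                    "groupBy": [{"name": "service.name", "fieldContext": "resource"}],
                    "order": [{"key": {"name": "span_count"}, "direction": "desc"}],
                    "limit": 10,
                },
            }
        ]
    },
}

resp = requests.post(
    "http://localhost:8080/api/v5/query_range",  # host is an assumption
    headers={"SIGNOZ-API-KEY": "<your-api-key>"},  # header name is an assumption
    json=payload,
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["data"])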

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/apis/fields"
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/http/handler"
"github.com/SigNoz/signoz/pkg/http/middleware"
querierAPI "github.com/SigNoz/signoz/pkg/querier"
baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -23,8 +22,6 @@ import (
rules "github.com/SigNoz/signoz/pkg/query-service/rules"
"github.com/SigNoz/signoz/pkg/queryparser"
"github.com/SigNoz/signoz/pkg/signoz"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/version"
"github.com/gorilla/mux"
)
@@ -111,22 +108,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)
// v5
router.Handle("/api/v5/query_range", handler.New(am.ViewAccess(ah.queryRangeV5), handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
})).Methods(http.MethodPost)
router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

View File

@@ -762,6 +762,18 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
totalTimeSeries: number;
}
export enum MetricsexplorertypesMetricMetadataDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesMetricMetadataDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesMetricMetadataDTO {
/**
* @type string
@@ -771,14 +783,29 @@ export interface MetricsexplorertypesMetricMetadataDTO {
* @type boolean
*/
isMonotonic: boolean;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesMetricMetadataDTOType;
/**
* @type string
*/
unit: string;
}
export enum MetricsexplorertypesStatDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesStatDTO {
/**
* @type string
@@ -798,7 +825,11 @@ export interface MetricsexplorertypesStatDTO {
* @minimum 0
*/
timeseries: number;
type: MetrictypesTypeDTO;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesStatDTOType;
/**
* @type string
*/
@@ -858,7 +889,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
totalValue: number;
}
export enum MetricsexplorertypesTreemapModeDTO {
export enum MetricsexplorertypesTreemapRequestDTOMode {
timeseries = 'timeseries',
samples = 'samples',
}
@@ -873,7 +904,11 @@ export interface MetricsexplorertypesTreemapRequestDTO {
* @type integer
*/
limit: number;
mode: MetricsexplorertypesTreemapModeDTO;
/**
* @enum timeseries,samples
* @type string
*/
mode: MetricsexplorertypesTreemapRequestDTOMode;
/**
* @type integer
* @format int64
@@ -894,6 +929,18 @@ export interface MetricsexplorertypesTreemapResponseDTO {
timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
/**
* @type string
@@ -907,26 +954,22 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
* @type string
*/
metricName: string;
temporality: MetrictypesTemporalityDTO;
type: MetrictypesTypeDTO;
/**
* @enum delta,cumulative,unspecified
* @type string
*/
temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
/**
* @enum gauge,sum,histogram,summary,exponentialhistogram
* @type string
*/
type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
/**
* @type string
*/
unit: string;
}
export enum MetrictypesTemporalityDTO {
delta = 'delta',
cumulative = 'cumulative',
unspecified = 'unspecified',
}
export enum MetrictypesTypeDTO {
gauge = 'gauge',
sum = 'sum',
histogram = 'histogram',
summary = 'summary',
exponentialhistogram = 'exponentialhistogram',
}
export interface PreferencetypesPreferenceDTO {
/**
* @type array
@@ -1345,7 +1388,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
email: string;
email?: string;
/**
* @type string
*/
@@ -1353,7 +1396,7 @@ export interface TypesPostableForgotPasswordDTO {
/**
* @type string
*/
orgId: string;
orgId?: string;
}
export interface TypesPostableInviteDTO {
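
For reference, an illustrative body matching the inlined enums on MetricsexplorertypesTreemapRequestDTO above; the values are made up and the endpoint consuming it is not part of this diff.

# Illustrative TreemapRequest body; "mode" must be one of the inlined
# MetricsexplorertypesTreemapRequestDTOMode values.
treemap_request = {
    "start": 1640995200000,  # int64, epoch milliseconds
    "end": 1640998800000,
    "limit": 10,
    "mode": "timeseries",  # or "samples"
}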

View File

@@ -2,7 +2,6 @@ package signoz
import (
"context"
"net/http"
"os"
"reflect"
@@ -24,8 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types/ctxtypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
@@ -60,10 +57,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
return nil, err
}
// Register routes that live outside the APIServer modules
// so they are discovered by the OpenAPI walker.
registerQueryRoutes(apiserver.Router())
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -104,25 +97,3 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
return os.WriteFile(path, spec, 0o600)
}
func registerQueryRoutes(router *mux.Router) {
router.Handle("/api/v5/query_range", handler.New(
func(http.ResponseWriter, *http.Request) {},
handler.OpenAPIDef{
ID: "QueryRangeV5",
Tags: []string{"query"},
Summary: "Query range",
Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
Request: new(qbtypes.QueryRangeRequest),
RequestContentType: "application/json",
Response: new(qbtypes.QueryRangeResponse),
ResponseContentType: "application/json",
SuccessStatusCode: http.StatusOK,
ErrorStatusCodes: []int{http.StatusBadRequest},
SecuritySchemes: []handler.OpenAPISecurityScheme{
{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
},
},
)).Methods(http.MethodPost)
}

View File

@@ -151,7 +151,8 @@ func (c *AccountConfig) Value() (driver.Value, error) {
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "couldn't serialize cloud account config to JSON")
}
return serialized, nil
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
}
type AgentReport struct {
@@ -186,7 +187,8 @@ func (r *AgentReport) Value() (driver.Value, error) {
err, errors.CodeInternal, "couldn't serialize agent report to JSON",
)
}
return serialized, nil
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
}
type CloudIntegrationService struct {
@@ -240,5 +242,6 @@ func (c *CloudServiceConfig) Value() (driver.Value, error) {
err, errors.CodeInternal, "couldn't serialize cloud service config to JSON",
)
}
return serialized, nil
// Return as string instead of []byte to ensure PostgreSQL stores as text, not bytea
return string(serialized), nil
}
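
The three Value() changes above address the same issue: database/sql sends a []byte driver.Value to PostgreSQL as bytea, while a string is stored as text. A sketch of how the new Python tests could observe the difference over SQLAlchemy; the DSN and the "config" column name are assumptions (only the cloud_integration table name appears in the tests below).

from sqlalchemy import create_engine, text

engine = create_engine("postgresql://signoz:password@localhost:5432/signoz")  # assumed DSN
with engine.connect() as conn:
    # "config" is an assumed column name on the cloud_integration table.
    row = conn.execute(text("SELECT config FROM cloud_integration LIMIT 1")).fetchone()
    if row is not None:
        # text columns come back as str; bytea columns surface as
        # bytes-like objects (bytes/memoryview) instead.
        assert isinstance(row[0], str), f"expected text, got {type(row[0])}"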

View File

@@ -31,13 +31,6 @@ var (
TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
)
func (TreemapMode) Enum() []any {
return []any{
TreemapModeTimeSeries,
TreemapModeSamples,
}
}
// StatsRequest represents the payload accepted by the metrics stats endpoint.
type StatsRequest struct {
Filter *qbtypes.Filter `json:"filter,omitempty"`
@@ -105,7 +98,7 @@ func (req *StatsRequest) UnmarshalJSON(data []byte) error {
type Stat struct {
MetricName string `json:"metricName" required:"true"`
Description string `json:"description" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
MetricUnit string `json:"unit" required:"true"`
TimeSeries uint64 `json:"timeseries" required:"true"`
Samples uint64 `json:"samples" required:"true"`
@@ -119,9 +112,9 @@ type StatsResponse struct {
type MetricMetadata struct {
Description string `json:"description" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true"`
MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
MetricUnit string `json:"unit" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}
@@ -138,10 +131,10 @@ func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
type UpdateMetricMetadataRequest struct {
MetricName string `json:"metricName" required:"true"`
Type metrictypes.Type `json:"type" required:"true"`
Type metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
Description string `json:"description" required:"true"`
Unit string `json:"unit" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true"`
Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}
@@ -151,7 +144,7 @@ type TreemapRequest struct {
Start int64 `json:"start" required:"true"`
End int64 `json:"end" required:"true"`
Limit int `json:"limit" required:"true"`
Mode TreemapMode `json:"mode" required:"true"`
Mode TreemapMode `json:"mode" required:"true" enum:"timeseries,samples"`
}
// Validate enforces basic constraints on TreemapRequest.

View File

@@ -36,7 +36,7 @@ func (t Temporality) Value() (driver.Value, error) {
}
}
func (t *Temporality) Scan(src any) error {
func (t *Temporality) Scan(src interface{}) error {
if src == nil {
*t = Unknown
return nil
@@ -66,14 +66,6 @@ func (t *Temporality) Scan(src any) error {
return nil
}
func (Temporality) Enum() []any {
return []any{
Delta,
Cumulative,
Unspecified,
}
}
// Type is the type of the metric in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
type Type struct {
@@ -142,16 +134,6 @@ var (
UnspecifiedType = Type{valuer.NewString("")}
)
func (Type) Enum() []any {
return []any{
GaugeType,
SumType,
HistogramType,
SummaryType,
ExpHistogramType,
}
}
type TimeAggregation struct {
valuer.String
}
@@ -169,21 +151,6 @@ var (
TimeAggregationIncrease = TimeAggregation{valuer.NewString("increase")}
)
func (TimeAggregation) Enum() []any {
return []any{
TimeAggregationUnspecified,
TimeAggregationLatest,
TimeAggregationSum,
TimeAggregationAvg,
TimeAggregationMin,
TimeAggregationMax,
TimeAggregationCount,
TimeAggregationCountDistinct,
TimeAggregationRate,
TimeAggregationIncrease,
}
}
type SpaceAggregation struct {
valuer.String
}
@@ -202,22 +169,6 @@ var (
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)
func (SpaceAggregation) Enum() []any {
return []any{
SpaceAggregationUnspecified,
SpaceAggregationSum,
SpaceAggregationAvg,
SpaceAggregationMin,
SpaceAggregationMax,
SpaceAggregationCount,
SpaceAggregationPercentile50,
SpaceAggregationPercentile75,
SpaceAggregationPercentile90,
SpaceAggregationPercentile95,
SpaceAggregationPercentile99,
}
}
func (s SpaceAggregation) IsPercentile() bool {
return s == SpaceAggregationPercentile50 ||
s == SpaceAggregationPercentile75 ||

View File

@@ -1,707 +0,0 @@
package querybuildertypesv5
import (
"github.com/swaggest/jsonschema-go"
)
// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
return []any{
QueryTypeBuilder,
QueryTypeFormula,
// Not yet supported.
// QueryTypeSubQuery,
// QueryTypeJoin,
QueryTypeTraceOperator,
QueryTypeClickHouseSQL,
QueryTypePromQL,
}
}
// Enum returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
return []any{
RequestTypeScalar,
RequestTypeTimeSeries,
RequestTypeRaw,
RequestTypeRawStream,
RequestTypeTrace,
// RequestTypeDistribution,
}
}
// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
return []any{
FunctionNameCutOffMin,
FunctionNameCutOffMax,
FunctionNameClampMin,
FunctionNameClampMax,
FunctionNameAbsolute,
FunctionNameRunningDiff,
FunctionNameLog2,
FunctionNameLog10,
FunctionNameCumulativeSum,
FunctionNameEWMA3,
FunctionNameEWMA5,
FunctionNameEWMA7,
FunctionNameMedian3,
FunctionNameMedian5,
FunctionNameMedian7,
FunctionNameTimeShift,
FunctionNameAnomaly,
FunctionNameFillZero,
}
}
// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
return []any{
OrderDirectionAsc,
OrderDirectionDesc,
}
}
// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
return []any{
ReduceToSum,
ReduceToCount,
ReduceToAvg,
ReduceToMin,
ReduceToMax,
ReduceToLast,
ReduceToMedian,
}
}
// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
return []any{
QueryVariableType,
DynamicVariableType,
CustomVariableType,
TextBoxVariableType,
}
}
// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
return []any{
JoinTypeInner,
JoinTypeLeft,
JoinTypeRight,
JoinTypeFull,
JoinTypeCross,
}
}
// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
return []any{
ColumnTypeGroup,
ColumnTypeAggregation,
}
}
// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}
// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}
// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}
// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}
// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// Type QueryType `json:"type" description:"The type of the query."`
// Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }
// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}
// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}
// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
Type QueryType `json:"type" description:"The type of the query."`
Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}
var _ jsonschema.OneOfExposer = QueryEnvelope{}
// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
return []any{
queryEnvelopeBuilderTrace{},
queryEnvelopeBuilderLog{},
queryEnvelopeBuilderMetric{},
queryEnvelopeFormula{},
// queryEnvelopeJoin{},
queryEnvelopeTraceOperator{},
queryEnvelopePromQL{},
queryEnvelopeClickHouseSQL{},
}
}
var _ jsonschema.Exposer = Step{}
// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
s := jsonschema.Schema{}
s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")
strSchema := jsonschema.Schema{}
strSchema.WithType(jsonschema.String.Type())
strSchema.WithExamples("60s", "5m", "1h")
strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")
numSchema := jsonschema.Schema{}
numSchema.WithType(jsonschema.Number.Type())
numSchema.WithExamples(60, 300, 3600)
numSchema.WithDescription("Duration in seconds.")
s.OneOf = []jsonschema.SchemaOrBool{
strSchema.ToSchemaOrBool(),
numSchema.ToSchemaOrBool(),
}
return s, nil
}
var _ jsonschema.OneOfExposer = QueryData{}
// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
return []any{
TimeSeriesData{},
ScalarData{},
RawData{},
}
}
var _ jsonschema.Preparer = &QueryRangeRequest{}
// PrepareJSONSchema adds examples and description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
schema.WithExamples(
// 1. time_series + traces builder: count spans grouped by service, ordered by count
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "span_count",
},
},
"stepInterval": "60s",
"filter": map[string]any{
"expression": "service.name = 'frontend'",
},
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "span_count"},
"direction": "desc",
},
},
"limit": 10,
},
},
},
},
},
// 2. time_series + logs builder: count logs grouped by service
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "log_count",
},
},
"stepInterval": "60s",
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "log_count"},
"direction": "desc",
},
},
"limit": 10,
},
},
},
},
},
// 3. time_series + metrics builder (Gauge): latest value averaged across series
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "system.cpu.utilization",
"timeAggregation": "latest",
"spaceAggregation": "avg",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "host.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 4. time_series + metrics builder (Sum): rate of cumulative counter
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.count",
"timeAggregation": "rate",
"spaceAggregation": "sum",
},
},
"stepInterval": 120,
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 5. time_series + metrics builder (Histogram): p99 latency
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.bucket",
"spaceAggregation": "p99",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
},
},
},
// 6. raw + logs builder: fetch raw log records
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
"selectFields": []any{
map[string]any{
"name": "body",
"fieldContext": "log",
},
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "timestamp", "fieldContext": "log"},
"direction": "desc",
},
map[string]any{
"key": map[string]any{"name": "id"},
"direction": "desc",
},
},
"limit": 50,
"offset": 0,
},
},
},
},
},
// 7. raw + traces builder: fetch raw span records
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"filter": map[string]any{
"expression": "service.name = 'frontend' AND has_error = true",
},
"selectFields": []any{
map[string]any{
"name": "name",
"fieldContext": "span",
},
map[string]any{
"name": "duration_nano",
"fieldContext": "span",
},
},
"order": []any{
map[string]any{
"key": map[string]any{"name": "timestamp", "fieldContext": "span"},
"direction": "desc",
},
},
"limit": 100,
},
},
},
},
},
// 8. scalar + traces builder: total span count as a single value
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "span_count",
},
},
"filter": map[string]any{
"expression": "service.name = 'frontend'",
},
},
},
},
},
},
// 9. scalar + logs builder: total error log count as a single value
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "logs",
"aggregations": []any{
map[string]any{
"expression": "count()",
"alias": "error_count",
},
},
"filter": map[string]any{
"expression": "severity_text = 'ERROR'",
},
},
},
},
},
},
// 10. scalar + metrics builder: single reduced value with reduceTo
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "metrics",
"aggregations": []any{
map[string]any{
"metricName": "http.server.duration.count",
"timeAggregation": "rate",
"spaceAggregation": "sum",
"reduceTo": "sum",
},
},
"stepInterval": "60s",
},
},
},
},
},
// 11. builder formula: error rate from two trace queries
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "A",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "countIf(has_error = true)",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
map[string]any{
"type": "builder_query",
"spec": map[string]any{
"name": "B",
"signal": "traces",
"aggregations": []any{
map[string]any{
"expression": "count()",
},
},
"stepInterval": "60s",
"groupBy": []any{
map[string]any{
"name": "service.name",
"fieldContext": "resource",
},
},
},
},
map[string]any{
"type": "builder_formula",
"spec": map[string]any{
"name": "error_rate",
"expression": "A / B * 100",
},
},
},
},
},
// 12. PromQL query with UTF-8 dot metric name
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "promql",
"spec": map[string]any{
"name": "request_rate",
"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
"step": 60,
},
},
},
},
},
// 13. ClickHouse SQL — time_series
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "time_series",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "span_rate",
"query": "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime GROUP BY ts ORDER BY ts",
},
},
},
},
},
// 14. ClickHouse SQL — raw
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "raw",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "recent_errors",
"query": "SELECT timestamp, body FROM signoz_logs.distributed_logs_v2 WHERE timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano AND severity_text = 'ERROR' ORDER BY timestamp DESC LIMIT 100",
},
},
},
},
},
// 15. ClickHouse SQL — scalar
map[string]any{
"schemaVersion": "v1",
"start": 1640995200000,
"end": 1640998800000,
"requestType": "scalar",
"compositeQuery": map[string]any{
"queries": []any{
map[string]any{
"type": "clickhouse_sql",
"spec": map[string]any{
"name": "total_spans",
"query": "SELECT count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime",
},
},
},
},
},
)
return nil
}
var _ jsonschema.Preparer = &QueryRangeResponse{}
// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
return nil
}
var _ jsonschema.Preparer = &CompositeQuery{}
// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
return nil
}
var _ jsonschema.Preparer = &ExecStats{}
// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
return nil
}

View File

@@ -1,48 +0,0 @@
package telemetrytypes
// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
return []any{
SignalTraces,
SignalLogs,
SignalMetrics,
}
}
// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
return []any{
FieldContextMetric,
FieldContextLog,
FieldContextSpan,
// FieldContextTrace,
FieldContextResource,
// FieldContextScope,
FieldContextAttribute,
// FieldContextEvent,
FieldContextBody,
}
}
// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
return []any{
SourceMeter,
}
}
// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
return []any{
FieldDataTypeString,
FieldDataTypeBool,
FieldDataTypeFloat64,
FieldDataTypeInt64,
FieldDataTypeNumber,
// FieldDataTypeArrayString,
// FieldDataTypeArrayFloat64,
// FieldDataTypeArrayBool,
// FieldDataTypeArrayInt64,
// FieldDataTypeArrayNumber,
}
}

View File

@@ -0,0 +1,206 @@
from http import HTTPStatus
from typing import Callable
import requests
from sqlalchemy import text
from wiremock.client import (
HttpMethods,
Mapping,
MappingRequest,
MappingResponse,
WireMockMatchers,
)
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def cleanup_cloud_accounts(postgres: types.TestContainerSQL) -> None:
try:
with postgres.conn.connect() as conn:
conn.execute(text("TRUNCATE TABLE cloud_integration CASCADE"))
conn.commit()
logger.info("Cleaned up cloud_integration table")
except Exception as e:
logger.info(f"Cleanup skipped: {str(e)[:100]}")
def test_generate_connection_url(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test to generate connection URL for AWS CloudFormation stack deployment."""
# Clean up any corrupted data from previous test runs
cleanup_cloud_accounts(postgres)
# Get authentication token for admin user
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
# Mock the deployment info query (for license validation)
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
# Prepare request payload
request_payload = {
"account_config": {"regions": ["us-east-1", "us-west-2"]},
"agent_config": {
"region": "us-east-1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
"version": "v0.0.8",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
# Assert successful response
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}: {response.text}"
# Parse response JSON
response_data = response.json()
# Assert response structure contains expected data
assert "data" in response_data, "Response should contain 'data' field"
# Assert required fields in the response data
expected_fields = ["account_id", "connection_url"]
for field in expected_fields:
assert (
field in response_data["data"]
), f"Response data should contain '{field}' field"
data = response_data["data"]
# Assert account_id is a valid UUID format
assert (
len(data["account_id"]) > 0
), "account_id should be a non-empty string (UUID)"
# Assert connection_url contains expected CloudFormation parameters
connection_url = data["connection_url"]
# Verify it's an AWS CloudFormation URL
assert (
"console.aws.amazon.com/cloudformation" in connection_url
), "connection_url should be an AWS CloudFormation URL"
# Verify region is included
assert (
"region=us-east-1" in connection_url
), "connection_url should contain the specified region"
# Verify required parameters are in the URL
required_params = [
"param_SigNozIntegrationAgentVersion=v0.0.8",
"param_SigNozApiUrl=https%3A%2F%2Ftest-deployment.test.signoz.cloud",
"param_SigNozApiKey=test-api-key-789",
"param_SigNozAccountId=", # Will be a UUID
"param_IngestionUrl=https%3A%2F%2Fingest.test.signoz.cloud",
"param_IngestionKey=test-ingestion-key-123456",
"stackName=signoz-integration",
"templateURL=https%3A%2F%2Fsignoz-integrations.s3.us-east-1.amazonaws.com%2Faws-quickcreate-template-v0.0.8.json",
]
for param in required_params:
assert (
param in connection_url
), f"connection_url should contain parameter: {param}"
logger.info("Connection URL generated successfully")
logger.info(f"Account ID: {data['account_id']}")
logger.info(f"Connection URL length: {len(connection_url)} characters")
def test_generate_connection_url_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
) -> None:
"""Test that unsupported cloud providers return an error."""
# Get authentication token for admin user
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Try with GCP (unsupported)
cloud_provider = "gcp"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
request_payload = {
"account_config": {"regions": ["us-central1"]},
"agent_config": {
"region": "us-central1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
# Should return Bad Request for unsupported provider
assert (
response.status_code == HTTPStatus.BAD_REQUEST
), f"Expected 400 for unsupported provider, got {response.status_code}"
response_data = response.json()
assert "error" in response_data, "Response should contain 'error' field"
assert (
"unsupported cloud provider" in response_data["error"].lower()
), "Error message should indicate unsupported provider"
logger.info("Unsupported provider correctly rejected with 400 Bad Request")

View File

@@ -0,0 +1,590 @@
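"""Integration tests for the cloud-integrations accounts API (AWS provider)."""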
from http import HTTPStatus
from typing import Callable
import uuid
import pytest
import requests
from sqlalchemy import text
from wiremock.client import (
HttpMethods,
Mapping,
MappingRequest,
MappingResponse,
WireMockMatchers,
)
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def cleanup_cloud_accounts(postgres: types.TestContainerSQL) -> None:
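    """Clean up the cloud_integration table to avoid corrupted data issues."""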
try:
with postgres.conn.connect() as conn:
            # DELETE rather than TRUNCATE; a missing table is handled by the except below
conn.execute(text("DELETE FROM cloud_integration"))
conn.commit()
logger.info("Cleaned up cloud_integration table")
except Exception as e:
# Table might not exist, which is fine
logger.info(f"Cleanup skipped or partial: {str(e)[:100]}")
def generate_unique_cloud_account_id() -> str:
"""Generate a unique cloud account ID for testing."""
    # Use the first 12 digits of the UUID's integer form to simulate an AWS account ID
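    # Illustrative only (hypothetical value): uuid4().int = 24197857200151... -> "241978572001"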
return str(uuid.uuid4().int)[:12]
def simulate_agent_checkin(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str,
account_id: str,
cloud_account_id: str,
) -> dict:
"""Simulate an agent check-in to mark the account as connected.
Returns:
dict with the response from check-in
"""
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/agent-check-in"
)
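    # An empty "data" payload suffices; the check-in itself marks the account connected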
checkin_payload = {
"account_id": account_id,
"cloud_account_id": cloud_account_id,
"data": {},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=checkin_payload,
timeout=10,
)
if response.status_code != HTTPStatus.OK:
logger.error(f"Agent check-in failed: {response.status_code}, response: {response.text}")
assert (
response.status_code == HTTPStatus.OK
), f"Agent check-in failed: {response.status_code}"
logger.info(f"Agent check-in completed for account: {account_id}")
response_data = response.json()
return response_data.get("data", response_data)
def create_test_account(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str = "aws",
) -> dict:
"""Create a test account via generate-connection-url.
Returns the data as-is from the API response. Caller is responsible for
doing agent check-in if needed to mark the account as connected.
Returns:
dict with account_id and connection_url from the API
"""
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
)
request_payload = {
"account_config": {"regions": ["us-east-1"]},
"agent_config": {
"region": "us-east-1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
"version": "v0.0.8",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Failed to create test account: {response.status_code}"
response_data = response.json()
# API returns data wrapped in {'status': 'success', 'data': {...}}
data = response_data.get("data", response_data)
return data
def test_list_connected_accounts_empty(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing connected accounts when there are none."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
# API returns data wrapped in {'status': 'success', 'data': {...}}
data = response_data.get("data", response_data)
assert "accounts" in data, "Response should contain 'accounts' field"
assert isinstance(data["accounts"], list), "Accounts should be a list"
# Note: If table doesn't exist yet, cleanup won't work and there might be leftover data
# This is acceptable for integration tests with --reuse flag
initial_count = len(data["accounts"])
logger.info(f"Accounts list returned successfully with {initial_count} existing account(s)")
def test_list_connected_accounts_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing connected accounts after creating one."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
    # Mock the zeus deployment-info lookup performed while generating the connection URL
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Simulate agent check-in to mark as connected
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
# List accounts
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "accounts" in data, "Response should contain 'accounts' field"
assert isinstance(data["accounts"], list), "Accounts should be a list"
# Find our account in the list (there may be leftover accounts from previous test runs)
account = next((a for a in data["accounts"] if a["id"] == account_id), None)
assert account is not None, f"Account {account_id} should be found in list"
assert account["id"] == account_id, "Account ID should match"
assert "config" in account, "Account should have config field"
assert "status" in account, "Account should have status field"
logger.info(f"Found account in list: {account_id}")
def test_get_account_status(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting the status of a specific account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account (no check-in needed for status check)
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Get account status
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/status"
)
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "id" in data, "Response should contain 'id' field"
assert data["id"] == account_id, "Account ID should match"
assert "status" in data, "Response should contain 'status' field"
assert "integration" in data["status"], "Status should contain 'integration' field"
logger.info(f"Retrieved status for account: {account_id}")
def test_get_account_status_not_found(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting status for a non-existent account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{fake_account_id}/status"
)
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404, got {response.status_code}"
logger.info("Non-existent account correctly returned 404")
def test_update_account_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating account configuration."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Simulate agent check-in to mark as connected
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
# Update account configuration
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/config"
)
updated_config = {
"config": {"regions": ["us-east-1", "us-west-2", "eu-west-1"]}
}
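    # Expand from the single region used at creation time to three regions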
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=updated_config,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "id" in data, "Response should contain 'id' field"
assert data["id"] == account_id, "Account ID should match"
# Verify the update by listing accounts
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_response_data = list_response.json()
list_data = list_response_data.get("data", list_response_data)
account = next((a for a in list_data["accounts"] if a["id"] == account_id), None)
assert account is not None, "Account should be found in list"
assert "config" in account, "Account should have config"
assert "regions" in account["config"], "Config should have regions"
assert len(account["config"]["regions"]) == 3, "Should have 3 regions"
assert set(account["config"]["regions"]) == {
"us-east-1",
"us-west-2",
"eu-west-1",
}, "Regions should match updated config"
logger.info(f"Updated account configuration: {account_id}")
def test_disconnect_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test disconnecting an account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
# Simulate agent check-in to mark as connected
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
# Disconnect the account
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/disconnect"
)
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
# Verify account is no longer in the connected list
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_response_data = list_response.json()
list_data = list_response_data.get("data", list_response_data)
assert len(list_data["accounts"]) == 0, "Should have no connected accounts"
logger.info(f"Disconnected account: {account_id}")
def test_disconnect_account_not_found(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test disconnecting a non-existent account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{fake_account_id}/disconnect"
)
    # POST matches the method the disconnect endpoint accepts (see test_disconnect_account)
    response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404, got {response.status_code}"
logger.info("Disconnect non-existent account correctly returned 404")
def test_list_accounts_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing accounts for an unsupported cloud provider."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "gcp" # Unsupported provider
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.BAD_REQUEST
), f"Expected 400, got {response.status_code}"
logger.info("Unsupported provider correctly rejected with 400 Bad Request")

View File

@@ -0,0 +1,787 @@
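"""Integration tests for the cloud-integrations services API (AWS provider)."""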
from http import HTTPStatus
from typing import Callable
import uuid
import requests
from sqlalchemy import text
from wiremock.client import (
HttpMethods,
Mapping,
MappingRequest,
MappingResponse,
WireMockMatchers,
)
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, add_license
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def cleanup_cloud_accounts(postgres: types.TestContainerSQL) -> None:
"""Clean up cloud_integration table to avoid corrupted data issues."""
try:
with postgres.conn.connect() as conn:
            # DELETE rather than TRUNCATE; a missing table is handled by the except below
conn.execute(text("DELETE FROM cloud_integration"))
conn.commit()
logger.info("Cleaned up cloud_integration table")
except Exception as e:
# Table might not exist, which is fine
logger.info(f"Cleanup skipped or partial: {str(e)[:100]}")
def generate_unique_cloud_account_id() -> str:
"""Generate a unique cloud account ID for testing."""
    # Use the first 12 digits of the UUID's integer form to simulate an AWS account ID
return str(uuid.uuid4().int)[:12]
def simulate_agent_checkin(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str,
account_id: str,
cloud_account_id: str,
) -> dict:
"""Simulate an agent check-in to mark the account as connected.
Returns:
dict with the response from check-in
"""
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/agent-check-in"
checkin_payload = {
"account_id": account_id,
"cloud_account_id": cloud_account_id,
"data": {},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=checkin_payload,
timeout=10,
)
if response.status_code != HTTPStatus.OK:
logger.error(
f"Agent check-in failed: {response.status_code}, response: {response.text}"
)
assert (
response.status_code == HTTPStatus.OK
), f"Agent check-in failed: {response.status_code}"
logger.info(f"Agent check-in completed for account: {account_id}")
response_data = response.json()
return response_data.get("data", response_data)
def create_test_account(
signoz: types.SigNoz,
admin_token: str,
cloud_provider: str = "aws",
) -> dict:
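    """Create a test account via generate-connection-url.
    Returns:
        dict with account_id and connection_url from the API
    """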
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
request_payload = {
"account_config": {"regions": ["us-east-1"]},
"agent_config": {
"region": "us-east-1",
"ingestion_url": "https://ingest.test.signoz.cloud",
"ingestion_key": "test-ingestion-key-123456",
"signoz_api_url": "https://test-deployment.test.signoz.cloud",
"signoz_api_key": "test-api-key-789",
"version": "v0.0.8",
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=request_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Failed to create test account: {response.status_code}"
response_data = response.json()
# API returns data wrapped in {'status': 'success', 'data': {...}}
data = response_data.get("data", response_data)
return data
def test_list_services_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing available services without specifying an account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "services" in data, "Response should contain 'services' field"
assert isinstance(data["services"], list), "Services should be a list"
assert len(data["services"]) > 0, "Should have at least one service available"
# Verify service structure
service = data["services"][0]
assert "id" in service, "Service should have 'id' field"
assert "title" in service, "Service should have 'title' field"
assert "icon" in service, "Service should have 'icon' field"
def test_list_services_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing services for a specific connected account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# List services for the account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services?cloud_account_id={cloud_account_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
assert "services" in data, "Response should contain 'services' field"
assert isinstance(data["services"], list), "Services should be a list"
assert len(data["services"]) > 0, "Should have at least one service available"
# Services should include config field (may be null if not configured)
service = data["services"][0]
assert "id" in service, "Service should have 'id' field"
assert "title" in service, "Service should have 'title' field"
assert "icon" in service, "Service should have 'icon' field"
logger.info(
f"Listed {len(data['services'])} services for account {cloud_account_id}"
)
def test_get_service_details_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting service details without specifying an account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
# First get the list of services to get a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
assert len(list_data["services"]) > 0, "Should have at least one service"
service_id = list_data["services"][0]["id"]
# Get service details
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
# Verify service details structure
assert "id" in data, "Service details should have 'id' field"
assert data["id"] == service_id, "Service ID should match requested ID"
assert "title" in data, "Service details should have 'name' field"
assert "overview" in data, "Service details should have 'overview' field"
    # Assets should include the service's list of dashboards
assert "assets" in data, "Service details should have 'assets' field"
assert isinstance(data["assets"], dict), "Assets should be a dictionary"
logger.info(f"Retrieved details for service: {service_id}")
def test_get_service_details_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting service details for a specific connected account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get list of services first
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
assert len(list_data["services"]) > 0, "Should have at least one service"
service_id = list_data["services"][0]["id"]
# Get service details with account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}?cloud_account_id={cloud_account_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
# Verify service details structure
assert "id" in data, "Service details should have 'id' field"
assert data["id"] == service_id, "Service ID should match requested ID"
assert "title" in data, "Service details should have 'title' field"
assert "overview" in data, "Service details should have 'overview' field"
assert "assets" in data, "Service details should have 'assets' field"
assert "config" in data, "Service details should have 'config' field"
assert "status" in data, "Config should have 'status' field"
logger.info(
f"Retrieved details for service {service_id} with account {cloud_account_id}"
)
def test_get_service_details_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test getting details for a non-existent service."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
fake_service_id = "non-existent-service"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{fake_service_id}"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404, got {response.status_code}"
logger.info("Non-existent service correctly returned 404")
def test_list_services_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test listing services for an unsupported cloud provider."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "gcp" # Unsupported provider
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
response = requests.get(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
assert (
response.status_code == HTTPStatus.BAD_REQUEST
), f"Expected 400, got {response.status_code}"
logger.info(
"Unsupported provider correctly rejected with 400 Bad Request for services"
)
def test_update_service_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating service configuration for a connected account."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get list of services to pick a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
assert len(list_data["services"]) > 0, "Should have at least one service"
service_id = list_data["services"][0]["id"]
# Update service configuration
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
config_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": True},
"logs": {"enabled": True},
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=config_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.OK
), f"Expected 200, got {response.status_code}"
response_data = response.json()
data = response_data.get("data", response_data)
# Verify response structure
assert "id" in data, "Response should contain 'id' field"
assert data["id"] == service_id, "Service ID should match"
assert "config" in data, "Response should contain 'config' field"
assert "metrics" in data["config"], "Config should contain 'metrics' field"
assert "logs" in data["config"], "Config should contain 'logs' field"
logger.info(f"Updated service config for {service_id} on account {cloud_account_id}")
def test_update_service_config_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating service config without a connected account should fail."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
cloud_provider = "aws"
# Get a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
service_id = list_data["services"][0]["id"]
# Try to update config with non-existent account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
fake_cloud_account_id = generate_unique_cloud_account_id()
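    # A fresh 12-digit ID that no agent check-in has ever registered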
config_payload = {
"cloud_account_id": fake_cloud_account_id,
"config": {
"metrics": {"enabled": True},
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=config_payload,
timeout=10,
)
    # TODO: the handler currently returns 500 here; it should arguably return 404
assert (
response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
), f"Expected 500 for non-existent account, got {response.status_code}"
logger.info("Update service config correctly rejected for non-existent account")
def test_update_service_config_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test updating config for a non-existent service should fail."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Try to update config for invalid service
fake_service_id = "non-existent-service"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{fake_service_id}/config"
config_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": True},
},
}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=config_payload,
timeout=10,
)
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404 for invalid service, got {response.status_code}"
logger.info("Update service config correctly rejected for invalid service")
def test_update_service_config_disable_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
make_http_mocks: Callable[[types.TestContainerDocker, list], None],
get_token: Callable[[str, str], str],
postgres: types.TestContainerSQL,
) -> None:
"""Test disabling a service by updating config with enabled=false."""
cleanup_cloud_accounts(postgres)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
add_license(signoz, make_http_mocks, get_token)
# Mock the deployment info query
make_http_mocks(
signoz.zeus,
[
Mapping(
request=MappingRequest(
method=HttpMethods.GET,
url="/v2/deployments/me",
headers={
"X-Signoz-Cloud-Api-Key": {
WireMockMatchers.EQUAL_TO: "secret-key"
}
},
),
response=MappingResponse(
status=200,
json_body={
"status": "success",
"data": {
"name": "test-deployment",
"cluster": {"region": {"dns": "test.signoz.cloud"}},
},
},
),
persistent=False,
)
],
)
# Create a test account and do check-in
cloud_provider = "aws"
account_data = create_test_account(signoz, admin_token, cloud_provider)
account_id = account_data["account_id"]
cloud_account_id = generate_unique_cloud_account_id()
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get a valid service
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
list_response = requests.get(
signoz.self.host_configs["8080"].get(list_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=10,
)
list_data = list_response.json().get("data", list_response.json())
service_id = list_data["services"][0]["id"]
# First enable the service
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
enable_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": True},
},
}
enable_response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=enable_payload,
timeout=10,
)
assert enable_response.status_code == HTTPStatus.OK, "Failed to enable service"
# Now disable the service
disable_payload = {
"cloud_account_id": cloud_account_id,
"config": {
"metrics": {"enabled": False},
"logs": {"enabled": False},
},
}
disable_response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
headers={"Authorization": f"Bearer {admin_token}"},
json=disable_payload,
timeout=10,
)
assert (
disable_response.status_code == HTTPStatus.OK
), f"Expected 200, got {disable_response.status_code}"
response_data = disable_response.json()
data = response_data.get("data", response_data)
# Verify service is disabled
assert data["config"]["metrics"]["enabled"] == False, "Metrics should be disabled"
assert data["config"]["logs"]["enabled"] == False, "Logs should be disabled"
logger.info(f"Successfully disabled service {service_id}")