mirror of https://github.com/SigNoz/signoz.git
synced 2026-02-09 03:02:20 +00:00

Compare commits

17 Commits
query-rang...bug/preser

| Author | SHA1 | Date |
|---|---|---|
|  | 61862b2db5 |  |
|  | fd84814094 |  |
|  | 8bff18b545 |  |
|  | 1189130b9b |  |
|  | e0715986a0 |  |
|  | 280b94f082 |  |
|  | 92ab827f44 |  |
|  | 27bb7a901d |  |
|  | 2399180f82 |  |
|  | b1e7afc690 |  |
|  | 2d9ff52505 |  |
|  | 87ddf1e24a |  |
|  | bf49e0595c |  |
|  | 6fbd111a3f |  |
|  | 89f09355de |  |
|  | 14f1f4ad28 |  |
|  | 77ddf31a93 |  |
@@ -3354,62 +3354,6 @@ paths:
      summary: Rotate session
      tags:
      - sessions
  /api/v5/query_range:
    post:
      deprecated: false
      description: Execute a composite query over a time range. Supports builder queries
        (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse
        SQL.
      operationId: QueryRangeV5
      requestBody:
        content:
          application/json:
            schema:
              $ref: '#/components/schemas/Querybuildertypesv5QueryRangeRequest'
      responses:
        "200":
          content:
            application/json:
              schema:
                properties:
                  data:
                    $ref: '#/components/schemas/Querybuildertypesv5QueryRangeResponse'
                  status:
                    type: string
                type: object
          description: OK
        "400":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Bad Request
        "401":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Unauthorized
        "403":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Forbidden
        "500":
          content:
            application/json:
              schema:
                $ref: '#/components/schemas/RenderErrorResponse'
          description: Internal Server Error
      security:
      - api_key:
        - VIEWER
      - tokenizer:
        - VIEWER
      summary: Query range
      tags:
      - query
components:
  schemas:
    AuthtypesAttributeMapping:
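For orientation, here is a minimal Go sketch of calling this endpoint with the example request embedded in the `Querybuildertypesv5QueryRangeRequest` schema further below. The host and the `SIGNOZ-API-KEY` header name are illustrative assumptions, not part of this diff.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Request body mirrors the example embedded in the
	// Querybuildertypesv5QueryRangeRequest schema.
	body := []byte(`{
	  "schemaVersion": "v1",
	  "start": 1640995200000,
	  "end": 1640998800000,
	  "requestType": "time_series",
	  "compositeQuery": {
	    "queries": [{
	      "type": "builder_query",
	      "spec": {
	        "name": "A",
	        "signal": "traces",
	        "stepInterval": "60s",
	        "aggregations": [{"alias": "span_count", "expression": "count()"}],
	        "filter": {"expression": "service.name = 'frontend'"},
	        "groupBy": [{"fieldContext": "resource", "name": "service.name"}],
	        "order": [{"direction": "desc", "key": {"name": "span_count"}}],
	        "limit": 10
	      }
	    }]
	  }
	}`)

	// Host and API-key header are assumptions for illustration only.
	req, err := http.NewRequest(http.MethodPost,
		"https://signoz.example.com/api/v5/query_range", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<api-key-with-VIEWER-scope>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}
```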
@@ -3984,9 +3928,19 @@ components:
        isMonotonic:
          type: boolean
        temporality:
          $ref: '#/components/schemas/MetrictypesTemporality'
          enum:
          - delta
          - cumulative
          - unspecified
          type: string
        type:
          $ref: '#/components/schemas/MetrictypesType'
          enum:
          - gauge
          - sum
          - histogram
          - summary
          - exponentialhistogram
          type: string
        unit:
          type: string
      required:
@@ -4009,7 +3963,13 @@ components:
          minimum: 0
          type: integer
        type:
          $ref: '#/components/schemas/MetrictypesType'
          enum:
          - gauge
          - sum
          - histogram
          - summary
          - exponentialhistogram
          type: string
        unit:
          type: string
      required:
@@ -4070,11 +4030,6 @@ components:
      - percentage
      - totalValue
      type: object
    MetricsexplorertypesTreemapMode:
      enum:
      - timeseries
      - samples
      type: string
    MetricsexplorertypesTreemapRequest:
      properties:
        end:
@@ -4085,7 +4040,10 @@ components:
        limit:
          type: integer
        mode:
          $ref: '#/components/schemas/MetricsexplorertypesTreemapMode'
          enum:
          - timeseries
          - samples
          type: string
        start:
          format: int64
          type: integer
@@ -4120,9 +4078,19 @@ components:
        metricName:
          type: string
        temporality:
          $ref: '#/components/schemas/MetrictypesTemporality'
          enum:
          - delta
          - cumulative
          - unspecified
          type: string
        type:
          $ref: '#/components/schemas/MetrictypesType'
          enum:
          - gauge
          - sum
          - histogram
          - summary
          - exponentialhistogram
          type: string
        unit:
          type: string
      required:
@@ -4133,20 +4101,6 @@ components:
      - temporality
      - isMonotonic
      type: object
    MetrictypesTemporality:
      enum:
      - delta
      - cumulative
      - unspecified
      type: string
    MetrictypesType:
      enum:
      - gauge
      - sum
      - histogram
      - summary
      - exponentialhistogram
      type: string
    PreferencetypesPreference:
      properties:
        allowedScopes:
@@ -4196,101 +4150,7 @@ components:
        type:
          type: string
      type: object
    Querybuildertypesv5AggregationBucket:
      properties:
        alias:
          type: string
        anomalyScores:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
          type: array
        index:
          type: integer
        lowerBoundSeries:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
          type: array
        meta:
          properties:
            unit:
              type: string
          type: object
        predictedSeries:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
          type: array
        series:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
          nullable: true
          type: array
        upperBoundSeries:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TimeSeries'
          type: array
      type: object
    Querybuildertypesv5Bucket:
      properties:
        step:
          format: double
          type: number
      type: object
    Querybuildertypesv5ClickHouseQuery:
      properties:
        disabled:
          type: boolean
        legend:
          type: string
        name:
          type: string
        query:
          type: string
      type: object
    Querybuildertypesv5ColumnDescriptor:
      properties:
        aggregationIndex:
          format: int64
          type: integer
        columnType:
          $ref: '#/components/schemas/Querybuildertypesv5ColumnType'
        description:
          type: string
        fieldContext:
          $ref: '#/components/schemas/TelemetrytypesFieldContext'
        fieldDataType:
          $ref: '#/components/schemas/TelemetrytypesFieldDataType'
        meta:
          properties:
            unit:
              type: string
          type: object
        name:
          type: string
        queryName:
          type: string
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
        unit:
          type: string
      type: object
    Querybuildertypesv5ColumnType:
      enum:
      - group
      - aggregation
      type: string
    Querybuildertypesv5CompositeQuery:
      description: Composite query containing one or more query envelopes. Each query
        envelope specifies its type and corresponding spec.
      properties:
        queries:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelope'
          nullable: true
          type: array
      type: object
    Querybuildertypesv5ExecStats:
      description: Execution statistics for the query, including rows scanned, bytes
        scanned, and duration.
      properties:
        bytesScanned:
          minimum: 0
@@ -4312,109 +4172,10 @@ components:
        expression:
          type: string
      type: object
    Querybuildertypesv5FormatOptions:
      properties:
        fillGaps:
          type: boolean
        formatTableResultForUI:
          type: boolean
      type: object
    Querybuildertypesv5Function:
      properties:
        args:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5FunctionArg'
          type: array
        name:
          $ref: '#/components/schemas/Querybuildertypesv5FunctionName'
      type: object
    Querybuildertypesv5FunctionArg:
      properties:
        name:
          type: string
        value: {}
      type: object
    Querybuildertypesv5FunctionName:
      enum:
      - cutoffmin
      - cutoffmax
      - clampmin
      - clampmax
      - absolute
      - runningdiff
      - log2
      - log10
      - cumulativesum
      - ewma3
      - ewma5
      - ewma7
      - median3
      - median5
      - median7
      - timeshift
      - anomaly
      - fillzero
      type: string
    Querybuildertypesv5GroupByKey:
      properties:
        description:
          type: string
        fieldContext:
          $ref: '#/components/schemas/TelemetrytypesFieldContext'
        fieldDataType:
          $ref: '#/components/schemas/TelemetrytypesFieldDataType'
        name:
          type: string
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
        unit:
          type: string
      type: object
    Querybuildertypesv5Having:
      properties:
        expression:
          type: string
      type: object
    Querybuildertypesv5Label:
      properties:
        key:
          $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
        value: {}
      type: object
    Querybuildertypesv5LimitBy:
      properties:
        keys:
          items:
            type: string
          nullable: true
          type: array
        value:
          type: string
      type: object
    Querybuildertypesv5LogAggregation:
      properties:
        alias:
          type: string
        expression:
          type: string
      type: object
    Querybuildertypesv5MetricAggregation:
      properties:
        metricName:
          type: string
        reduceTo:
          $ref: '#/components/schemas/Querybuildertypesv5ReduceTo'
        spaceAggregation:
          type: string
        temporality:
          type: string
        timeAggregation:
          type: string
      type: object
    Querybuildertypesv5OrderBy:
      properties:
        direction:
          $ref: '#/components/schemas/Querybuildertypesv5OrderDirection'
          type: string
        key:
          $ref: '#/components/schemas/Querybuildertypesv5OrderByKey'
      type: object
@@ -4423,404 +4184,34 @@ components:
        description:
          type: string
        fieldContext:
          $ref: '#/components/schemas/TelemetrytypesFieldContext'
          type: string
        fieldDataType:
          $ref: '#/components/schemas/TelemetrytypesFieldDataType'
          type: string
        name:
          type: string
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
          type: string
        unit:
          type: string
      type: object
    Querybuildertypesv5OrderDirection:
      enum:
      - asc
      - desc
      type: string
    Querybuildertypesv5PromQuery:
      properties:
        disabled:
          type: boolean
        legend:
          type: string
        name:
          type: string
        query:
          type: string
        stats:
          type: boolean
        step:
          $ref: '#/components/schemas/Querybuildertypesv5Step'
      type: object
    Querybuildertypesv5QueryBuilderFormula:
      properties:
        disabled:
          type: boolean
        expression:
          type: string
        functions:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5Function'
          type: array
        having:
          $ref: '#/components/schemas/Querybuildertypesv5Having'
        legend:
          type: string
        limit:
          type: integer
        name:
          type: string
        order:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
          type: array
      type: object
    Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5LogAggregation:
      properties:
        aggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5LogAggregation'
          type: array
        cursor:
          type: string
        disabled:
          type: boolean
        filter:
          $ref: '#/components/schemas/Querybuildertypesv5Filter'
        functions:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5Function'
          type: array
        groupBy:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
          type: array
        having:
          $ref: '#/components/schemas/Querybuildertypesv5Having'
        legend:
          type: string
        limit:
          type: integer
        limitBy:
          $ref: '#/components/schemas/Querybuildertypesv5LimitBy'
        name:
          type: string
        offset:
          type: integer
        order:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
          type: array
        secondaryAggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
          type: array
        selectFields:
          items:
            $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
          type: array
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
        source:
          $ref: '#/components/schemas/TelemetrytypesSource'
        stepInterval:
          $ref: '#/components/schemas/Querybuildertypesv5Step'
      type: object
    Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5MetricAggregation:
      properties:
        aggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5MetricAggregation'
          type: array
        cursor:
          type: string
        disabled:
          type: boolean
        filter:
          $ref: '#/components/schemas/Querybuildertypesv5Filter'
        functions:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5Function'
          type: array
        groupBy:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
          type: array
        having:
          $ref: '#/components/schemas/Querybuildertypesv5Having'
        legend:
          type: string
        limit:
          type: integer
        limitBy:
          $ref: '#/components/schemas/Querybuildertypesv5LimitBy'
        name:
          type: string
        offset:
          type: integer
        order:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
          type: array
        secondaryAggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
          type: array
        selectFields:
          items:
            $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
          type: array
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
        source:
          $ref: '#/components/schemas/TelemetrytypesSource'
        stepInterval:
          $ref: '#/components/schemas/Querybuildertypesv5Step'
      type: object
    Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5TraceAggregation:
      properties:
        aggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TraceAggregation'
          type: array
        cursor:
          type: string
        disabled:
          type: boolean
        filter:
          $ref: '#/components/schemas/Querybuildertypesv5Filter'
        functions:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5Function'
          type: array
        groupBy:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
          type: array
        having:
          $ref: '#/components/schemas/Querybuildertypesv5Having'
        legend:
          type: string
        limit:
          type: integer
        limitBy:
          $ref: '#/components/schemas/Querybuildertypesv5LimitBy'
        name:
          type: string
        offset:
          type: integer
        order:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
          type: array
        secondaryAggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5SecondaryAggregation'
          type: array
        selectFields:
          items:
            $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
          type: array
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
        source:
          $ref: '#/components/schemas/TelemetrytypesSource'
        stepInterval:
          $ref: '#/components/schemas/Querybuildertypesv5Step'
      type: object
    Querybuildertypesv5QueryBuilderTraceOperator:
      properties:
        aggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TraceAggregation'
          type: array
        cursor:
          type: string
        disabled:
          type: boolean
        expression:
          type: string
        filter:
          $ref: '#/components/schemas/Querybuildertypesv5Filter'
        functions:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5Function'
          type: array
        groupBy:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
          type: array
        having:
          $ref: '#/components/schemas/Querybuildertypesv5Having'
        legend:
          type: string
        limit:
          type: integer
        name:
          type: string
        offset:
          type: integer
        order:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
          type: array
        returnSpansFrom:
          type: string
        selectFields:
          items:
            $ref: '#/components/schemas/TelemetrytypesTelemetryFieldKey'
          type: array
        stepInterval:
          $ref: '#/components/schemas/Querybuildertypesv5Step'
      type: object
    Querybuildertypesv5QueryData:
      oneOf:
      - $ref: '#/components/schemas/Querybuildertypesv5TimeSeriesData'
      - $ref: '#/components/schemas/Querybuildertypesv5ScalarData'
      - $ref: '#/components/schemas/Querybuildertypesv5RawData'
      properties:
        results:
          items: {}
          nullable: true
          type: array
      type: object
    Querybuildertypesv5QueryEnvelope:
      oneOf:
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderTrace'
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderLog'
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeBuilderMetric'
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeFormula'
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeTraceOperator'
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopePromQL'
      - $ref: '#/components/schemas/Querybuildertypesv5QueryEnvelopeClickHouseSQL'
      properties:
        spec: {}
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopeBuilderLog:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5LogAggregation'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopeBuilderMetric:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5MetricAggregation'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopeBuilderTrace:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5QueryBuilderQueryGithubComSigNozSignozPkgTypesQuerybuildertypesQuerybuildertypesv5TraceAggregation'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopeClickHouseSQL:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5ClickHouseQuery'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopeFormula:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5QueryBuilderFormula'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopePromQL:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5PromQuery'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryEnvelopeTraceOperator:
      properties:
        spec:
          $ref: '#/components/schemas/Querybuildertypesv5QueryBuilderTraceOperator'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5QueryType'
      type: object
    Querybuildertypesv5QueryRangeRequest:
      description: Request body for the v5 query range endpoint. Supports builder
        queries (traces, logs, metrics), formulas, joins, trace operators, PromQL,
        and ClickHouse SQL queries.
      example:
        compositeQuery:
          queries:
          - spec:
              aggregations:
              - alias: span_count
                expression: count()
              filter:
                expression: service.name = 'frontend'
              groupBy:
              - fieldContext: resource
                name: service.name
              limit: 10
              name: A
              order:
              - direction: desc
                key:
                  name: span_count
              signal: traces
              stepInterval: 60s
            type: builder_query
        end: 1.6409988e+12
        requestType: time_series
        schemaVersion: v1
        start: 1.6409952e+12
      properties:
        compositeQuery:
          $ref: '#/components/schemas/Querybuildertypesv5CompositeQuery'
        end:
          minimum: 0
          type: integer
        formatOptions:
          $ref: '#/components/schemas/Querybuildertypesv5FormatOptions'
        noCache:
          type: boolean
        requestType:
          $ref: '#/components/schemas/Querybuildertypesv5RequestType'
        schemaVersion:
          type: string
        start:
          minimum: 0
          type: integer
        variables:
          additionalProperties:
            $ref: '#/components/schemas/Querybuildertypesv5VariableItem'
          type: object
      type: object
    Querybuildertypesv5QueryRangeResponse:
      description: 'Response from the v5 query range endpoint. The data.results array
        contains typed results depending on the requestType: TimeSeriesData for time_series,
        ScalarData for scalar, or RawData for raw requests.'
      properties:
        data:
          $ref: '#/components/schemas/Querybuildertypesv5QueryData'
        meta:
          $ref: '#/components/schemas/Querybuildertypesv5ExecStats'
        type:
          $ref: '#/components/schemas/Querybuildertypesv5RequestType'
          type: string
        warning:
          $ref: '#/components/schemas/Querybuildertypesv5QueryWarnData'
      type: object
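The description above says `data.results` carries a different shape per `requestType`. A hedged Go sketch of dispatching on the response's `type` field; the struct shapes are trimmed approximations of the schemas here, not the actual `qbtypes` definitions:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down mirrors of the schemas above; the real definitions live
// in pkg/types/querybuildertypes/querybuildertypesv5 and may differ.
type queryRangeResponse struct {
	Type string `json:"type"` // a Querybuildertypesv5RequestType value
	Data struct {
		Results []json.RawMessage `json:"results"`
	} `json:"data"`
}

type timeSeriesData struct {
	QueryName    string            `json:"queryName"`
	Aggregations []json.RawMessage `json:"aggregations"`
}

type scalarData struct {
	QueryName string          `json:"queryName"`
	Data      [][]interface{} `json:"data"`
}

type rawData struct {
	QueryName  string            `json:"queryName"`
	NextCursor string            `json:"nextCursor"`
	Rows       []json.RawMessage `json:"rows"`
}

// decodeResults picks a concrete result shape per entry based on the
// response's type field, as the schema description specifies.
func decodeResults(payload []byte) ([]interface{}, error) {
	var resp queryRangeResponse
	if err := json.Unmarshal(payload, &resp); err != nil {
		return nil, err
	}
	out := make([]interface{}, 0, len(resp.Data.Results))
	for _, raw := range resp.Data.Results {
		var v interface{}
		switch resp.Type {
		case "time_series":
			v = new(timeSeriesData)
		case "scalar":
			v = new(scalarData)
		case "raw":
			v = new(rawData)
		default:
			return nil, fmt.Errorf("unhandled request type %q", resp.Type)
		}
		if err := json.Unmarshal(raw, v); err != nil {
			return nil, err
		}
		out = append(out, v)
	}
	return out, nil
}

func main() {
	payload := []byte(`{"type":"scalar","data":{"results":[{"queryName":"A","data":[["frontend",42]]}]}}`)
	results, err := decodeResults(payload)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", results[0])
}
```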
    Querybuildertypesv5QueryType:
      enum:
      - builder_query
      - builder_formula
      - builder_trace_operator
      - clickhouse_sql
      - promql
      type: string
    Querybuildertypesv5QueryWarnData:
      properties:
        message:
@@ -4837,153 +4228,6 @@ components:
          message:
            type: string
          type: object
    Querybuildertypesv5RawData:
      properties:
        nextCursor:
          type: string
        queryName:
          type: string
        rows:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5RawRow'
          nullable: true
          type: array
      type: object
    Querybuildertypesv5RawRow:
      properties:
        data:
          additionalProperties: {}
          nullable: true
          type: object
        timestamp:
          format: date-time
          type: string
      type: object
    Querybuildertypesv5ReduceTo:
      enum:
      - sum
      - count
      - avg
      - min
      - max
      - last
      - median
      type: string
    Querybuildertypesv5RequestType:
      enum:
      - scalar
      - time_series
      - raw
      - raw_stream
      - trace
      type: string
    Querybuildertypesv5ScalarData:
      properties:
        columns:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5ColumnDescriptor'
          nullable: true
          type: array
        data:
          items:
            items: {}
            type: array
          nullable: true
          type: array
        queryName:
          type: string
      type: object
    Querybuildertypesv5SecondaryAggregation:
      properties:
        alias:
          type: string
        expression:
          type: string
        groupBy:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5GroupByKey'
          type: array
        limit:
          type: integer
        limitBy:
          $ref: '#/components/schemas/Querybuildertypesv5LimitBy'
        order:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5OrderBy'
          type: array
        stepInterval:
          $ref: '#/components/schemas/Querybuildertypesv5Step'
      type: object
    Querybuildertypesv5Step:
      description: Step interval. Accepts a Go duration string (e.g., "60s", "1m",
        "1h") or a number representing seconds (e.g., 60).
      oneOf:
      - description: Duration string (e.g., "60s", "5m", "1h").
        example: 60s
        type: string
      - description: Duration in seconds.
        example: 60
        type: number
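Since the Step schema accepts either encoding, a client has to handle both. A small illustrative Go sketch of accepting both forms (this is an illustration only, not the repo's implementation):

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// step accepts either a Go duration string ("60s", "5m") or a plain
// number of seconds, matching the oneOf in Querybuildertypesv5Step.
type step struct {
	time.Duration
}

func (s *step) UnmarshalJSON(b []byte) error {
	// First try the string form: "60s", "5m", "1h".
	var str string
	if err := json.Unmarshal(b, &str); err == nil {
		d, err := time.ParseDuration(str)
		if err != nil {
			return err
		}
		s.Duration = d
		return nil
	}
	// Fall back to the numeric form: seconds.
	var secs float64
	if err := json.Unmarshal(b, &secs); err != nil {
		return err
	}
	s.Duration = time.Duration(secs * float64(time.Second))
	return nil
}

func main() {
	for _, raw := range []string{`"60s"`, `60`, `"5m"`} {
		var s step
		if err := json.Unmarshal([]byte(raw), &s); err != nil {
			panic(err)
		}
		fmt.Println(raw, "->", s.Duration) // "60s" -> 1m0s, 60 -> 1m0s, "5m" -> 5m0s
	}
}
```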
    Querybuildertypesv5TimeSeries:
      properties:
        labels:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5Label'
          type: array
        values:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5TimeSeriesValue'
          nullable: true
          type: array
      type: object
    Querybuildertypesv5TimeSeriesData:
      properties:
        aggregations:
          items:
            $ref: '#/components/schemas/Querybuildertypesv5AggregationBucket'
          nullable: true
          type: array
        queryName:
          type: string
      type: object
    Querybuildertypesv5TimeSeriesValue:
      properties:
        bucket:
          $ref: '#/components/schemas/Querybuildertypesv5Bucket'
        partial:
          type: boolean
        timestamp:
          format: int64
          type: integer
        value:
          format: double
          type: number
        values:
          items:
            format: double
            type: number
          type: array
      type: object
    Querybuildertypesv5TraceAggregation:
      properties:
        alias:
          type: string
        expression:
          type: string
      type: object
    Querybuildertypesv5VariableItem:
      properties:
        type:
          $ref: '#/components/schemas/Querybuildertypesv5VariableType'
        value: {}
      type: object
    Querybuildertypesv5VariableType:
      enum:
      - query
      - dynamic
      - custom
      - text
      type: string
    RenderErrorResponse:
      properties:
        error:
@@ -5010,48 +4254,6 @@ components:
          format: date-time
          type: string
      type: object
    TelemetrytypesFieldContext:
      enum:
      - metric
      - log
      - span
      - resource
      - attribute
      - body
      type: string
    TelemetrytypesFieldDataType:
      enum:
      - string
      - bool
      - float64
      - int64
      - number
      type: string
    TelemetrytypesSignal:
      enum:
      - traces
      - logs
      - metrics
      type: string
    TelemetrytypesSource:
      enum:
      - meter
      type: string
    TelemetrytypesTelemetryFieldKey:
      properties:
        description:
          type: string
        fieldContext:
          $ref: '#/components/schemas/TelemetrytypesFieldContext'
        fieldDataType:
          $ref: '#/components/schemas/TelemetrytypesFieldDataType'
        name:
          type: string
        signal:
          $ref: '#/components/schemas/TelemetrytypesSignal'
        unit:
          type: string
      type: object
    TypesChangePasswordRequest:
      properties:
        newPassword:
@@ -11,7 +11,6 @@ import (
    "github.com/SigNoz/signoz/pkg/alertmanager"
    "github.com/SigNoz/signoz/pkg/apis/fields"
    "github.com/SigNoz/signoz/pkg/global"
    "github.com/SigNoz/signoz/pkg/http/handler"
    "github.com/SigNoz/signoz/pkg/http/middleware"
    querierAPI "github.com/SigNoz/signoz/pkg/querier"
    baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -23,8 +22,6 @@ import (
    rules "github.com/SigNoz/signoz/pkg/query-service/rules"
    "github.com/SigNoz/signoz/pkg/queryparser"
    "github.com/SigNoz/signoz/pkg/signoz"
    "github.com/SigNoz/signoz/pkg/types/ctxtypes"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    "github.com/SigNoz/signoz/pkg/version"
    "github.com/gorilla/mux"
)
@@ -111,22 +108,7 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
    router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

    // v5
    router.Handle("/api/v5/query_range", handler.New(am.ViewAccess(ah.queryRangeV5), handler.OpenAPIDef{
        ID:                  "QueryRangeV5",
        Tags:                []string{"query"},
        Summary:             "Query range",
        Description:         "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL.",
        Request:             new(qbtypes.QueryRangeRequest),
        RequestContentType:  "application/json",
        Response:            new(qbtypes.QueryRangeResponse),
        ResponseContentType: "application/json",
        SuccessStatusCode:   http.StatusOK,
        ErrorStatusCodes:    []int{http.StatusBadRequest},
        SecuritySchemes: []handler.OpenAPISecurityScheme{
            {Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
            {Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
        },
    })).Methods(http.MethodPost)
    router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)

    router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)

@@ -15,7 +15,7 @@ import (
    "github.com/SigNoz/signoz/pkg/query-service/common"
    "github.com/SigNoz/signoz/pkg/query-service/model"
    "github.com/SigNoz/signoz/pkg/transition"
    ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/valuer"

    querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
@@ -63,6 +63,8 @@ type AnomalyRule struct {
    seasonality anomaly.Seasonality
}

var _ baserules.Rule = (*AnomalyRule)(nil)

func NewAnomalyRule(
    id string,
    orgID valuer.UUID,
@@ -490,7 +492,7 @@ func (r *AnomalyRule) Eval(ctx context.Context, ts time.Time) (int, error) {
            continue
        }

        if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration() {
        if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.HoldDuration().Duration() {
            a.State = model.StateFiring
            a.FiredAt = ts
            state := model.StateFiring
@@ -553,7 +555,7 @@ func (r *AnomalyRule) String() string {
    ar := ruletypes.PostableRule{
        AlertName:         r.Name(),
        RuleCondition:     r.Condition(),
        EvalWindow:        ruletypes.Duration(r.EvalWindow()),
        EvalWindow:        r.EvalWindow(),
        Labels:            r.Labels().Map(),
        Annotations:       r.Annotations().Map(),
        PreferredChannels: r.PreferredChannels(),

@@ -40,7 +40,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
    // Test basic AlertOnAbsent functionality (without AbsentFor grace period)

    baseTime := time.Unix(1700000000, 0)
    evalWindow := 5 * time.Minute
    evalWindow := valuer.MustParseTextDuration("5m")
    evalTime := baseTime.Add(5 * time.Minute)

    target := 500.0
@@ -50,8 +50,8 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
        AlertType: ruletypes.AlertTypeMetric,
        RuleType:  RuleTypeAnomaly,
        Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
            EvalWindow: ruletypes.Duration(evalWindow),
            Frequency:  ruletypes.Duration(1 * time.Minute),
            EvalWindow: evalWindow,
            Frequency:  valuer.MustParseTextDuration("1m"),
        }},
        RuleCondition: &ruletypes.RuleCondition{
            CompareOp: ruletypes.ValueIsAbove,
@@ -147,7 +147,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
    // 3. Alert fires only if t2 - t1 > AbsentFor

    baseTime := time.Unix(1700000000, 0)
    evalWindow := 5 * time.Minute
    evalWindow := valuer.MustParseTextDuration("5m")

    // Set target higher than test data so regular threshold alerts don't fire
    target := 500.0
@@ -157,8 +157,8 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
        AlertType: ruletypes.AlertTypeMetric,
        RuleType:  RuleTypeAnomaly,
        Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
            EvalWindow: ruletypes.Duration(evalWindow),
            Frequency:  ruletypes.Duration(time.Minute),
            EvalWindow: evalWindow,
            Frequency:  valuer.MustParseTextDuration("1m"),
        }},
        RuleCondition: &ruletypes.RuleCondition{
            CompareOp: ruletypes.ValueIsAbove,

@@ -48,7 +48,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        rules = append(rules, tr)

        // create ch rule task for evaluation
        task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
        task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

    } else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

@@ -72,7 +72,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        rules = append(rules, pr)

        // create promql rule task for evaluation
        task = newTask(baserules.TaskTypeProm, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
        task = newTask(baserules.TaskTypeProm, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

    } else if opts.Rule.RuleType == ruletypes.RuleTypeAnomaly {
        // create anomaly rule
@@ -96,7 +96,7 @@ func PrepareTaskFunc(opts baserules.PrepareTaskOptions) (baserules.Task, error)
        rules = append(rules, ar)

        // create anomaly rule task for evaluation
        task = newTask(baserules.TaskTypeCh, opts.TaskName, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
        task = newTask(baserules.TaskTypeCh, opts.TaskName, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

    } else {
        return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -213,8 +213,7 @@ func TestNotification(opts baserules.PrepareTestRuleOptions) (int, *basemodel.Ap
    return alertsFound, nil
}

// newTask returns an appropriate group for
// rule type
// newTask returns an appropriate group for the rule type
func newTask(taskType baserules.TaskType, name string, frequency time.Duration, rules []baserules.Rule, opts *baserules.ManagerOptions, notify baserules.NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) baserules.Task {
    if taskType == baserules.TaskTypeCh {
        return baserules.NewRuleTask(name, "", frequency, rules, opts, notify, maintenanceStore, orgID)

@@ -762,6 +762,18 @@ export interface MetricsexplorertypesMetricHighlightsResponseDTO {
  totalTimeSeries: number;
}

export enum MetricsexplorertypesMetricMetadataDTOTemporality {
  delta = 'delta',
  cumulative = 'cumulative',
  unspecified = 'unspecified',
}
export enum MetricsexplorertypesMetricMetadataDTOType {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesMetricMetadataDTO {
  /**
   * @type string
@@ -771,14 +783,29 @@ export interface MetricsexplorertypesMetricMetadataDTO {
   * @type boolean
   */
  isMonotonic: boolean;
  temporality: MetrictypesTemporalityDTO;
  type: MetrictypesTypeDTO;
  /**
   * @enum delta,cumulative,unspecified
   * @type string
   */
  temporality: MetricsexplorertypesMetricMetadataDTOTemporality;
  /**
   * @enum gauge,sum,histogram,summary,exponentialhistogram
   * @type string
   */
  type: MetricsexplorertypesMetricMetadataDTOType;
  /**
   * @type string
   */
  unit: string;
}

export enum MetricsexplorertypesStatDTOType {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesStatDTO {
  /**
   * @type string
@@ -798,7 +825,11 @@ export interface MetricsexplorertypesStatDTO {
   * @minimum 0
   */
  timeseries: number;
  type: MetrictypesTypeDTO;
  /**
   * @enum gauge,sum,histogram,summary,exponentialhistogram
   * @type string
   */
  type: MetricsexplorertypesStatDTOType;
  /**
   * @type string
   */
@@ -858,7 +889,7 @@ export interface MetricsexplorertypesTreemapEntryDTO {
  totalValue: number;
}

export enum MetricsexplorertypesTreemapModeDTO {
export enum MetricsexplorertypesTreemapRequestDTOMode {
  timeseries = 'timeseries',
  samples = 'samples',
}
@@ -873,7 +904,11 @@ export interface MetricsexplorertypesTreemapRequestDTO {
   * @type integer
   */
  limit: number;
  mode: MetricsexplorertypesTreemapModeDTO;
  /**
   * @enum timeseries,samples
   * @type string
   */
  mode: MetricsexplorertypesTreemapRequestDTOMode;
  /**
   * @type integer
   * @format int64
@@ -894,6 +929,18 @@ export interface MetricsexplorertypesTreemapResponseDTO {
  timeseries: MetricsexplorertypesTreemapEntryDTO[] | null;
}

export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality {
  delta = 'delta',
  cumulative = 'cumulative',
  unspecified = 'unspecified',
}
export enum MetricsexplorertypesUpdateMetricMetadataRequestDTOType {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
  /**
   * @type string
@@ -907,26 +954,22 @@ export interface MetricsexplorertypesUpdateMetricMetadataRequestDTO {
   * @type string
   */
  metricName: string;
  temporality: MetrictypesTemporalityDTO;
  type: MetrictypesTypeDTO;
  /**
   * @enum delta,cumulative,unspecified
   * @type string
   */
  temporality: MetricsexplorertypesUpdateMetricMetadataRequestDTOTemporality;
  /**
   * @enum gauge,sum,histogram,summary,exponentialhistogram
   * @type string
   */
  type: MetricsexplorertypesUpdateMetricMetadataRequestDTOType;
  /**
   * @type string
   */
  unit: string;
}

export enum MetrictypesTemporalityDTO {
  delta = 'delta',
  cumulative = 'cumulative',
  unspecified = 'unspecified',
}
export enum MetrictypesTypeDTO {
  gauge = 'gauge',
  sum = 'sum',
  histogram = 'histogram',
  summary = 'summary',
  exponentialhistogram = 'exponentialhistogram',
}
export interface PreferencetypesPreferenceDTO {
  /**
   * @type array
@@ -1345,7 +1388,7 @@ export interface TypesPostableForgotPasswordDTO {
  /**
   * @type string
   */
  email: string;
  email?: string;
  /**
   * @type string
   */
@@ -1353,7 +1396,7 @@ export interface TypesPostableForgotPasswordDTO {
  /**
   * @type string
   */
  orgId: string;
  orgId?: string;
}

export interface TypesPostableInviteDTO {

@@ -69,7 +69,7 @@ import (
    "github.com/SigNoz/signoz/pkg/types/opamptypes"
    "github.com/SigNoz/signoz/pkg/types/pipelinetypes"
    qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
    ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
    "github.com/SigNoz/signoz/pkg/types/ruletypes"
    traceFunnels "github.com/SigNoz/signoz/pkg/types/tracefunneltypes"

    "go.uber.org/zap"
@@ -104,10 +104,11 @@ type APIHandler struct {
    querierV2    interfaces.Querier
    queryBuilder *queryBuilder.QueryBuilder

    // temporalityMap is a map of metric name to temporality
    // to avoid fetching temporality for the same metric multiple times
    // querying the v4 table on low cardinal temporality column
    // should be fast but we can still avoid the query if we have the data in memory
    // temporalityMap is a map of metric name to temporality to avoid fetching
    // temporality for the same metric multiple times.
    //
    // Querying the v4 table on a low cardinal temporality column should be
    // fast, but we can still avoid the query if we have the data in memory.
    temporalityMap map[string]map[v3.Temporality]bool
    temporalityMux sync.Mutex

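The reworded comment describes a mutex-guarded memoization of per-metric temporality. A minimal Go sketch of that pattern, with a stand-in fetch function in place of the real ClickHouse lookup:

```go
package main

import (
	"fmt"
	"sync"
)

type temporality string

// temporalityCache memoizes per-metric temporality so repeated rule
// evaluations don't re-issue the (already cheap) v4 table query.
type temporalityCache struct {
	mu    sync.Mutex
	cache map[string]map[temporality]bool
	fetch func(metric string) map[temporality]bool // stand-in for the real lookup
}

func (c *temporalityCache) get(metric string) map[temporality]bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	if t, ok := c.cache[metric]; ok {
		return t // served from memory, no query issued
	}
	t := c.fetch(metric)
	c.cache[metric] = t
	return t
}

func main() {
	calls := 0
	c := &temporalityCache{
		cache: map[string]map[temporality]bool{},
		fetch: func(string) map[temporality]bool {
			calls++
			return map[temporality]bool{"cumulative": true}
		},
	}
	c.get("http_requests_total")
	c.get("http_requests_total")
	fmt.Println("fetches:", calls) // fetches: 1
}
```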
@@ -1023,7 +1024,7 @@ func (aH *APIHandler) getRuleStateHistory(w http.ResponseWriter, r *http.Request
    // the query range is calculated based on the rule's evalWindow and evalDelay
    // alerts have 2 minutes delay built in, so we need to subtract that from the start time
    // to get the correct query range
    start := end.Add(-time.Duration(rule.EvalWindow)).Add(-3 * time.Minute)
    start := end.Add(-rule.EvalWindow.Duration() - 3*time.Minute)
    if rule.AlertType == ruletypes.AlertTypeLogs {
        if rule.Version != "v5" {
            res.Items[idx].RelatedLogsLink = contextlinks.PrepareLinksToLogs(start, end, newFilters)
@@ -1230,12 +1231,12 @@ func (aH *APIHandler) Get(rw http.ResponseWriter, r *http.Request) {

    dashboard := new(dashboardtypes.Dashboard)
    if aH.CloudIntegrationsController.IsCloudIntegrationDashboardUuid(id) {
        cloudintegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
        cloudIntegrationDashboard, apiErr := aH.CloudIntegrationsController.GetDashboardById(ctx, orgID, id)
        if apiErr != nil {
            render.Error(rw, errorsV2.Wrapf(apiErr, errorsV2.TypeInternal, errorsV2.CodeInternal, "failed to get dashboard"))
            return
        }
        dashboard = cloudintegrationDashboard
        dashboard = cloudIntegrationDashboard
    } else if aH.IntegrationsController.IsInstalledIntegrationDashboardID(id) {
        integrationDashboard, apiErr := aH.IntegrationsController.GetInstalledIntegrationDashboardById(ctx, orgID, id)
        if apiErr != nil {
@@ -1564,13 +1565,13 @@ func (aH *APIHandler) queryMetrics(w http.ResponseWriter, r *http.Request) {
        RespondError(w, &model.ApiError{Typ: model.ErrorExec, Err: res.Err}, nil)
    }

    response_data := &model.QueryData{
    responseData := &model.QueryData{
        ResultType: res.Value.Type(),
        Result:     res.Value,
        Stats:      qs,
    }

    aH.Respond(w, response_data)
    aH.Respond(w, responseData)

}

@@ -2652,12 +2653,12 @@ func (aH *APIHandler) getProducerData(w http.ResponseWriter, r *http.Request) {
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2705,12 +2706,12 @@ func (aH *APIHandler) getConsumerData(w http.ResponseWriter, r *http.Request) {
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2759,12 +2760,12 @@ func (aH *APIHandler) getPartitionOverviewLatencyData(w http.ResponseWriter, r *
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2813,12 +2814,12 @@ func (aH *APIHandler) getConsumerPartitionLatencyData(w http.ResponseWriter, r *
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -2870,12 +2871,12 @@ func (aH *APIHandler) getProducerThroughputOverview(w http.ResponseWriter, r *ht
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, producerQueryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }

@@ -2981,12 +2982,12 @@ func (aH *APIHandler) getProducerThroughputDetails(w http.ResponseWriter, r *htt
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3035,12 +3036,12 @@ func (aH *APIHandler) getConsumerThroughputOverview(w http.ResponseWriter, r *ht
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3089,12 +3090,12 @@ func (aH *APIHandler) getConsumerThroughputDetails(w http.ResponseWriter, r *htt
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    result = postprocess.TransformToTableForClickHouseQueries(result)
@@ -3149,12 +3150,12 @@ func (aH *APIHandler) getProducerConsumerEval(w http.ResponseWriter, r *http.Req
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error

    result, errQuriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(r.Context(), orgID, queryRangeParams)
    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }

@@ -4138,11 +4139,11 @@ func (aH *APIHandler) ListLogsPipelinesHandler(w http.ResponseWriter, r *http.Re
    aH.Respond(w, payload)
}

// listLogsPipelines lists logs piplines for latest version
// listLogsPipelines lists logs pipelines for latest version
func (aH *APIHandler) listLogsPipelines(ctx context.Context, orgID valuer.UUID) (
    *logparsingpipeline.PipelinesResponse, error,
) {
    // get lateset agent config
    // get latest agent config
    latestVersion := -1
    lastestConfig, err := agentConf.GetLatestVersion(ctx, orgID, opamptypes.ElementTypeLogPipelines)
    if err != nil && !errorsV2.Ast(err, errorsV2.TypeNotFound) {
@@ -4439,7 +4440,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error
    var spanKeys map[string]v3.AttributeKey
    if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
        hasLogsQuery := false
@@ -4456,7 +4457,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
        if logsv3.EnrichmentRequired(queryRangeParams) && hasLogsQuery {
            logsFields, apiErr := aH.reader.GetLogFieldsFromNames(ctx, logsv3.GetFieldNames(queryRangeParams.CompositeQuery))
            if apiErr != nil {
                RespondError(w, apiErr, errQuriesByName)
                RespondError(w, apiErr, errQueriesByName)
                return
            }
            // get the fields if any logs query is present
@@ -4467,7 +4468,7 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
            spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
            if err != nil {
                apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
                RespondError(w, apiErrObj, errQuriesByName)
                RespondError(w, apiErrObj, errQueriesByName)
                return
            }
            tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4512,11 +4513,11 @@ func (aH *APIHandler) queryRangeV3(ctx context.Context, queryRangeParams *v3.Que
        }
    }

    result, errQuriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querier.QueryRange(ctx, orgID, queryRangeParams)

    if err != nil {
        queryErrors := map[string]string{}
        for name, err := range errQuriesByName {
        for name, err := range errQueriesByName {
            queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
        }
        apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4792,7 +4793,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
    }

    var result []*v3.Result
    var errQuriesByName map[string]error
    var errQueriesByName map[string]error
    var spanKeys map[string]v3.AttributeKey
    if queryRangeParams.CompositeQuery.QueryType == v3.QueryTypeBuilder {
        hasLogsQuery := false
@@ -4822,7 +4823,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
        spanKeys, err = aH.getSpanKeysV3(ctx, queryRangeParams)
        if err != nil {
            apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
            RespondError(w, apiErrObj, errQuriesByName)
            RespondError(w, apiErrObj, errQueriesByName)
            return
        }
        tracesV4.Enrich(queryRangeParams, spanKeys)
@@ -4845,11 +4846,11 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que
        }
    }

    result, errQuriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)
    result, errQueriesByName, err = aH.querierV2.QueryRange(ctx, orgID, queryRangeParams)

    if err != nil {
        queryErrors := map[string]string{}
        for name, err := range errQuriesByName {
        for name, err := range errQueriesByName {
            queryErrors[fmt.Sprintf("Query-%s", name)] = err.Error()
        }
        apiErrObj := &model.ApiError{Typ: model.ErrorInternal, Err: err}
@@ -4866,7 +4867,7 @@ func (aH *APIHandler) queryRangeV4(ctx context.Context, queryRangeParams *v3.Que

    if err != nil {
        apiErrObj := &model.ApiError{Typ: model.ErrorBadData, Err: err}
        RespondError(w, apiErrObj, errQuriesByName)
        RespondError(w, apiErrObj, errQueriesByName)
        return
    }
    aH.sendQueryResultEvents(r, result, queryRangeParams, "v4")

@@ -5,10 +5,10 @@ import (
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
const (
|
||||
@@ -40,11 +40,11 @@ const NormalizedMetricsMapQueryThreads = 10
|
||||
var NormalizedMetricsMapRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
|
||||
var NormalizedMetricsMapQuantileRegex = regexp.MustCompile(`(?i)([._-]?quantile.*)$`)
|
||||
|
||||
func GetEvalDelay() time.Duration {
|
||||
func GetEvalDelay() valuer.TextDuration {
|
||||
evalDelayStr := GetOrDefaultEnv("RULES_EVAL_DELAY", "2m")
|
||||
evalDelayDuration, err := time.ParseDuration(evalDelayStr)
|
||||
evalDelayDuration, err := valuer.ParseTextDuration(evalDelayStr)
|
||||
if err != nil {
|
||||
return 0
|
||||
return valuer.TextDuration{}
|
||||
}
|
||||
return evalDelayDuration
|
||||
}
|
||||
|
||||
@@ -40,13 +40,13 @@ type BaseRule struct {
// evalWindow is the time window used for evaluating the rule
// i.e. each time we lookback from the current time, we look at data for the last
// evalWindow duration
-evalWindow time.Duration
+evalWindow valuer.TextDuration
// holdDuration is the duration for which the alert waits before firing
-holdDuration time.Duration
+holdDuration valuer.TextDuration

// evalDelay is the delay in evaluation of the rule
// this is useful in cases where the data is not available immediately
-evalDelay time.Duration
+evalDelay valuer.TextDuration

// holds the static set of labels and annotations for the rule
// these are the same for all alerts created for this rule
@@ -94,7 +94,7 @@ type BaseRule struct {
evaluation ruletypes.Evaluation

// newGroupEvalDelay is the grace period for new alert groups
-newGroupEvalDelay *time.Duration
+newGroupEvalDelay valuer.TextDuration

queryParser queryparser.QueryParser
}
@@ -113,7 +113,7 @@ func WithSendUnmatched() RuleOption {
}
}

-func WithEvalDelay(dur time.Duration) RuleOption {
+func WithEvalDelay(dur valuer.TextDuration) RuleOption {
return func(r *BaseRule) {
r.evalDelay = dur
}
@@ -163,7 +163,7 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
source: p.Source,
typ: p.AlertType,
ruleCondition: p.RuleCondition,
-evalWindow: time.Duration(p.EvalWindow),
+evalWindow: p.EvalWindow,
labels: qslabels.FromMap(p.Labels),
annotations: qslabels.FromMap(p.Annotations),
preferredChannels: p.PreferredChannels,
@@ -176,13 +176,12 @@ func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, reader
}

// Store newGroupEvalDelay and groupBy keys from NotificationSettings
-if p.NotificationSettings != nil && p.NotificationSettings.NewGroupEvalDelay != nil {
-newGroupEvalDelay := time.Duration(*p.NotificationSettings.NewGroupEvalDelay)
-baseRule.newGroupEvalDelay = &newGroupEvalDelay
+if p.NotificationSettings != nil {
+baseRule.newGroupEvalDelay = p.NotificationSettings.NewGroupEvalDelay
}

-if baseRule.evalWindow == 0 {
-baseRule.evalWindow = 5 * time.Minute
+if baseRule.evalWindow.IsZero() {
+baseRule.evalWindow = valuer.MustParseTextDuration("5m")
}

for _, opt := range opts {
@@ -245,15 +244,15 @@ func (r *BaseRule) ActiveAlertsLabelFP() map[uint64]struct{} {
return activeAlerts
}

-func (r *BaseRule) EvalDelay() time.Duration {
+func (r *BaseRule) EvalDelay() valuer.TextDuration {
return r.evalDelay
}

-func (r *BaseRule) EvalWindow() time.Duration {
+func (r *BaseRule) EvalWindow() valuer.TextDuration {
return r.evalWindow
}

-func (r *BaseRule) HoldDuration() time.Duration {
+func (r *BaseRule) HoldDuration() valuer.TextDuration {
return r.holdDuration
}

@@ -281,7 +280,7 @@ func (r *BaseRule) Timestamps(ts time.Time) (time.Time, time.Time) {
start := st.UnixMilli()
end := en.UnixMilli()

-if r.evalDelay > 0 {
+if r.evalDelay.IsPositive() {
start = start - r.evalDelay.Milliseconds()
end = end - r.evalDelay.Milliseconds()
}
@@ -552,7 +551,7 @@ func (r *BaseRule) PopulateTemporality(ctx context.Context, orgID valuer.UUID, q

// ShouldSkipNewGroups returns true if new group filtering should be applied
func (r *BaseRule) ShouldSkipNewGroups() bool {
-return r.newGroupEvalDelay != nil && *r.newGroupEvalDelay > 0
+return r.newGroupEvalDelay.IsPositive()
}

// isFilterNewSeriesSupported checks if the query is supported for new series filtering
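Two patterns recur through this file. First, optionality moves from pointers to zero values: newGroupEvalDelay was a *time.Duration with nil meaning "unset", and is now a plain valuer.TextDuration whose zero value carries the same meaning, so the nil-check-plus-deref collapses into one predicate. A sketch reusing the TextDuration shape above (names are illustrative):

package valuersketch

// Before: optionality via pointer, requiring a nil check plus a deref.
//
//	func shouldSkipNewGroups(delay *time.Duration) bool {
//		return delay != nil && *delay > 0
//	}

// After: the zero value itself means "no grace period configured", so a
// single method call on the value type replaces the pointer dance.
func shouldSkipNewGroups(delay TextDuration) bool {
	return delay.IsPositive()
}

Second, comparisons against raw numbers (evalWindow == 0, evalDelay > 0) become intention-revealing methods (IsZero, IsPositive), which keeps the wrapped duration private to the type.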
@@ -20,7 +20,7 @@ import (
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
+"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -124,8 +124,8 @@ func createPostableRule(compositeQuery *v3.CompositeQuery) ruletypes.PostableRul
Evaluation: &ruletypes.EvaluationEnvelope{
Kind: ruletypes.RollingEvaluation,
Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
},
},
RuleCondition: &ruletypes.RuleCondition{
@@ -151,7 +151,7 @@ type filterNewSeriesTestCase struct {
compositeQuery *v3.CompositeQuery
series []*v3.Series
firstSeenMap map[telemetrytypes.MetricMetadataLookupKey]int64
-newGroupEvalDelay *time.Duration
+newGroupEvalDelay valuer.TextDuration
evalTime time.Time
expectedFiltered []*v3.Series // series that should be in the final filtered result (old enough)
expectError bool
@@ -159,7 +159,8 @@ type filterNewSeriesTestCase struct {

func TestBaseRule_FilterNewSeries(t *testing.T) {
defaultEvalTime := time.Unix(1700000000, 0)
-defaultDelay := 2 * time.Minute
+defaultNewGroupEvalDelay := valuer.MustParseTextDuration("2m")
+defaultDelay := defaultNewGroupEvalDelay.Duration()
defaultGroupByFields := []string{"service_name", "env"}

logger := instrumentationtest.New().Logger()
@@ -202,7 +203,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new", "prod"),
// svc-missing has no metadata, so it will be included
),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -234,7 +235,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, false, "svc-new2", "stage"),
),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // all should be filtered out (new series)
},
@@ -261,7 +262,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old1", "prod"),
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old2", "stage"),
),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old1", "env": "prod"}, nil),
@@ -295,7 +296,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -325,7 +326,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -361,7 +362,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"status": "200"}, nil), // no service_name or env
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"status": "200"}, nil),
@@ -390,7 +391,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc-old", "prod"),
// svc-no-metadata has no entry in firstSeenMap
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-old", "env": "prod"}, nil),
@@ -420,7 +421,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
{MetricName: "request_total", AttributeName: "service_name", AttributeValue: "svc-partial"}: calculateFirstSeen(defaultEvalTime, defaultDelay, true),
// env metadata is missing
},
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc-partial", "env": "prod"}, nil),
@@ -454,7 +455,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
},
series: []*v3.Series{},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{},
},
@@ -488,7 +489,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
},
firstSeenMap: createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
-newGroupEvalDelay: func() *time.Duration { d := time.Duration(0); return &d }(), // zero delay
+newGroupEvalDelay: valuer.TextDuration{}, // zero delay
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -532,7 +533,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
createFirstSeenMap("error_total", defaultGroupByFields, defaultEvalTime, defaultDelay, true, "svc1", "prod"),
),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1", "env": "prod"}, nil),
@@ -572,7 +573,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createFirstSeenMap("request_total", []string{"service_name"}, defaultEvalTime, defaultDelay, true, "svc1"),
createFirstSeenMap("request_total", []string{"env"}, defaultEvalTime, defaultDelay, false, "prod"),
),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{}, // max first_seen is new, so should be filtered out
},
@@ -604,7 +605,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -639,7 +640,7 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
createTestSeries(map[string]string{"service_name": "svc2"}, nil),
},
firstSeenMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
-newGroupEvalDelay: &defaultDelay,
+newGroupEvalDelay: defaultNewGroupEvalDelay,
evalTime: defaultEvalTime,
expectedFiltered: []*v3.Series{
createTestSeries(map[string]string{"service_name": "svc1"}, nil),
@@ -697,20 +698,14 @@ func TestBaseRule_FilterNewSeries(t *testing.T) {
telemetryStore,
prometheustest.New(context.Background(), settings, prometheus.Config{}, telemetryStore),
"",
-time.Duration(time.Second),
+time.Second,
nil,
readerCache,
options,
)

// Set newGroupEvalDelay in NotificationSettings if provided
-if tt.newGroupEvalDelay != nil {
-postableRule.NotificationSettings = &ruletypes.NotificationSettings{
-NewGroupEvalDelay: func() *ruletypes.Duration {
-d := ruletypes.Duration(*tt.newGroupEvalDelay)
-return &d
-}(),
-}
+postableRule.NotificationSettings = &ruletypes.NotificationSettings{
+NewGroupEvalDelay: tt.newGroupEvalDelay,
+}

// Create BaseRule using NewBaseRule
@@ -30,7 +30,7 @@ import (
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
"github.com/SigNoz/signoz/pkg/types/authtypes"
-ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
+"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -66,7 +66,7 @@ type PrepareTestRuleOptions struct {
OrgID valuer.UUID
}

-const taskNamesuffix = "webAppEditor"
+const taskNameSuffix = "webAppEditor"

func RuleIdFromTaskName(n string) string {
return strings.Split(n, "-groupname")[0]
@@ -97,7 +97,7 @@ type ManagerOptions struct {
SLogger *slog.Logger
Cache cache.Cache

-EvalDelay time.Duration
+EvalDelay valuer.TextDuration

PrepareTaskFunc func(opts PrepareTaskOptions) (Task, error)
PrepareTestRuleFunc func(opts PrepareTestRuleOptions) (int, *model.ApiError)
@@ -182,8 +182,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {

rules = append(rules, tr)

-// create ch rule task for evalution
-task = newTask(TaskTypeCh, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
+// create ch rule task for evaluation
+task = newTask(TaskTypeCh, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else if opts.Rule.RuleType == ruletypes.RuleTypeProm {

@@ -206,8 +206,8 @@ func defaultPrepareTaskFunc(opts PrepareTaskOptions) (Task, error) {

rules = append(rules, pr)

-// create promql rule task for evalution
-task = newTask(TaskTypeProm, opts.TaskName, taskNamesuffix, time.Duration(evaluation.GetFrequency()), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)
+// create promql rule task for evaluation
+task = newTask(TaskTypeProm, opts.TaskName, taskNameSuffix, evaluation.GetFrequency().Duration(), rules, opts.ManagerOpts, opts.NotifyFunc, opts.MaintenanceStore, opts.OrgID)

} else {
return nil, fmt.Errorf("unsupported rule type %s. Supported types: %s, %s", opts.Rule.RuleType, ruletypes.RuleTypeProm, ruletypes.RuleTypeThreshold)
@@ -323,7 +323,7 @@ func (m *Manager) run(_ context.Context) {
}

// Stop the rule manager's rule evaluation cycles.
-func (m *Manager) Stop(ctx context.Context) {
+func (m *Manager) Stop(_ context.Context) {
m.mtx.Lock()
defer m.mtx.Unlock()

@@ -336,7 +336,7 @@ func (m *Manager) Stop(ctx context.Context) {
zap.L().Info("Rule manager stopped")
}

-// EditRuleDefinition writes the rule definition to the
+// EditRule writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
claims, err := authtypes.ClaimsFromContext(ctx)
@@ -643,7 +643,7 @@ func (m *Manager) addTask(_ context.Context, orgID valuer.UUID, rule *ruletypes.
m.rules[r.ID()] = r
}

-// If there is an another task with the same identifier, raise an error
+// If there is another task with the same identifier, raise an error
_, ok := m.tasks[taskName]
if ok {
return fmt.Errorf("a rule with the same name already exists")
@@ -678,7 +678,8 @@ func (m *Manager) RuleTasks() []Task {
return rgs
}

-// RuleTasks returns the list of manager's rule tasks.
+// RuleTasksWithoutLock returns the list of manager's rule tasks without
+// acquiring a lock on the manager.
func (m *Manager) RuleTasksWithoutLock() []Task {

rgs := make([]Task, 0, len(m.tasks))
@@ -889,7 +890,7 @@ func (m *Manager) syncRuleStateWithTask(ctx context.Context, orgID valuer.UUID,
} else {
// check if rule has a task running
if _, ok := m.tasks[taskName]; !ok {
-// rule has not task, start one
+// rule has no task, start one
if err := m.addTask(ctx, orgID, rule, taskName); err != nil {
return err
}
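The newTask calls illustrate the boundary rule of this migration: anything that ultimately feeds the standard library (tickers, time arithmetic) still takes a plain time.Duration, so the typed frequency is unwrapped exactly once at the call site via .Duration(). A small sketch of that convention (runEvery stands in for newTask; names are illustrative):

package main

import "time"

// runEvery stands in for newTask: stdlib-facing APIs keep accepting a
// plain time.Duration, so callers unwrap the typed value once, at the
// boundary, rather than threading the wrapper type everywhere.
func runEvery(d time.Duration) *time.Ticker {
	return time.NewTicker(d)
}

func main() {
	// Stands in for evaluation.GetFrequency().Duration().
	freq, err := time.ParseDuration("1m")
	if err != nil {
		panic(err)
	}
	t := runEvery(freq)
	defer t.Stop()
}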
@@ -9,6 +9,7 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+"github.com/SigNoz/signoz/pkg/valuer"
)

// ThresholdRuleTestCase defines test case structure for threshold rule test notifications
@@ -40,8 +41,8 @@ func ThresholdRuleAtLeastOnceValueAbove(target float64, recovery *float64) rulet
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -99,8 +100,8 @@ func BuildPromAtLeastOnceValueAbove(target float64, recovery *float64) ruletypes
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
Labels: map[string]string{
"service.name": "frontend",
@@ -28,6 +28,8 @@ type PromRule struct {
prometheus prometheus.Prometheus
}

+var _ Rule = (*PromRule)(nil)
+
func NewPromRule(
id string,
orgID valuer.UUID,
@@ -332,7 +334,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}

-if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
+if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
a.State = model.StateFiring
a.FiredAt = ts
state := model.StateFiring
@@ -396,7 +398,7 @@ func (r *PromRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
-EvalWindow: ruletypes.Duration(r.evalWindow),
+EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
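The Eval change is the same boundary rule applied to the pending-to-firing transition: ts.Sub(a.ActiveAt) yields a time.Duration, so the typed holdDuration must be unwrapped before the comparison. A self-contained sketch of the state transition being guarded (types and names here are illustrative stand-ins, not SigNoz's):

package main

import (
	"fmt"
	"time"
)

type alertState int

const (
	statePending alertState = iota
	stateFiring
)

type alert struct {
	State    alertState
	ActiveAt time.Time
}

// promotePending mirrors the guard in Eval: a pending alert starts
// firing only once it has been continuously active for holdDuration.
func promotePending(a *alert, ts time.Time, holdDuration time.Duration) {
	if a.State == statePending && ts.Sub(a.ActiveAt) >= holdDuration {
		a.State = stateFiring
	}
}

func main() {
	a := &alert{State: statePending, ActiveAt: time.Now().Add(-10 * time.Minute)}
	promotePending(a, time.Now(), 5*time.Minute)
	fmt.Println(a.State == stateFiring) // true: active longer than the 5m hold
}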
@@ -41,12 +41,12 @@ type PromRuleTask struct {
orgID valuer.UUID
}

-// newPromRuleTask holds rules that have promql condition
-// and evalutes the rule at a given frequency
+// NewPromRuleTask holds rules that have promql condition
+// and evaluates the rule at a given frequency
func NewPromRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *PromRuleTask {
zap.L().Info("Initiating a new rule group", zap.String("name", name), zap.Duration("frequency", frequency))

-if time.Now() == time.Now().Add(frequency) {
+if frequency == 0 {
frequency = DefaultFrequency
}
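The replaced guard deserves a note: time.Now() == time.Now().Add(frequency) tried to detect a zero frequency by comparing two wall-clock readings, but the two Now() calls happen at different instants and Go's time.Time equality also compares the monotonic clock reading, so the check is effectively never true, even when frequency is zero — meaning the default was never applied. Comparing the duration itself is both correct and obvious:

package main

import (
	"fmt"
	"time"
)

const defaultFrequency = 1 * time.Minute

// normalizeFrequency shows the fixed guard: a zero time.Duration is a
// well-defined sentinel, so it is tested directly instead of being
// inferred from two separate time.Now() readings.
func normalizeFrequency(frequency time.Duration) time.Duration {
	if frequency == 0 {
		return defaultFrequency
	}
	return frequency
}

func main() {
	fmt.Println(normalizeFrequency(0))                // 1m0s
	fmt.Println(normalizeFrequency(30 * time.Second)) // 30s
	// The old check: two Now() calls happen at different instants, so
	// this equality is not guaranteed even with a zero offset.
	fmt.Println(time.Now() == time.Now().Add(0)) // almost always false
}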
@@ -41,8 +41,8 @@ func TestPromRuleEval(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -748,8 +748,8 @@ func TestPromRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1007,8 +1007,8 @@ func _Enable_this_after_9146_issue_fix_is_merged_TestPromRuleNoData(t *testing.T
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1118,8 +1118,8 @@ func TestMultipleThresholdPromRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1353,8 +1353,8 @@ func TestPromRule_NoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1466,7 +1466,7 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
// 3. Alert fires only if t2 - t1 > AbsentFor

baseTime := time.Unix(1700000000, 0)
-evalWindow := 5 * time.Minute
+evalWindow := valuer.MustParseTextDuration("5m")

// Set target higher than test data (100.0) so regular threshold alerts don't fire
target := 500.0
@@ -1476,8 +1476,8 @@ func TestPromRule_NoData_AbsentFor(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(evalWindow),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: evalWindow,
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -1619,7 +1619,7 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
baseTime := time.Unix(1700000000, 0)
evalTime := baseTime.Add(5 * time.Minute)

-evalWindow := 5 * time.Minute
+evalWindow := valuer.MustParseTextDuration("5m")
lookBackDelta := time.Minute

postableRule := ruletypes.PostableRule{
@@ -1627,8 +1627,8 @@ func TestPromRuleEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeProm,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(evalWindow),
-Frequency: ruletypes.Duration(time.Minute),
+EvalWindow: evalWindow,
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/query-service/model"
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
+"github.com/SigNoz/signoz/pkg/valuer"
)

// A Rule encapsulates a vector expression which is evaluated at a specified
@@ -19,9 +20,9 @@ type Rule interface {
Labels() labels.BaseLabels
Annotations() labels.BaseLabels
Condition() *ruletypes.RuleCondition
-EvalDelay() time.Duration
-EvalWindow() time.Duration
-HoldDuration() time.Duration
+EvalDelay() valuer.TextDuration
+EvalWindow() valuer.TextDuration
+HoldDuration() valuer.TextDuration
State() model.AlertState
ActiveAlerts() []*ruletypes.Alert
// ActiveAlertsLabelFP returns a map of active alert labels fingerprint
@@ -43,7 +43,7 @@ const DefaultFrequency = 1 * time.Minute
// NewRuleTask makes a new RuleTask with the given name, options, and rules.
func NewRuleTask(name, file string, frequency time.Duration, rules []Rule, opts *ManagerOptions, notify NotifyFunc, maintenanceStore ruletypes.MaintenanceStore, orgID valuer.UUID) *RuleTask {

-if time.Now() == time.Now().Add(frequency) {
+if frequency == 0 {
frequency = DefaultFrequency
}
zap.L().Info("initiating a new rule task", zap.String("name", name), zap.Duration("frequency", frequency))
@@ -78,6 +78,7 @@ func (g *RuleTask) Type() TaskType { return TaskTypeCh }
func (g *RuleTask) Rules() []Rule { return g.rules }

// Interval returns the group's interval.
+// TODO: remove (unused)?
func (g *RuleTask) Interval() time.Duration { return g.frequency }

func (g *RuleTask) Pause(b bool) {
@@ -61,6 +61,8 @@ type ThresholdRule struct {
spansKeys map[string]v3.AttributeKey
}

+var _ Rule = (*ThresholdRule)(nil)
+
func NewThresholdRule(
id string,
orgID valuer.UUID,
@@ -746,7 +748,7 @@ func (r *ThresholdRule) Eval(ctx context.Context, ts time.Time) (int, error) {
continue
}

-if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration {
+if a.State == model.StatePending && ts.Sub(a.ActiveAt) >= r.holdDuration.Duration() {
r.logger.DebugContext(ctx, "converting pending alert to firing", "name", r.Name())
a.State = model.StateFiring
a.FiredAt = ts
@@ -812,7 +814,7 @@ func (r *ThresholdRule) String() string {
ar := ruletypes.PostableRule{
AlertName: r.name,
RuleCondition: r.ruleCondition,
-EvalWindow: ruletypes.Duration(r.evalWindow),
+EvalWindow: r.evalWindow,
Labels: r.labels.Map(),
Annotations: r.annotations.Map(),
PreferredChannels: r.preferredChannels,
@@ -36,8 +36,8 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -72,7 +72,7 @@ func TestThresholdRuleEvalBackwardCompat(t *testing.T) {
},
}

-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -152,8 +152,8 @@ func TestPrepareLinksToLogs(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -189,7 +189,7 @@ func TestPrepareLinksToLogs(t *testing.T) {
},
},
}
-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -206,8 +206,8 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -250,7 +250,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
},
},
}
-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -267,8 +267,8 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -311,7 +311,7 @@ func TestPrepareLinksToTracesV5(t *testing.T) {
},
},
}
-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -328,8 +328,8 @@ func TestPrepareLinksToTraces(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -365,7 +365,7 @@ func TestPrepareLinksToTraces(t *testing.T) {
},
},
}
-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -382,8 +382,8 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{ruletypes.RollingEvaluation, ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -451,7 +451,7 @@ func TestThresholdRuleLabelNormalization(t *testing.T) {
},
}

-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -490,8 +490,8 @@ func TestThresholdRuleEvalDelay(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -553,8 +553,8 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -594,7 +594,7 @@ func TestThresholdRuleClickHouseTmpl(t *testing.T) {
logger := instrumentationtest.New().Logger()

for idx, c := range cases {
-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
}
@@ -615,8 +615,8 @@ func TestThresholdRuleUnitCombinations(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -816,8 +816,8 @@ func TestThresholdRuleNoData(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -927,8 +927,8 @@ func TestThresholdRuleTracesLink(t *testing.T) {
AlertType: ruletypes.AlertTypeTraces,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1052,8 +1052,8 @@ func TestThresholdRuleLogsLink(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1190,8 +1190,8 @@ func TestThresholdRuleShiftBy(t *testing.T) {
AlertType: ruletypes.AlertTypeLogs,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
Thresholds: &ruletypes.RuleThresholdData{
@@ -1264,8 +1264,8 @@ func TestMultipleThresholdRule(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1455,8 +1455,8 @@ func TestThresholdRuleEval_BasicCases(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1486,8 +1486,8 @@ func TestThresholdRuleEval_MatchPlusCompareOps(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1523,8 +1523,8 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1559,7 +1559,7 @@ func TestThresholdRuleEval_SendUnmatchedBypassesRecovery(t *testing.T) {
}

logger := instrumentationtest.New().Logger()
-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
require.NoError(t, err)

now := time.Now()
@@ -1611,8 +1611,8 @@ func TestThresholdRuleEval_SendUnmatchedVariants(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1735,8 +1735,8 @@ func TestThresholdRuleEval_RecoveryNotMetSendUnmatchedFalse(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -1820,7 +1820,7 @@ func runEvalTests(t *testing.T, postableRule ruletypes.PostableRule, testCases [
Spec: thresholds,
}

-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -1927,7 +1927,7 @@ func runMultiThresholdEvalTests(t *testing.T, postableRule ruletypes.PostableRul
Spec: thresholds,
}

-rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(2*time.Minute))
+rule, err := NewThresholdRule("69", valuer.GenerateUUID(), &postableRule, nil, nil, logger, WithEvalDelay(valuer.MustParseTextDuration("2m")))
if err != nil {
assert.NoError(t, err)
return
@@ -2035,8 +2035,8 @@ func TestThresholdRuleEval_MultiThreshold(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompositeQuery: &v3.CompositeQuery{
@@ -2066,8 +2066,8 @@ func TestThresholdEval_RequireMinPoints(t *testing.T) {
AlertType: ruletypes.AlertTypeMetric,
RuleType: ruletypes.RuleTypeThreshold,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
-EvalWindow: ruletypes.Duration(5 * time.Minute),
-Frequency: ruletypes.Duration(1 * time.Minute),
+EvalWindow: valuer.MustParseTextDuration("5m"),
+Frequency: valuer.MustParseTextDuration("1m"),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
@@ -2,7 +2,6 @@ package signoz

import (
"context"
-"net/http"
"os"
"reflect"

@@ -24,8 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
-"github.com/SigNoz/signoz/pkg/types/ctxtypes"
-qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/gorilla/mux"
"github.com/swaggest/jsonschema-go"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
@@ -60,10 +57,6 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
return nil, err
}

-// Register routes that live outside the APIServer modules
-// so they are discovered by the OpenAPI walker.
-registerQueryRoutes(apiserver.Router())
-
reflector := openapi3.NewReflector()
reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
if defaultDefName == "RenderSuccessResponse" {
@@ -104,25 +97,3 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {

return os.WriteFile(path, spec, 0o600)
}
-
-func registerQueryRoutes(router *mux.Router) {
-router.Handle("/api/v5/query_range", handler.New(
-func(http.ResponseWriter, *http.Request) {},
-handler.OpenAPIDef{
-ID: "QueryRangeV5",
-Tags: []string{"query"},
-Summary: "Query range",
-Description: "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
-Request: new(qbtypes.QueryRangeRequest),
-RequestContentType: "application/json",
-Response: new(qbtypes.QueryRangeResponse),
-ResponseContentType: "application/json",
-SuccessStatusCode: http.StatusOK,
-ErrorStatusCodes: []int{http.StatusBadRequest},
-SecuritySchemes: []handler.OpenAPISecurityScheme{
-{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
-{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
-},
-},
-)).Methods(http.MethodPost)
-}
@@ -31,13 +31,6 @@ var (
TreemapModeSamples = TreemapMode{valuer.NewString("samples")}
)

-func (TreemapMode) Enum() []any {
-return []any{
-TreemapModeTimeSeries,
-TreemapModeSamples,
-}
-}
-
// StatsRequest represents the payload accepted by the metrics stats endpoint.
type StatsRequest struct {
Filter *qbtypes.Filter `json:"filter,omitempty"`
@@ -105,7 +98,7 @@ func (req *StatsRequest) UnmarshalJSON(data []byte) error {
type Stat struct {
MetricName string `json:"metricName" required:"true"`
Description string `json:"description" required:"true"`
-MetricType metrictypes.Type `json:"type" required:"true"`
+MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
MetricUnit string `json:"unit" required:"true"`
TimeSeries uint64 `json:"timeseries" required:"true"`
Samples uint64 `json:"samples" required:"true"`
@@ -119,9 +112,9 @@ type StatsResponse struct {

type MetricMetadata struct {
Description string `json:"description" required:"true"`
-MetricType metrictypes.Type `json:"type" required:"true"`
+MetricType metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
MetricUnit string `json:"unit" required:"true"`
-Temporality metrictypes.Temporality `json:"temporality" required:"true"`
+Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}

@@ -138,10 +131,10 @@ func (m *MetricMetadata) UnmarshalBinary(data []byte) error {
// UpdateMetricMetadataRequest represents the payload for updating metric metadata.
type UpdateMetricMetadataRequest struct {
MetricName string `json:"metricName" required:"true"`
-Type metrictypes.Type `json:"type" required:"true"`
+Type metrictypes.Type `json:"type" required:"true" enum:"gauge,sum,histogram,summary,exponentialhistogram"`
Description string `json:"description" required:"true"`
Unit string `json:"unit" required:"true"`
-Temporality metrictypes.Temporality `json:"temporality" required:"true"`
+Temporality metrictypes.Temporality `json:"temporality" required:"true" enum:"delta,cumulative,unspecified"`
IsMonotonic bool `json:"isMonotonic" required:"true"`
}

@@ -151,7 +144,7 @@ type TreemapRequest struct {
Start int64 `json:"start" required:"true"`
End int64 `json:"end" required:"true"`
Limit int `json:"limit" required:"true"`
-Mode TreemapMode `json:"mode" required:"true"`
+Mode TreemapMode `json:"mode" required:"true" enum:"timeseries,samples"`
}

// Validate enforces basic constraints on TreemapRequest.
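Across this file the pattern is uniform: the Enum() method on the value type disappears, and the allowed values move into an enum struct tag on each field that uses the type. This appears to target swaggest/jsonschema-go, which reads a comma-separated enum field tag when reflecting a schema; a minimal sketch under that assumption (the struct is a cut-down stand-in, with a plain string instead of the valuer-backed TreemapMode):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/swaggest/jsonschema-go"
)

// treemapRequest is a cut-down stand-in for TreemapRequest: the allowed
// modes now live on the field's enum tag rather than on the mode type.
type treemapRequest struct {
	Limit int    `json:"limit" required:"true"`
	Mode  string `json:"mode" required:"true" enum:"timeseries,samples"`
}

func main() {
	r := jsonschema.Reflector{}
	schema, err := r.Reflect(treemapRequest{})
	if err != nil {
		panic(err)
	}
	out, _ := json.MarshalIndent(schema, "", "  ")
	fmt.Println(string(out)) // the "mode" property lists both enum values
}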
@@ -36,7 +36,7 @@ func (t Temporality) Value() (driver.Value, error) {
}
}

-func (t *Temporality) Scan(src any) error {
+func (t *Temporality) Scan(src interface{}) error {
if src == nil {
*t = Unknown
return nil
@@ -66,14 +66,6 @@ func (t *Temporality) Scan(src any) error {
return nil
}

-func (Temporality) Enum() []any {
-return []any{
-Delta,
-Cumulative,
-Unspecified,
-}
-}
-
// Type is the type of the metric in OTLP data model
// Read more here https://opentelemetry.io/docs/specs/otel/metrics/data-model/#metric-points
type Type struct {
@@ -142,16 +134,6 @@ var (
UnspecifiedType = Type{valuer.NewString("")}
)

-func (Type) Enum() []any {
-return []any{
-GaugeType,
-SumType,
-HistogramType,
-SummaryType,
-ExpHistogramType,
-}
-}
-
type TimeAggregation struct {
valuer.String
}
@@ -169,21 +151,6 @@ var (
TimeAggregationIncrease = TimeAggregation{valuer.NewString("increase")}
)

-func (TimeAggregation) Enum() []any {
-return []any{
-TimeAggregationUnspecified,
-TimeAggregationLatest,
-TimeAggregationSum,
-TimeAggregationAvg,
-TimeAggregationMin,
-TimeAggregationMax,
-TimeAggregationCount,
-TimeAggregationCountDistinct,
-TimeAggregationRate,
-TimeAggregationIncrease,
-}
-}
-
type SpaceAggregation struct {
valuer.String
}
@@ -202,22 +169,6 @@ var (
SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
)

-func (SpaceAggregation) Enum() []any {
-return []any{
-SpaceAggregationUnspecified,
-SpaceAggregationSum,
-SpaceAggregationAvg,
-SpaceAggregationMin,
-SpaceAggregationMax,
-SpaceAggregationCount,
-SpaceAggregationPercentile50,
-SpaceAggregationPercentile75,
-SpaceAggregationPercentile90,
-SpaceAggregationPercentile95,
-SpaceAggregationPercentile99,
-}
-}
-
func (s SpaceAggregation) IsPercentile() bool {
return s == SpaceAggregationPercentile50 ||
s == SpaceAggregationPercentile75 ||
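The deleted Enum() methods are the other half of the same migration. A method with the signature Enum() []any satisfies jsonschema-go's enum-exposing interface, so the reflector used to pull the allowed values from the type itself; with the values now declared in struct tags at each usage site, the methods are dead code. A minimal sketch of the retired pattern (types and values here are illustrative stand-ins, assuming jsonschema-go's method-based enum support):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/swaggest/jsonschema-go"
)

// temporality mimics the valuer.String-backed enums with a bare string.
type temporality string

// Enum is picked up by the jsonschema-go reflector: every value returned
// here is emitted into the schema wherever the type appears. This is the
// method-based pattern the diff removes in favor of per-field tags.
func (temporality) Enum() []any {
	return []any{"delta", "cumulative", "unspecified"}
}

type metricMetadata struct {
	Temporality temporality `json:"temporality"`
}

func main() {
	r := jsonschema.Reflector{}
	schema, err := r.Reflect(metricMetadata{})
	if err != nil {
		panic(err)
	}
	out, _ := json.MarshalIndent(schema, "", "  ")
	fmt.Println(string(out)) // "temporality" carries the three enum values
}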
@@ -1,707 +0,0 @@
package querybuildertypesv5

import (
	"github.com/swaggest/jsonschema-go"
)

// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
	return []any{
		QueryTypeBuilder,
		QueryTypeFormula,
		// Not yet supported.
		// QueryTypeSubQuery,
		// QueryTypeJoin,
		QueryTypeTraceOperator,
		QueryTypeClickHouseSQL,
		QueryTypePromQL,
	}
}

// Enum returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
	return []any{
		RequestTypeScalar,
		RequestTypeTimeSeries,
		RequestTypeRaw,
		RequestTypeRawStream,
		RequestTypeTrace,
		// RequestTypeDistribution,
	}
}

// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
	return []any{
		FunctionNameCutOffMin,
		FunctionNameCutOffMax,
		FunctionNameClampMin,
		FunctionNameClampMax,
		FunctionNameAbsolute,
		FunctionNameRunningDiff,
		FunctionNameLog2,
		FunctionNameLog10,
		FunctionNameCumulativeSum,
		FunctionNameEWMA3,
		FunctionNameEWMA5,
		FunctionNameEWMA7,
		FunctionNameMedian3,
		FunctionNameMedian5,
		FunctionNameMedian7,
		FunctionNameTimeShift,
		FunctionNameAnomaly,
		FunctionNameFillZero,
	}
}

// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
	return []any{
		OrderDirectionAsc,
		OrderDirectionDesc,
	}
}

// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
	return []any{
		ReduceToSum,
		ReduceToCount,
		ReduceToAvg,
		ReduceToMin,
		ReduceToMax,
		ReduceToLast,
		ReduceToMedian,
	}
}

// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
	return []any{
		QueryVariableType,
		DynamicVariableType,
		CustomVariableType,
		TextBoxVariableType,
	}
}

// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
	return []any{
		JoinTypeInner,
		JoinTypeLeft,
		JoinTypeRight,
		JoinTypeFull,
		JoinTypeCross,
	}
}

// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
	return []any{
		ColumnTypeGroup,
		ColumnTypeAggregation,
	}
}

// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
	Type QueryType                           `json:"type" description:"The type of the query."`
	Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}

// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
	Type QueryType                         `json:"type" description:"The type of the query."`
	Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}

// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
	Type QueryType                            `json:"type" description:"The type of the query."`
	Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}

// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
	Type QueryType           `json:"type" description:"The type of the query."`
	Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}

// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
// 	Type QueryType        `json:"type" description:"The type of the query."`
// 	Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }

// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
	Type QueryType                 `json:"type" description:"The type of the query."`
	Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}

// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}

// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
	Type QueryType       `json:"type" description:"The type of the query."`
	Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}

var _ jsonschema.OneOfExposer = QueryEnvelope{}

// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
	return []any{
		queryEnvelopeBuilderTrace{},
		queryEnvelopeBuilderLog{},
		queryEnvelopeBuilderMetric{},
		queryEnvelopeFormula{},
		// queryEnvelopeJoin{},
		queryEnvelopeTraceOperator{},
		queryEnvelopePromQL{},
		queryEnvelopeClickHouseSQL{},
	}
}

var _ jsonschema.Exposer = Step{}

// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
	s := jsonschema.Schema{}
	s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")

	strSchema := jsonschema.Schema{}
	strSchema.WithType(jsonschema.String.Type())
	strSchema.WithExamples("60s", "5m", "1h")
	strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")

	numSchema := jsonschema.Schema{}
	numSchema.WithType(jsonschema.Number.Type())
	numSchema.WithExamples(60, 300, 3600)
	numSchema.WithDescription("Duration in seconds.")

	s.OneOf = []jsonschema.SchemaOrBool{
		strSchema.ToSchemaOrBool(),
		numSchema.ToSchemaOrBool(),
	}
	return s, nil
}
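Editor's note: the oneOf schema above means a step may be written either as a duration string or as a number of seconds (the request examples further down use both forms, "60s" and 120). A minimal illustrative sketch, not part of this change, of how swaggest's reflector honors an Exposer such as Step; the stand-in type and program are hypothetical, using only the jsonschema-go API already exercised above:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/swaggest/jsonschema-go"
)

// stepLike is a hypothetical stand-in with the same string-or-number schema as Step.
type stepLike struct{}

func (stepLike) JSONSchema() (jsonschema.Schema, error) {
	s := jsonschema.Schema{}
	str := jsonschema.Schema{}
	str.WithType(jsonschema.String.Type())
	num := jsonschema.Schema{}
	num.WithType(jsonschema.Number.Type())
	s.OneOf = []jsonschema.SchemaOrBool{str.ToSchemaOrBool(), num.ToSchemaOrBool()}
	return s, nil
}

func main() {
	var r jsonschema.Reflector
	// Reflect uses the Exposer's custom schema instead of reflecting struct fields.
	schema, err := r.Reflect(stepLike{})
	if err != nil {
		panic(err)
	}
	out, _ := json.Marshal(schema)
	fmt.Println(string(out)) // {"oneOf":[{"type":"string"},{"type":"number"}]}
}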
var _ jsonschema.OneOfExposer = QueryData{}

// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
	return []any{
		TimeSeriesData{},
		ScalarData{},
		RawData{},
	}
}

var _ jsonschema.Preparer = &QueryRangeRequest{}

// PrepareJSONSchema adds examples and description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
	schema.WithExamples(
		// 1. time_series + traces builder: count spans grouped by service, ordered by count
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
									"alias":      "span_count",
								},
							},
							"stepInterval": "60s",
							"filter": map[string]any{
								"expression": "service.name = 'frontend'",
							},
							"groupBy": []any{
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
							"order": []any{
								map[string]any{
									"key":       map[string]any{"name": "span_count"},
									"direction": "desc",
								},
							},
							"limit": 10,
						},
					},
				},
			},
		},
		// 2. time_series + logs builder: count logs grouped by service
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "logs",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
									"alias":      "log_count",
								},
							},
							"stepInterval": "60s",
							"filter": map[string]any{
								"expression": "severity_text = 'ERROR'",
							},
							"groupBy": []any{
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
							"order": []any{
								map[string]any{
									"key":       map[string]any{"name": "log_count"},
									"direction": "desc",
								},
							},
							"limit": 10,
						},
					},
				},
			},
		},
		// 3. time_series + metrics builder (Gauge): latest value averaged across series
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{
									"metricName":       "system.cpu.utilization",
									"timeAggregation":  "latest",
									"spaceAggregation": "avg",
								},
							},
							"stepInterval": "60s",
							"groupBy": []any{
								map[string]any{
									"name":         "host.name",
									"fieldContext": "resource",
								},
							},
						},
					},
				},
			},
		},
		// 4. time_series + metrics builder (Sum): rate of cumulative counter
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{
									"metricName":       "http.server.duration.count",
									"timeAggregation":  "rate",
									"spaceAggregation": "sum",
								},
							},
							"stepInterval": 120,
							"groupBy": []any{
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
						},
					},
				},
			},
		},
		// 5. time_series + metrics builder (Histogram): p99 latency
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{
									"metricName":       "http.server.duration.bucket",
									"spaceAggregation": "p99",
								},
							},
							"stepInterval": "60s",
							"groupBy": []any{
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
						},
					},
				},
			},
		},
		// 6. raw + logs builder: fetch raw log records
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "logs",
							"filter": map[string]any{
								"expression": "severity_text = 'ERROR'",
							},
							"selectFields": []any{
								map[string]any{
									"name":         "body",
									"fieldContext": "log",
								},
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
							"order": []any{
								map[string]any{
									"key":       map[string]any{"name": "timestamp", "fieldContext": "log"},
									"direction": "desc",
								},
								map[string]any{
									"key":       map[string]any{"name": "id"},
									"direction": "desc",
								},
							},
							"limit":  50,
							"offset": 0,
						},
					},
				},
			},
		},
		// 7. raw + traces builder: fetch raw span records
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"filter": map[string]any{
								"expression": "service.name = 'frontend' AND has_error = true",
							},
							"selectFields": []any{
								map[string]any{
									"name":         "name",
									"fieldContext": "span",
								},
								map[string]any{
									"name":         "duration_nano",
									"fieldContext": "span",
								},
							},
							"order": []any{
								map[string]any{
									"key":       map[string]any{"name": "timestamp", "fieldContext": "span"},
									"direction": "desc",
								},
							},
							"limit": 100,
						},
					},
				},
			},
		},
		// 8. scalar + traces builder: total span count as a single value
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
									"alias":      "span_count",
								},
							},
							"filter": map[string]any{
								"expression": "service.name = 'frontend'",
							},
						},
					},
				},
			},
		},
		// 9. scalar + logs builder: total error log count as a single value
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "logs",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
									"alias":      "error_count",
								},
							},
							"filter": map[string]any{
								"expression": "severity_text = 'ERROR'",
							},
						},
					},
				},
			},
		},
		// 10. scalar + metrics builder: single reduced value with reduceTo
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{
									"metricName":       "http.server.duration.count",
									"timeAggregation":  "rate",
									"spaceAggregation": "sum",
									"reduceTo":         "sum",
								},
							},
							"stepInterval": "60s",
						},
					},
				},
			},
		},
		// 11. builder formula: error rate from two trace queries
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "countIf(has_error = true)",
								},
							},
							"stepInterval": "60s",
							"groupBy": []any{
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
						},
					},
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "B",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{
									"expression": "count()",
								},
							},
							"stepInterval": "60s",
							"groupBy": []any{
								map[string]any{
									"name":         "service.name",
									"fieldContext": "resource",
								},
							},
						},
					},
					map[string]any{
						"type": "builder_formula",
						"spec": map[string]any{
							"name":       "error_rate",
							"expression": "A / B * 100",
						},
					},
				},
			},
		},
		// 12. PromQL query with UTF-8 dot metric name
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "promql",
						"spec": map[string]any{
							"name":  "request_rate",
							"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
							"step":  60,
						},
					},
				},
			},
		},
		// 13. ClickHouse SQL — time_series
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name":  "span_rate",
							"query": "SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime GROUP BY ts ORDER BY ts",
						},
					},
				},
			},
		},
		// 14. ClickHouse SQL — raw
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name":  "recent_errors",
							"query": "SELECT timestamp, body FROM signoz_logs.distributed_logs_v2 WHERE timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano AND severity_text = 'ERROR' ORDER BY timestamp DESC LIMIT 100",
						},
					},
				},
			},
		},
		// 15. ClickHouse SQL — scalar
		map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name":  "total_spans",
							"query": "SELECT count() AS value FROM signoz_traces.distributed_signoz_index_v3 WHERE timestamp >= $start_datetime AND timestamp <= $end_datetime",
						},
					},
				},
			},
		},
	)
	return nil
}

var _ jsonschema.Preparer = &QueryRangeResponse{}

// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
	return nil
}

var _ jsonschema.Preparer = &CompositeQuery{}

// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
	return nil
}

var _ jsonschema.Preparer = &ExecStats{}

// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
	return nil
}
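Editor's note: these Preparer hooks are consumed at schema-generation time. A minimal sketch (illustrative only; the import path is assumed from this repo's layout) of how swaggest's reflector triggers them: after reflecting a type's fields, jsonschema-go calls PrepareJSONSchema on any type implementing jsonschema.Preparer, which is how the description and the 15 examples above land in the generated OpenAPI document.

package main

import (
	"encoding/json"
	"fmt"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypesv5" // assumed import path
	"github.com/swaggest/jsonschema-go"
)

func main() {
	var r jsonschema.Reflector
	// Reflect builds the base schema, then invokes the Preparer hooks defined above.
	schema, err := r.Reflect(qbtypes.QueryRangeRequest{})
	if err != nil {
		panic(err)
	}
	b, _ := json.MarshalIndent(schema, "", "  ")
	fmt.Println(string(b)) // includes the description and examples attached above
}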
@@ -8,15 +8,15 @@ import (
	"time"
	"unicode/utf8"

	"github.com/prometheus/alertmanager/config"

	signozError "github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/query-service/model"
	v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"

	"github.com/SigNoz/signoz/pkg/query-service/utils/times"
	"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
	"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"

	"github.com/prometheus/alertmanager/config"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type AlertType string

@@ -40,12 +40,12 @@ const (

// PostableRule is used to create alerting rule from HTTP api
type PostableRule struct {
	AlertName   string    `json:"alert,omitempty"`
	AlertType   AlertType `json:"alertType,omitempty"`
	Description string    `json:"description,omitempty"`
	RuleType    RuleType  `json:"ruleType,omitempty"`
	EvalWindow  Duration  `json:"evalWindow,omitempty"`
	Frequency   Duration  `json:"frequency,omitempty"`
	AlertName   string              `json:"alert,omitempty"`
	AlertType   AlertType           `json:"alertType,omitempty"`
	Description string              `json:"description,omitempty"`
	RuleType    RuleType            `json:"ruleType,omitempty"`
	EvalWindow  valuer.TextDuration `json:"evalWindow,omitempty"`
	Frequency   valuer.TextDuration `json:"frequency,omitempty"`

	RuleCondition *RuleCondition    `json:"condition,omitempty"`
	Labels        map[string]string `json:"labels,omitempty"`

@@ -71,13 +71,13 @@ type NotificationSettings struct {
	Renotify  Renotify `json:"renotify,omitempty"`
	UsePolicy bool     `json:"usePolicy,omitempty"`
	// NewGroupEvalDelay is the grace period for new series to be excluded from alerts evaluation
	NewGroupEvalDelay *Duration `json:"newGroupEvalDelay,omitempty"`
	NewGroupEvalDelay valuer.TextDuration `json:"newGroupEvalDelay,omitzero"`
}

type Renotify struct {
	Enabled          bool               `json:"enabled"`
	ReNotifyInterval Duration           `json:"interval,omitempty"`
	AlertStates      []model.AlertState `json:"alertStates,omitempty"`
	Enabled          bool                `json:"enabled"`
	ReNotifyInterval valuer.TextDuration `json:"interval,omitzero"`
	AlertStates      []model.AlertState  `json:"alertStates,omitempty"`
}

func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanagertypes.NotificationConfig {

@@ -85,10 +85,10 @@ func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanager
	var noDataRenotifyInterval time.Duration
	if ns.Renotify.Enabled {
		if slices.Contains(ns.Renotify.AlertStates, model.StateNoData) {
			noDataRenotifyInterval = time.Duration(ns.Renotify.ReNotifyInterval)
			noDataRenotifyInterval = ns.Renotify.ReNotifyInterval.Duration()
		}
		if slices.Contains(ns.Renotify.AlertStates, model.StateFiring) {
			renotifyInterval = time.Duration(ns.Renotify.ReNotifyInterval)
			renotifyInterval = ns.Renotify.ReNotifyInterval.Duration()
		}
	} else {
		renotifyInterval = 8760 * time.Hour // 1 year for no renotify substitute

@@ -190,12 +190,12 @@ func (r *PostableRule) processRuleDefaults() {
		r.SchemaVersion = DefaultSchemaVersion
	}

	if r.EvalWindow == 0 {
		r.EvalWindow = Duration(5 * time.Minute)
	if r.EvalWindow.IsZero() {
		r.EvalWindow = valuer.MustParseTextDuration("5m")
	}

	if r.Frequency == 0 {
		r.Frequency = Duration(1 * time.Minute)
	if r.Frequency.IsZero() {
		r.Frequency = valuer.MustParseTextDuration("1m")
	}

	if r.RuleCondition != nil {

@@ -246,7 +246,7 @@ func (r *PostableRule) processRuleDefaults() {
		r.NotificationSettings = &NotificationSettings{
			Renotify: Renotify{
				Enabled:          true,
				ReNotifyInterval: Duration(4 * time.Hour),
				ReNotifyInterval: valuer.MustParseTextDuration("4h"),
				AlertStates:      []model.AlertState{model.StateFiring},
			},
		}
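Editor's note: the move from `*Duration` + `omitempty` to `valuer.TextDuration` + `omitzero` relies on encoding/json's `omitzero` tag option (Go 1.24+, an assumption here), which consults the value's IsZero() method, so the pointer indirection is no longer needed to omit unset durations. A minimal sketch with a hypothetical stand-in struct:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/SigNoz/signoz/pkg/valuer" // import path as used in the diff above
)

// renotify is a hypothetical stand-in mirroring the Renotify field shape above.
type renotify struct {
	Enabled  bool                `json:"enabled"`
	Interval valuer.TextDuration `json:"interval,omitzero"`
}

func main() {
	b, _ := json.Marshal(renotify{Enabled: true}) // zero-valued duration
	fmt.Println(string(b))                        // {"enabled":true} — omitted via IsZero()

	b, _ = json.Marshal(renotify{Enabled: true, Interval: valuer.MustParseTextDuration("4h")})
	fmt.Println(string(b)) // {"enabled":true,"interval":"4h"}
}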
@@ -171,10 +171,10 @@ func TestParseIntoRule(t *testing.T) {
			kind:        RuleDataKindJson,
			expectError: false,
			validate: func(t *testing.T, rule *PostableRule) {
				if rule.EvalWindow != Duration(5*time.Minute) {
				if rule.EvalWindow.Duration() != 5*time.Minute {
					t.Errorf("Expected default eval window '5m', got '%v'", rule.EvalWindow)
				}
				if rule.Frequency != Duration(1*time.Minute) {
				if rule.Frequency.Duration() != time.Minute {
					t.Errorf("Expected default frequency '1m', got '%v'", rule.Frequency)
				}
				if rule.RuleCondition.CompositeQuery.BuilderQueries["A"].Expression != "A" {

@@ -327,10 +327,10 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {

	// Verify evaluation window matches rule settings
	if window, ok := rule.Evaluation.Spec.(RollingWindow); ok {
		if window.EvalWindow != rule.EvalWindow {
		if !window.EvalWindow.Equal(rule.EvalWindow) {
			t.Errorf("Expected Evaluation EvalWindow %v, got %v", rule.EvalWindow, window.EvalWindow)
		}
		if window.Frequency != rule.Frequency {
		if !window.Frequency.Equal(rule.Frequency) {
			t.Errorf("Expected Evaluation Frequency %v, got %v", rule.Frequency, window.Frequency)
		}
	} else {

@@ -457,10 +457,10 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
		t.Fatal("Expected Evaluation to be populated")
	}
	if window, ok := rule.Evaluation.Spec.(RollingWindow); ok {
		if window.EvalWindow != rule.EvalWindow {
		if !window.EvalWindow.Equal(rule.EvalWindow) {
			t.Errorf("Expected Evaluation EvalWindow to be overwritten to %v, got %v", rule.EvalWindow, window.EvalWindow)
		}
		if window.Frequency != rule.Frequency {
		if !window.Frequency.Equal(rule.Frequency) {
			t.Errorf("Expected Evaluation Frequency to be overwritten to %v, got %v", rule.Frequency, window.Frequency)
		}
	} else {

@@ -504,7 +504,7 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
		t.Error("Expected Evaluation to be nil for v2")
	}

	if rule.EvalWindow != Duration(5*time.Minute) {
	if rule.EvalWindow.Duration() != 5*time.Minute {
		t.Error("Expected default EvalWindow to be applied")
	}
	if rule.RuleType != RuleTypeThreshold {

@@ -19,36 +19,36 @@ var (

type Evaluation interface {
	NextWindowFor(curr time.Time) (time.Time, time.Time)
	GetFrequency() Duration
	GetFrequency() valuer.TextDuration
}

type RollingWindow struct {
	EvalWindow Duration `json:"evalWindow"`
	Frequency  Duration `json:"frequency"`
	EvalWindow valuer.TextDuration `json:"evalWindow"`
	Frequency  valuer.TextDuration `json:"frequency"`
}

func (rollingWindow RollingWindow) Validate() error {
	if rollingWindow.EvalWindow <= 0 {
	if !rollingWindow.EvalWindow.IsPositive() {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "evalWindow must be greater than zero")
	}
	if rollingWindow.Frequency <= 0 {
	if !rollingWindow.Frequency.IsPositive() {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero")
	}
	return nil
}

func (rollingWindow RollingWindow) NextWindowFor(curr time.Time) (time.Time, time.Time) {
	return curr.Add(time.Duration(-rollingWindow.EvalWindow)), curr
	return curr.Add(-rollingWindow.EvalWindow.Duration()), curr
}

func (rollingWindow RollingWindow) GetFrequency() Duration {
func (rollingWindow RollingWindow) GetFrequency() valuer.TextDuration {
	return rollingWindow.Frequency
}

type CumulativeWindow struct {
	Schedule  CumulativeSchedule `json:"schedule"`
	Frequency Duration           `json:"frequency"`
	Timezone  string             `json:"timezone"`
	Schedule  CumulativeSchedule  `json:"schedule"`
	Frequency valuer.TextDuration `json:"frequency"`
	Timezone  string              `json:"timezone"`
}

type CumulativeSchedule struct {

@@ -79,7 +79,7 @@ func (cumulativeWindow CumulativeWindow) Validate() error {
	if _, err := time.LoadLocation(cumulativeWindow.Timezone); err != nil {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "timezone is invalid")
	}
	if cumulativeWindow.Frequency <= 0 {
	if !cumulativeWindow.Frequency.IsPositive() {
		return errors.NewInvalidInputf(errors.CodeInvalidInput, "frequency must be greater than zero")
	}
	return nil

@@ -150,8 +150,8 @@ func (cumulativeWindow CumulativeWindow) NextWindowFor(curr time.Time) (time.Tim
	return windowStart.In(time.UTC), currInTZ.In(time.UTC)
}

func (cw CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time {
	schedule := cw.Schedule
func (cumulativeWindow CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Location) time.Time {
	schedule := cumulativeWindow.Schedule

	switch schedule.Type {
	case ScheduleTypeHourly:

@@ -220,7 +220,7 @@ func (cw CumulativeWindow) getLastScheduleTime(curr time.Time, loc *time.Locatio
	}
}

func (cumulativeWindow CumulativeWindow) GetFrequency() Duration {
func (cumulativeWindow CumulativeWindow) GetFrequency() valuer.TextDuration {
	return cumulativeWindow.Frequency
}
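Editor's note: a small usage sketch (illustrative; the ruletypes import path is assumed from this repo's layout) of the rolling-window arithmetic above. NextWindowFor subtracts EvalWindow.Duration() from the evaluation instant, so a "5m" window evaluated at 12:30 spans 12:25 to 12:30:

package main

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/types/ruletypes" // assumed import path
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	rw := ruletypes.RollingWindow{
		EvalWindow: valuer.MustParseTextDuration("5m"),
		Frequency:  valuer.MustParseTextDuration("1m"),
	}
	now := time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC)
	start, end := rw.NextWindowFor(now)
	fmt.Println(start, end) // 12:25:00 UTC, 12:30:00 UTC
}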
@@ -4,33 +4,35 @@ import (
	"encoding/json"
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

func TestRollingWindow_EvaluationTime(t *testing.T) {
	tests := []struct {
		name       string
		evalWindow Duration
		evalWindow valuer.TextDuration
		current    time.Time
		wantStart  time.Time
		wantEnd    time.Time
	}{
		{
			name:       "5 minute rolling window",
			evalWindow: Duration(5 * time.Minute),
			evalWindow: valuer.MustParseTextDuration("5m"),
			current:    time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC),
			wantStart:  time.Date(2023, 12, 1, 12, 25, 0, 0, time.UTC),
			wantEnd:    time.Date(2023, 12, 1, 12, 30, 0, 0, time.UTC),
		},
		{
			name:       "1 hour rolling window",
			evalWindow: Duration(1 * time.Hour),
			evalWindow: valuer.MustParseTextDuration("1h"),
			current:    time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC),
			wantStart:  time.Date(2023, 12, 1, 14, 45, 30, 0, time.UTC),
			wantEnd:    time.Date(2023, 12, 1, 15, 45, 30, 0, time.UTC),
		},
		{
			name:       "30 second rolling window",
			evalWindow: Duration(30 * time.Second),
			evalWindow: valuer.MustParseTextDuration("30s"),
			current:    time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC),
			wantStart:  time.Date(2023, 12, 1, 12, 29, 45, 0, time.UTC),
			wantEnd:    time.Date(2023, 12, 1, 12, 30, 15, 0, time.UTC),

@@ -41,7 +43,7 @@ func TestRollingWindow_EvaluationTime(t *testing.T) {
		t.Run(tt.name, func(t *testing.T) {
			rw := &RollingWindow{
				EvalWindow: tt.evalWindow,
				Frequency:  Duration(1 * time.Minute),
				Frequency:  valuer.MustParseTextDuration("1m"),
			}

			gotStart, gotEnd := rw.NextWindowFor(tt.current)

@@ -69,7 +71,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
					Type:   ScheduleTypeHourly,
					Minute: intPtr(15),
				},
				Frequency: Duration(5 * time.Minute),
				Frequency: valuer.MustParseTextDuration("5m"),
				Timezone:  "UTC",
			},
			current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC),

@@ -83,7 +85,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
					Hour:   intPtr(9),
					Minute: intPtr(30),
				},
				Frequency: Duration(1 * time.Hour),
				Frequency: valuer.MustParseTextDuration("1h"),
				Timezone:  "Asia/Kolkata",
			},
			current: time.Date(2025, 3, 15, 15, 30, 0, 0, time.UTC),

@@ -98,7 +100,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
					Hour:   intPtr(14),
					Minute: intPtr(0),
				},
				Frequency: Duration(24 * time.Hour),
				Frequency: valuer.MustParseTextDuration("24h"),
				Timezone:  "America/New_York",
			},
			current: time.Date(2025, 3, 18, 19, 0, 0, 0, time.UTC), // Tuesday

@@ -113,7 +115,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
					Hour:   intPtr(0),
					Minute: intPtr(0),
				},
				Frequency: Duration(24 * time.Hour),
				Frequency: valuer.MustParseTextDuration("24h"),
				Timezone:  "UTC",
			},
			current: time.Date(2025, 3, 15, 12, 0, 0, 0, time.UTC),

@@ -125,7 +127,7 @@ func TestCumulativeWindow_NewScheduleSystem(t *testing.T) {
				Schedule: CumulativeSchedule{
					Type: ScheduleTypeHourly,
				},
				Frequency: Duration(5 * time.Minute),
				Frequency: valuer.MustParseTextDuration("5m"),
				Timezone:  "UTC",
			},
			current: time.Date(2025, 3, 15, 14, 30, 0, 0, time.UTC),

@@ -755,8 +757,8 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
			jsonInput: `{"kind":"rolling","spec":{"evalWindow":"5m","frequency":"1m"}}`,
			wantKind:  RollingEvaluation,
			wantSpec: RollingWindow{
				EvalWindow: Duration(5 * time.Minute),
				Frequency:  Duration(1 * time.Minute),
				EvalWindow: valuer.MustParseTextDuration("5m"),
				Frequency:  valuer.MustParseTextDuration("1m"),
			},
		},
		{

@@ -768,7 +770,7 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
					Type:   ScheduleTypeHourly,
					Minute: intPtr(30),
				},
				Frequency: Duration(2 * time.Minute),
				Frequency: valuer.MustParseTextDuration("2m"),
				Timezone:  "UTC",
			},
		},

@@ -847,10 +849,10 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
				t.Fatalf("Expected RollingWindow spec, got %T", envelope.Spec)
			}
			wantSpec := tt.wantSpec.(RollingWindow)
			if gotSpec.EvalWindow != wantSpec.EvalWindow {
			if !gotSpec.EvalWindow.Equal(wantSpec.EvalWindow) {
				t.Errorf("RollingWindow.EvalWindow = %v, want %v", gotSpec.EvalWindow, wantSpec.EvalWindow)
			}
			if gotSpec.Frequency != wantSpec.Frequency {
			if !gotSpec.Frequency.Equal(wantSpec.Frequency) {
				t.Errorf("RollingWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency)
			}
		case CumulativeEvaluation:

@@ -866,7 +868,7 @@ func TestEvaluationEnvelope_UnmarshalJSON(t *testing.T) {
				(gotSpec.Schedule.Minute != nil && wantSpec.Schedule.Minute != nil && *gotSpec.Schedule.Minute != *wantSpec.Schedule.Minute) {
				t.Errorf("CumulativeWindow.Schedule.Minute = %v, want %v", gotSpec.Schedule.Minute, wantSpec.Schedule.Minute)
			}
			if gotSpec.Frequency != wantSpec.Frequency {
			if !gotSpec.Frequency.Equal(wantSpec.Frequency) {
				t.Errorf("CumulativeWindow.Frequency = %v, want %v", gotSpec.Frequency, wantSpec.Frequency)
			}
			if gotSpec.Timezone != wantSpec.Timezone {

@@ -131,7 +131,7 @@ func (m *GettablePlannedMaintenance) checkDaily(currentTime time.Time, rec *Recu
	if candidate.After(currentTime) {
		candidate = candidate.AddDate(0, 0, -1)
	}
	return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
	return currentTime.Sub(candidate) <= rec.Duration.Duration()
}

// checkWeekly finds the most recent allowed occurrence by rebasing the recurrence’s

@@ -160,7 +160,7 @@ func (m *GettablePlannedMaintenance) checkWeekly(currentTime time.Time, rec *Rec
		if candidate.After(currentTime) {
			candidate = candidate.AddDate(0, 0, -7)
		}
		if currentTime.Sub(candidate) <= time.Duration(rec.Duration) {
		if currentTime.Sub(candidate) <= rec.Duration.Duration() {
			return true
		}
	}

@@ -198,7 +198,7 @@ func (m *GettablePlannedMaintenance) checkMonthly(currentTime time.Time, rec *Re
			)
		}
	}
	return currentTime.Sub(candidate) <= time.Duration(rec.Duration)
	return currentTime.Sub(candidate) <= rec.Duration.Duration()
}

func (m *GettablePlannedMaintenance) IsActive(now time.Time) bool {

@@ -255,7 +255,7 @@ func (m *GettablePlannedMaintenance) Validate() error {
	if m.Schedule.Recurrence.RepeatType == "" {
		return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing repeat type in the payload")
	}
	if m.Schedule.Recurrence.Duration == 0 {
	if m.Schedule.Recurrence.Duration.IsZero() {
		return errors.Newf(errors.TypeInvalidInput, ErrCodeInvalidPlannedMaintenancePayload, "missing duration in the payload")
	}
	if m.Schedule.Recurrence.EndTime != nil && m.Schedule.Recurrence.EndTime.Before(m.Schedule.Recurrence.StartTime) {

@@ -3,6 +3,8 @@ package ruletypes
import (
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/valuer"
)

// Helper function to create a time pointer

@@ -25,7 +27,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "Europe/London",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2025, 3, 1, 0, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 24),
				Duration:   valuer.MustParseTextDuration("24h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday, RepeatOnTuesday, RepeatOnWednesday, RepeatOnThursday, RepeatOnFriday, RepeatOnSunday},
			},

@@ -42,7 +44,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
				Duration:   Duration(time.Hour * 4), // Until Tuesday 02:00
				Duration:   valuer.MustParseTextDuration("4h"), // Until Tuesday 02:00
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday}, // Only Monday
			},

@@ -59,7 +61,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
				Duration:   Duration(time.Hour * 4), // Until Tuesday 02:00
				Duration:   valuer.MustParseTextDuration("4h"), // Until Tuesday 02:00
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday}, // Only Monday
			},

@@ -76,7 +78,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 4, 1, 22, 0, 0, 0, time.UTC), // Monday 22:00
				Duration:   Duration(time.Hour * 52), // Until Thursday 02:00
				Duration:   valuer.MustParseTextDuration("52h"), // Until Thursday 02:00
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday}, // Only Monday
			},

@@ -93,7 +95,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 4, 2, 22, 0, 0, 0, time.UTC), // Tuesday 22:00
				Duration:   Duration(time.Hour * 4), // Until Wednesday 02:00
				Duration:   valuer.MustParseTextDuration("4h"), // Until Wednesday 02:00
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnTuesday}, // Only Tuesday
			},

@@ -110,7 +112,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 23, 0, 0, 0, time.UTC), // 23:00
				Duration:   Duration(time.Hour * 2), // Until 01:00 next day
				Duration:   valuer.MustParseTextDuration("2h"), // Until 01:00 next day
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -126,7 +128,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -142,7 +144,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -158,7 +160,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 28, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 72), // 3 days
				Duration:   valuer.MustParseTextDuration("72h"), // 3 days
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -174,7 +176,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 28, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 72), // 3 days
				Duration:   valuer.MustParseTextDuration("72h"), // 3 days
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnSunday},
			},

@@ -191,7 +193,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 30, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 48), // 2 days, crosses to Feb 1
				Duration:   valuer.MustParseTextDuration("48h"), // 2 days, crosses to Feb 1
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -207,7 +209,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "America/New_York", // UTC-5 or UTC-4 depending on DST
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 22, 0, 0, 0, time.FixedZone("America/New_York", -5*3600)),
				Duration:   Duration(time.Hour * 4),
				Duration:   valuer.MustParseTextDuration("4h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -223,7 +225,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -240,7 +242,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				EndTime:    timePtr(time.Date(2024, 1, 10, 12, 0, 0, 0, time.UTC)),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -256,7 +258,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 3, 31, 22, 0, 0, 0, time.UTC), // March 31, 22:00
				Duration:   Duration(time.Hour * 6), // Until April 1, 04:00
				Duration:   valuer.MustParseTextDuration("6h"), // Until April 1, 04:00
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -272,7 +274,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 4, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{}, // Empty - should apply to all days
			},

@@ -289,7 +291,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -304,7 +306,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 23, 30, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 1), // Crosses to 00:30 next day
				Duration:   valuer.MustParseTextDuration("1h"), // Crosses to 00:30 next day
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -319,7 +321,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -334,7 +336,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 30, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 48), // 2 days duration
				Duration:   valuer.MustParseTextDuration("48h"), // 2 days duration
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -349,7 +351,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 4, 1, 23, 0, 0, 0, time.UTC), // Monday 23:00
				Duration:   Duration(time.Hour * 2), // Until Tuesday 01:00
				Duration:   valuer.MustParseTextDuration("2h"), // Until Tuesday 01:00
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday}, // Only Monday
			},

@@ -365,7 +367,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC), // January 31st
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -380,7 +382,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 22, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 4), // Until 02:00 next day
				Duration:   valuer.MustParseTextDuration("4h"), // Until 02:00 next day
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -395,7 +397,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 31, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -446,7 +448,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "US/Eastern",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2025, 3, 29, 20, 0, 0, 0, time.FixedZone("US/Eastern", -4*3600)),
				Duration:   Duration(time.Hour * 24),
				Duration:   valuer.MustParseTextDuration("24h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnSunday, RepeatOnSaturday},
			},

@@ -462,7 +464,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -477,7 +479,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -492,7 +494,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeDaily,
			},
		},

@@ -507,7 +509,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday},
			},

@@ -523,7 +525,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday},
			},

@@ -539,7 +541,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday},
			},

@@ -555,7 +557,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday},
			},

@@ -571,7 +573,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 01, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeWeekly,
				RepeatOn:   []RepeatOn{RepeatOnMonday},
			},

@@ -587,7 +589,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -602,7 +604,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -617,7 +619,7 @@ func TestShouldSkipMaintenance(t *testing.T) {
			Timezone: "UTC",
			Recurrence: &Recurrence{
				StartTime:  time.Date(2024, 04, 04, 12, 0, 0, 0, time.UTC),
				Duration:   Duration(time.Hour * 2),
				Duration:   valuer.MustParseTextDuration("2h"),
				RepeatType: RepeatTypeMonthly,
			},
		},

@@ -5,7 +5,7 @@ import (
	"encoding/json"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type RepeatType string

@@ -38,40 +38,12 @@ var RepeatOnAllMap = map[RepeatOn]time.Weekday{
	RepeatOnSaturday: time.Saturday,
}

type Duration time.Duration

func (d Duration) MarshalJSON() ([]byte, error) {
	return json.Marshal(time.Duration(d).String())
}

func (d *Duration) UnmarshalJSON(b []byte) error {
	var v interface{}
	if err := json.Unmarshal(b, &v); err != nil {
		return err
	}
	switch value := v.(type) {
	case float64:
		*d = Duration(time.Duration(value))
		return nil
	case string:
		tmp, err := time.ParseDuration(value)
		if err != nil {
			return err
		}
		*d = Duration(tmp)

		return nil
	default:
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
	}
}

type Recurrence struct {
	StartTime  time.Time  `json:"startTime"`
	EndTime    *time.Time `json:"endTime,omitempty"`
	Duration   Duration   `json:"duration"`
	RepeatType RepeatType `json:"repeatType"`
	RepeatOn   []RepeatOn `json:"repeatOn"`
	StartTime  time.Time           `json:"startTime"`
	EndTime    *time.Time          `json:"endTime,omitempty"`
	Duration   valuer.TextDuration `json:"duration"`
	RepeatType RepeatType          `json:"repeatType"`
	RepeatOn   []RepeatOn          `json:"repeatOn"`
}

func (r *Recurrence) Scan(src interface{}) error {

@@ -1,48 +0,0 @@
package telemetrytypes

// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}

// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
	return []any{
		FieldContextMetric,
		FieldContextLog,
		FieldContextSpan,
		// FieldContextTrace,
		FieldContextResource,
		// FieldContextScope,
		FieldContextAttribute,
		// FieldContextEvent,
		FieldContextBody,
	}
}

// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
	return []any{
		SourceMeter,
	}
}

// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
	return []any{
		FieldDataTypeString,
		FieldDataTypeBool,
		FieldDataTypeFloat64,
		FieldDataTypeInt64,
		FieldDataTypeNumber,
		// FieldDataTypeArrayString,
		// FieldDataTypeArrayFloat64,
		// FieldDataTypeArrayBool,
		// FieldDataTypeArrayInt64,
		// FieldDataTypeArrayNumber,
	}
}

pkg/valuer/text_duration.go (new file, 159 lines)
@@ -0,0 +1,159 @@
package valuer

import (
	"database/sql/driver"
	"encoding/json"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
)

var _ Valuer = (*TextDuration)(nil)

// TextDuration preserves the human-readable duration text as provided by the input.
// It keeps the raw input text so serialization does not normalize values like
// "90m" into "1h30m0s".
type TextDuration struct {
	text  string
	value time.Duration
}

// ParseTextDuration parses a human-readable duration string.
// This preserves the raw text so that it can be serialized back to JSON.
func ParseTextDuration(s string) (TextDuration, error) {
	d, err := time.ParseDuration(s)
	if err != nil {
		return TextDuration{}, errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse duration text")
	}
	return TextDuration{text: s, value: d}, nil
}

// MustParseTextDuration parses a human-readable duration string, preserving
// the raw text, and panics if parsing fails.
func MustParseTextDuration(s string) TextDuration {
	d, err := ParseTextDuration(s)
	if err != nil {
		panic(err)
	}
	return d
}

// Duration returns the [time.Duration] value.
func (d TextDuration) Duration() time.Duration {
	return d.value
}

// IsZero implements [Valuer].
// It returns whether the parsed duration is zero.
func (d TextDuration) IsZero() bool {
	return d.value == 0
}

// IsPositive reports whether the duration is greater than zero.
func (d TextDuration) IsPositive() bool {
	return d.value > 0
}

// String implements the [fmt.Stringer] interface.
func (d TextDuration) String() string {
	if len(d.text) > 0 {
		return d.text
	}
	return d.value.String()
}

// StringValue implements [Valuer].
func (d TextDuration) StringValue() string {
	return d.String()
}

// MarshalJSON implements the [encoding/json.Marshaler] interface.
// It serializes the duration value in a human-readable format (1h30m0s).
// If the original text is available, it is returned as-is. Example: 90m is not normalized to 1h30m0s.
func (d TextDuration) MarshalJSON() ([]byte, error) {
	return json.Marshal(d.String())
}

// UnmarshalJSON implements the [encoding/json.Unmarshaler] interface.
// It parses string or numeric durations; for string input the original text is retained.
func (d *TextDuration) UnmarshalJSON(b []byte) error {
	var v interface{}
	if err := json.Unmarshal(b, &v); err != nil {
		return err
	}
	switch value := v.(type) {
	case float64:
		d.value = time.Duration(value)
		d.text = ""
		return nil

	case string:
		tmp, err := time.ParseDuration(value)
		if err != nil {
			return err
		}
		d.value = tmp
		d.text = value
		return nil

	default:
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid duration")
	}
}

// MarshalText implements [encoding.TextMarshaler].
func (d TextDuration) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}

// UnmarshalText implements [encoding.TextUnmarshaler].
func (d *TextDuration) UnmarshalText(text []byte) error {
	s := string(text)
	tmp, err := time.ParseDuration(s)
	if err != nil {
		return errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "failed to parse duration text")
	}
	d.value = tmp
	d.text = s
	return nil
}

// Value implements [driver.Valuer] by delegating to the underlying duration.
func (d TextDuration) Value() (driver.Value, error) {
	return d.String(), nil
}

// Scan implements [database/sql.Scanner] to read the duration from the database.
func (d *TextDuration) Scan(value any) error {
	if value == nil {
		d.value = 0
		d.text = ""
		return nil
	}

	switch v := value.(type) {
	case int64:
		d.value = time.Duration(v)
		d.text = ""
		return nil
	case []byte:
		return d.UnmarshalText(v)
	case string:
		return d.UnmarshalText([]byte(v))
	default:
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput,
			"cannot scan type %T into TextDuration", value)
	}
}

// Equal reports whether two TextDurations represent the same underlying duration value.
//
// Note that their String representations can differ.
func (d TextDuration) Equal(d2 TextDuration) bool {
	return d.value == d2.value
}

// Milliseconds returns the duration as an integer millisecond count.
func (d TextDuration) Milliseconds() int64 {
	return d.value.Milliseconds()
}
|
||||
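A minimal usage sketch (not part of the diff) of the behavior this type exists for: a bare time.Duration normalizes "90m" on output, while TextDuration echoes the input back. The import path is inferred from the file location:

package main

import (
    "encoding/json"
    "fmt"
    "time"

    "github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
    d := valuer.MustParseTextDuration("90m")

    out, _ := json.Marshal(d)
    fmt.Println(string(out)) // "90m" — input text preserved

    // For contrast, the normalized form a bare time.Duration produces:
    fmt.Println((90 * time.Minute).String()) // 1h30m0s
}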
135  pkg/valuer/text_duration_test.go  Normal file
@@ -0,0 +1,135 @@
package valuer

import (
    "encoding/json"
    "testing"
    "time"

    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

func TestTextDuration(t *testing.T) {
    cases := []struct {
        name     string
        input    string
        error    bool
        duration time.Duration
        string   string
    }{
        {
            name:     "ParseTextDuration(10s)",
            input:    "10s",
            duration: 10 * time.Second,
            string:   "10s",
        },
        {
            name:     "ParseTextDuration(90m)",
            input:    "90m",
            duration: 90 * time.Minute,
            string:   "90m",
        },
        {
            name:     "ParseTextDuration(1h30m)",
            input:    "1h30m",
            duration: 90 * time.Minute,
            string:   "1h30m",
        },
        {
            name:  "Invalid duration",
            input: "invalid",
            error: true,
        },
    }

    for _, tc := range cases {
        t.Run(tc.name, func(t *testing.T) {
            d, err := ParseTextDuration(tc.input)

            if tc.error {
                assert.Error(t, err)
                return
            }

            assert.NoError(t, err)
            assert.Equal(t, tc.duration, d.Duration())
            assert.Equal(t, tc.string, d.String())
        })
    }
}

func TestTextDuration_MustParsePanics(t *testing.T) {
    assert.Panics(t, func() {
        MustParseTextDuration("not-a-duration")
    })
}

func TestTextDuration_JSON(t *testing.T) {
    t.Run("RoundTrip", func(t *testing.T) {
        parsed, err := ParseTextDuration("90m")
        require.NoError(t, err)

        data, err := json.Marshal(parsed)
        require.NoError(t, err)
        assert.Equal(t, `"90m"`, string(data))

        var decoded TextDuration
        require.NoError(t, json.Unmarshal([]byte(`"2h"`), &decoded))
        assert.Equal(t, 2*time.Hour, decoded.Duration())
        assert.Equal(t, "2h", decoded.String())
    })

    t.Run("Numeric", func(t *testing.T) {
        var decoded TextDuration
        require.NoError(t, json.Unmarshal([]byte(`1000000000`), &decoded))
        assert.Equal(t, time.Second, decoded.Duration())
        assert.Equal(t, "1s", decoded.String())
    })

    t.Run("Invalid", func(t *testing.T) {
        var decoded TextDuration
        assert.Error(t, json.Unmarshal([]byte(`true`), &decoded))
        assert.Error(t, json.Unmarshal([]byte(`"nope"`), &decoded))
    })
}

func TestTextDurationTextMarshaling(t *testing.T) {
    parsed, err := ParseTextDuration("45s")
    require.NoError(t, err)

    data, err := parsed.MarshalText()
    require.NoError(t, err)
    assert.Equal(t, "45s", string(data))

    var decoded TextDuration
    require.NoError(t, decoded.UnmarshalText([]byte("2m")))
    assert.Equal(t, 2*time.Minute, decoded.Duration())
    assert.Equal(t, "2m", decoded.String())

    assert.Error(t, decoded.UnmarshalText([]byte("invalid")))
}

func TestTextDurationValueAndScan(t *testing.T) {
    parsed, err := ParseTextDuration("2s")
    require.NoError(t, err)

    val, err := parsed.Value()
    require.NoError(t, err)
    assert.Equal(t, "2s", val)

    var scanned TextDuration

    err = scanned.Scan(nil)
    require.NoError(t, err)
    assert.True(t, scanned.IsZero())
    assert.Equal(t, "0s", scanned.String())

    err = scanned.Scan([]byte("3s"))
    require.NoError(t, err)
    assert.Equal(t, 3*time.Second, scanned.Duration())
    assert.Equal(t, "3s", scanned.String())

    err = scanned.Scan(true)
    assert.Error(t, err)
}
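The tests above exercise the type directly; the practical payoff is in structs decoded from user input and later re-serialized. A hedged sketch — the retentionConfig struct below is hypothetical, not part of this change:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/SigNoz/signoz/pkg/valuer"
)

// retentionConfig is a hypothetical config struct; a TextDuration field
// decoded from user JSON keeps its original spelling on re-marshal.
type retentionConfig struct {
    TTL valuer.TextDuration `json:"ttl"`
}

func main() {
    var cfg retentionConfig
    _ = json.Unmarshal([]byte(`{"ttl":"90m"}`), &cfg)

    out, _ := json.Marshal(cfg)
    fmt.Println(string(out)) // {"ttl":"90m"} — not {"ttl":"1h30m0s"}
}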