Compare commits

...

11 Commits

Author SHA1 Message Date
Naman Verma  4b665812b6  Merge branch 'main' into nv/10282  2026-02-18 11:33:00 +05:30
Naman Verma  d5d9215d03  Merge branch 'main' into nv/10282  2026-02-17 18:02:16 +05:30
Naman Verma  3f3b46b4ea  fix: remove other instances of metric type being changed  2026-02-17 18:00:58 +05:30
Naman Verma  a9616c1984  test: include ismonotonic in UT query  2026-02-17 18:00:50 +05:30
Naman Verma  c1136c5add  Merge branch 'main' into nv/10282  2026-02-17 17:42:04 +05:30
Naman Verma  db9d3e1fca  fix: take IsMonotonic from updated metadata as well  2026-02-17 17:41:43 +05:30
Naman Verma  ffccf44c6c  fix: change json key to camelCase to match other fields  2026-02-17 11:06:34 +05:30
Naman Verma  cbe564b498  fix: same change for meters  2026-02-17 11:05:59 +05:30
Naman Verma  b6f9023c9b  Merge branch 'main' into nv/10282  2026-02-17 10:59:26 +05:30
Naman Verma  da7f62a5d3  Merge branch 'main' into nv/10282  2026-02-16 11:48:33 +05:30
Naman Verma  052cb01e00  feat: include monotonicity in metrics' properties API responses  2026-02-16 09:16:17 +05:30
5 changed files with 24 additions and 36 deletions

View File

@@ -3261,20 +3261,14 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, org
metadata := metadataMap[name]
typ := string(metadata.MetricType)
temporality := string(metadata.Temporality)
isMonotonic := metadata.IsMonotonic
// Non-monotonic cumulative sums are treated as gauges
if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
typ = "Gauge"
}
// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
key := v3.AttributeKey{
Key: name,
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyType(typ),
IsColumn: true,
Key: name,
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyType(typ),
IsMonotonic: metadata.IsMonotonic,
IsColumn: true,
}
if _, ok := seen[name+typ]; ok {
@@ -3317,17 +3311,13 @@ func (r *ClickHouseReader) GetMeterAggregateAttributes(ctx context.Context, orgI
return nil, fmt.Errorf("error while scanning meter name: %s", err.Error())
}
// Non-monotonic cumulative sums are treated as gauges
if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
typ = "Gauge"
}
// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
key := v3.AttributeKey{
Key: name,
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyType(typ),
IsColumn: true,
Key: name,
DataType: v3.AttributeKeyDataTypeFloat64,
Type: v3.AttributeKeyType(typ),
IsMonotonic: isMonotonic,
IsColumn: true,
}
response.AttributeKeys = append(response.AttributeKeys, key)
}
@@ -5419,6 +5409,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
t.metric_name AS metric_name,
ANY_VALUE(t.description) AS description,
ANY_VALUE(t.type) AS metric_type,
ANY_VALUE(t.is_monotonic) AS metric_is_monotonic,
ANY_VALUE(t.unit) AS metric_unit,
uniq(t.fingerprint) AS timeseries,
uniq(metric_name) OVER() AS total
@@ -5450,7 +5441,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
for rows.Next() {
var metric metrics_explorer.MetricDetail
if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.IsMonotonic, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
zap.L().Error("Error scanning metric row", zap.Error(err))
return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
}
@@ -5582,6 +5573,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
if updatedMetrics.Description != "" {
response.Metrics[i].Description = updatedMetrics.Description
}
response.Metrics[i].IsMonotonic = updatedMetrics.IsMonotonic
}
if samples, exists := samplesMap[response.Metrics[i].MetricName]; exists {
response.Metrics[i].Samples = samples
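
Note: the hunks above stop coercing non-monotonic cumulative Sums into Gauges and instead pass the metric type through unchanged, carrying monotonicity on the returned v3.AttributeKey; ListSummaryMetrics likewise selects is_monotonic and scans it into the metric row in the same column order as the query. A minimal sketch of the new behaviour, using stand-in types in place of the repo's v3 and metadata types:

package main

import "fmt"

// Stand-ins for the repo's metadata and v3.AttributeKey types; illustrative only.
type metricMetadata struct {
	MetricType  string
	Temporality string
	IsMonotonic bool
}

type attributeKey struct {
	Key         string
	DataType    string
	Type        string
	IsMonotonic bool
	IsColumn    bool
}

// keyFor mirrors the new construction: the type is no longer rewritten to
// "Gauge" for non-monotonic cumulative sums; monotonicity is forwarded as-is.
func keyFor(name string, md metricMetadata) attributeKey {
	return attributeKey{
		Key:         name,
		DataType:    "float64",
		Type:        md.MetricType,
		IsMonotonic: md.IsMonotonic,
		IsColumn:    true,
	}
}

func main() {
	md := metricMetadata{MetricType: "Sum", Temporality: "Cumulative", IsMonotonic: false}
	// Previously this would have come back as a Gauge; now it stays a Sum
	// with IsMonotonic=false alongside.
	fmt.Printf("%+v\n", keyFor("queue_depth", md))
}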

View File

@@ -444,10 +444,6 @@ func getQueryRangeForRelateMetricsList(metricName string, scores metrics_explore
Filters: filters,
}
if scores.MetricType == v3.MetricTypeSum && !scores.IsMonotonic && scores.Temporality == v3.Cumulative {
scores.MetricType = v3.MetricTypeGauge
}
switch scores.MetricType {
case v3.MetricTypeGauge:
query.TimeAggregation = v3.TimeAggregationAvg
@@ -460,8 +456,9 @@ func getQueryRangeForRelateMetricsList(metricName string, scores metrics_explore
}
query.AggregateAttribute = v3.AttributeKey{
Key: metricName,
Type: v3.AttributeKeyType(scores.MetricType),
Key: metricName,
Type: v3.AttributeKeyType(scores.MetricType),
IsMonotonic: scores.IsMonotonic,
}
query.StepInterval = 60
@@ -552,9 +549,6 @@ func (receiver *SummaryService) GetInspectMetrics(ctx context.Context, params *m
}
func (receiver *SummaryService) UpdateMetricsMetadata(ctx context.Context, orgID valuer.UUID, params *metrics_explorer.UpdateMetricsMetadataRequest) *model.ApiError {
if params.MetricType == v3.MetricTypeSum && !params.IsMonotonic && params.Temporality == v3.Cumulative {
params.MetricType = v3.MetricTypeGauge
}
metadata := model.UpdateMetricsMetadata{
MetricName: params.MetricName,
MetricType: params.MetricType,
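
Note: the same Sum-to-Gauge rewrite is also dropped from getQueryRangeForRelateMetricsList and UpdateMetricsMetadata, so an update request for a non-monotonic cumulative Sum keeps its Sum type, and related-metrics queries carry monotonicity on the AggregateAttribute instead. A rough sketch of the updated-metadata path, using hypothetical stand-in types rather than the repo's metrics_explorer/model types:

package main

import "fmt"

// Hypothetical stand-ins for the request and stored-metadata shapes.
type updateRequest struct {
	MetricName  string
	MetricType  string
	Temporality string
	IsMonotonic bool
}

type storedMetadata struct {
	MetricName  string
	MetricType  string
	IsMonotonic bool
}

// store mirrors the new UpdateMetricsMetadata flow: no type coercion, the
// requested type and monotonicity are carried over exactly as received.
func store(req updateRequest) storedMetadata {
	return storedMetadata{
		MetricName:  req.MetricName,
		MetricType:  req.MetricType,
		IsMonotonic: req.IsMonotonic,
	}
}

func main() {
	req := updateRequest{MetricName: "queue_depth", MetricType: "Sum", Temporality: "Cumulative", IsMonotonic: false}
	fmt.Printf("%+v\n", store(req)) // stays a Sum; callers read IsMonotonic separately
}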

View File

@@ -36,6 +36,7 @@ type MetricDetail struct {
TimeSeries uint64 `json:"timeseries"`
Samples uint64 `json:"samples"`
LastReceived int64 `json:"lastReceived"`
IsMonotonic bool `json:"is_monotonic"`
}
type TreeMapResponseItem struct {
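
Note: MetricDetail gains an IsMonotonic field serialized as is_monotonic (unlike AttributeKey below, which uses camelCase isMonotonic). A quick sketch of the resulting wire shape, using a trimmed copy of the struct:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the MetricDetail fields shown above.
type metricDetail struct {
	LastReceived int64 `json:"lastReceived"`
	IsMonotonic  bool  `json:"is_monotonic"`
}

func main() {
	b, _ := json.Marshal(metricDetail{LastReceived: 1753527163000, IsMonotonic: true})
	fmt.Println(string(b)) // {"lastReceived":1753527163000,"is_monotonic":true}
}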

View File

@@ -381,11 +381,12 @@ func (t AttributeKeyType) String() string {
}
type AttributeKey struct {
Key string `json:"key"`
DataType AttributeKeyDataType `json:"dataType"`
Type AttributeKeyType `json:"type"`
IsColumn bool `json:"isColumn"`
IsJSON bool `json:"isJSON"`
Key string `json:"key"`
DataType AttributeKeyDataType `json:"dataType"`
Type AttributeKeyType `json:"type"`
IsColumn bool `json:"isColumn"`
IsMonotonic bool `json:"isMonotonic"`
IsJSON bool `json:"isJSON"`
}
func (a AttributeKey) CacheKey() string {
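
Note: adding IsMonotonic to v3.AttributeKey (serialized as isMonotonic, between isColumn and isJSON) means every marshalled attribute key, including zero values in builder queries, now carries the field. A small sketch with a trimmed copy of the struct:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the AttributeKey definition above.
type attributeKey struct {
	Key         string `json:"key"`
	DataType    string `json:"dataType"`
	Type        string `json:"type"`
	IsColumn    bool   `json:"isColumn"`
	IsMonotonic bool   `json:"isMonotonic"`
	IsJSON      bool   `json:"isJSON"`
}

func main() {
	// Marshalling the zero value shows why the expected alert links in the
	// test below now include "isMonotonic":false.
	b, _ := json.Marshal(attributeKey{})
	fmt.Println(string(b))
	// {"key":"","dataType":"","type":"","isColumn":false,"isMonotonic":false,"isJSON":false}
}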

View File

@@ -258,7 +258,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
ts := time.UnixMilli(1753527163000)
link := rule.prepareLinksToLogs(context.Background(), ts, labels.Labels{})
assert.Contains(t, link, "compositeQuery=%257B%2522queryType%2522%253A%2522builder%2522%252C%2522builder%2522%253A%257B%2522queryData%2522%253A%255B%257B%2522queryName%2522%253A%2522A%2522%252C%2522stepInterval%2522%253A60%252C%2522dataSource%2522%253A%2522logs%2522%252C%2522aggregateOperator%2522%253A%2522noop%2522%252C%2522aggregateAttribute%2522%253A%257B%2522key%2522%253A%2522%2522%252C%2522dataType%2522%253A%2522%2522%252C%2522type%2522%253A%2522%2522%252C%2522isColumn%2522%253Afalse%252C%2522isJSON%2522%253Afalse%257D%252C%2522expression%2522%253A%2522A%2522%252C%2522disabled%2522%253Afalse%252C%2522limit%2522%253A0%252C%2522offset%2522%253A0%252C%2522pageSize%2522%253A0%252C%2522ShiftBy%2522%253A0%252C%2522IsAnomaly%2522%253Afalse%252C%2522QueriesUsedInFormula%2522%253Anull%252C%2522filter%2522%253A%257B%2522expression%2522%253A%2522service.name%2BEXISTS%2522%257D%257D%255D%252C%2522queryFormulas%2522%253A%255B%255D%257D%257D&timeRange=%7B%22start%22%3A1753526700000%2C%22end%22%3A1753527000000%2C%22pageSize%22%3A100%7D&startTime=1753526700000&endTime=1753527000000&options=%7B%22maxLines%22%3A0%2C%22format%22%3A%22%22%2C%22selectColumns%22%3Anull%7D")
assert.Contains(t, link, "compositeQuery=%257B%2522queryType%2522%253A%2522builder%2522%252C%2522builder%2522%253A%257B%2522queryData%2522%253A%255B%257B%2522queryName%2522%253A%2522A%2522%252C%2522stepInterval%2522%253A60%252C%2522dataSource%2522%253A%2522logs%2522%252C%2522aggregateOperator%2522%253A%2522noop%2522%252C%2522aggregateAttribute%2522%253A%257B%2522key%2522%253A%2522%2522%252C%2522dataType%2522%253A%2522%2522%252C%2522type%2522%253A%2522%2522%252C%2522isColumn%2522%253Afalse%252C%2522isMonotonic%2522%253Afalse%252C%2522isJSON%2522%253Afalse%257D%252C%2522expression%2522%253A%2522A%2522%252C%2522disabled%2522%253Afalse%252C%2522limit%2522%253A0%252C%2522offset%2522%253A0%252C%2522pageSize%2522%253A0%252C%2522ShiftBy%2522%253A0%252C%2522IsAnomaly%2522%253Afalse%252C%2522QueriesUsedInFormula%2522%253Anull%252C%2522filter%2522%253A%257B%2522expression%2522%253A%2522service.name%2BEXISTS%2522%257D%257D%255D%252C%2522queryFormulas%2522%253A%255B%255D%257D%257D&timeRange=%7B%22start%22%3A1753526700000%2C%22end%22%3A1753527000000%2C%22pageSize%22%3A100%7D&startTime=1753526700000&endTime=1753527000000&options=%7B%22maxLines%22%3A0%2C%22format%22%3A%22%22%2C%22selectColumns%22%3Anull%7D")
}
func TestPrepareLinksToTracesV5(t *testing.T) {
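
Note: the old and new expected links above differ only inside the double-URL-encoded compositeQuery parameter, where the serialized aggregateAttribute now includes "isMonotonic":false. A sketch that decodes the changed fragment to make the difference readable:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Fragment copied from the updated expectation; the compositeQuery value
	// is URL-encoded twice, so unescape twice to recover the JSON.
	fragment := "%2522isColumn%2522%253Afalse%252C%2522isMonotonic%2522%253Afalse%252C%2522isJSON%2522%253Afalse"
	once, _ := url.QueryUnescape(fragment)
	twice, _ := url.QueryUnescape(once)
	fmt.Println(twice) // "isColumn":false,"isMonotonic":false,"isJSON":false
}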