Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-23 16:59:30 +00:00)

Compare commits (4 commits):

- d6ba98148a
- 953b9e8216
- 212f5f4120
- 465e07de83

@@ -326,6 +326,9 @@ components:
           type: string
         url:
           type: string
+      required:
+        - code
+        - message
       type: object
     ErrorsResponseerroradditional:
       properties:

@@ -1661,6 +1664,9 @@ components:
           $ref: '#/components/schemas/ErrorsJSON'
         status:
           type: string
+      required:
+        - status
+        - error
       type: object
     RoletypesGettableResources:
       properties:

@@ -436,7 +436,7 @@ export interface ErrorsJSONDTO {
   /**
    * @type string
    */
-  code?: string;
+  code: string;
   /**
    * @type array
    */

@@ -444,7 +444,7 @@ export interface ErrorsJSONDTO {
   /**
    * @type string
    */
-  message?: string;
+  message: string;
   /**
    * @type string
    */

@@ -1985,11 +1985,11 @@ export enum Querybuildertypesv5VariableTypeDTO {
   text = 'text',
 }
 export interface RenderErrorResponseDTO {
-  error?: ErrorsJSONDTO;
+  error: ErrorsJSONDTO;
   /**
    * @type string
    */
-  status?: string;
+  status: string;
 }
 
 /**

@@ -48,7 +48,9 @@ function ForgotPassword({
 	}
 
 	try {
-		ErrorResponseHandlerV2(mutationError as AxiosError<ErrorV2Resp>);
+		ErrorResponseHandlerV2(
+			(mutationError as unknown) as AxiosError<ErrorV2Resp>,
+		);
 	} catch (apiError) {
 		return apiError as APIError;
 	}

@@ -342,7 +342,7 @@ function MultiIngestionSettings(): JSX.Element {
 
 	useEffect(() => {
 		if (isError) {
-			showErrorNotification(notifications, error as AxiosError);
+			showErrorNotification(notifications, (error as unknown) as AxiosError);
 		}
 	}, [error, isError, notifications]);
 

@@ -86,9 +86,9 @@ export default function OnboardingIngestionDetails(): JSX.Element {
 				<div className="ingestion-endpoint-section-error-container">
 					<Typography.Text className="ingestion-endpoint-section-error-text error">
 						<TriangleAlert size={14} />{' '}
-						{(error as AxiosError<RenderErrorResponseDTO>)?.response?.data?.error
-							?.message ||
-							(error as AxiosError)?.message ||
+						{((error as unknown) as AxiosError<RenderErrorResponseDTO>)?.response
+							?.data?.error.message ||
+							((error as unknown) as AxiosError)?.message ||
 							'Something went wrong'}
 					</Typography.Text>
 

@@ -110,7 +110,7 @@ function AuthDomain(): JSX.Element {
 	let errorResult: APIError | null = null;
 	try {
 		ErrorResponseHandlerV2(
-			errorFetchingAuthDomainListResponse as AxiosError<ErrorV2Resp>,
+			(errorFetchingAuthDomainListResponse as unknown) as AxiosError<ErrorV2Resp>,
 		);
 	} catch (error) {
 		errorResult = error as APIError;

@@ -6,8 +6,8 @@ import (
 )
 
 type JSON struct {
-	Code    string                    `json:"code"`
-	Message string                    `json:"message"`
+	Code    string                    `json:"code" required:"true"`
+	Message string                    `json:"message" required:"true"`
 	Url     string                    `json:"url,omitempty"`
 	Errors  []responseerroradditional `json:"errors,omitempty"`
 }

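These `required:"true"` tags are what feed the `required:` lists added to the OpenAPI schema in the first two hunks, which in turn is why the regenerated TypeScript DTOs earlier in this compare drop the `?` from `code`, `message`, `status`, and `error`. As a hedged illustration of the mechanism, here is a minimal sketch of how a spec generator could collect such tags via reflection; `requiredJSONFields` is a hypothetical helper, not SigNoz code, and the struct is a trimmed copy (the `Errors` field is omitted because its element type is unexported):

```go
package main

import (
	"fmt"
	"reflect"
	"strings"
)

// Trimmed copy of the JSON struct above, kept self-contained.
type JSON struct {
	Code    string `json:"code" required:"true"`
	Message string `json:"message" required:"true"`
	Url     string `json:"url,omitempty"`
}

// requiredJSONFields walks struct tags the way a spec generator could,
// returning the JSON names of fields tagged required:"true", i.e. the
// entries for the schema's `required:` list.
func requiredJSONFields(v any) []string {
	t := reflect.TypeOf(v)
	var names []string
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		if f.Tag.Get("required") != "true" {
			continue
		}
		// The JSON name is the part of the json tag before any ",omitempty".
		names = append(names, strings.Split(f.Tag.Get("json"), ",")[0])
	}
	return names
}

func main() {
	fmt.Println(requiredJSONFields(JSON{})) // [code message]
}
```
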
@@ -21,8 +21,8 @@ type SuccessResponse struct {
 }
 
 type ErrorResponse struct {
-	Status string       `json:"status"`
-	Error  *errors.JSON `json:"error"`
+	Status string       `json:"status" required:"true"`
+	Error  *errors.JSON `json:"error" required:"true"`
 }
 
 func Success(rw http.ResponseWriter, httpCode int, data interface{}) {

@@ -3261,14 +3261,20 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, org
 		metadata := metadataMap[name]
 
 		typ := string(metadata.MetricType)
+		temporality := string(metadata.Temporality)
+		isMonotonic := metadata.IsMonotonic
 
+		// Non-monotonic cumulative sums are treated as gauges
+		if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
+			typ = "Gauge"
+		}
+
 		// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
 		key := v3.AttributeKey{
-			Key:         name,
-			DataType:    v3.AttributeKeyDataTypeFloat64,
-			Type:        v3.AttributeKeyType(typ),
-			IsMonotonic: metadata.IsMonotonic,
-			IsColumn:    true,
+			Key:      name,
+			DataType: v3.AttributeKeyDataTypeFloat64,
+			Type:     v3.AttributeKeyType(typ),
+			IsColumn: true,
 		}
 
 		if _, ok := seen[name+typ]; ok {

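The rule this hunk (and the next several) introduces is small enough to state as a standalone function. A minimal sketch, using plain strings in place of the `v3.MetricType`/`v3.Temporality` constants the real code compares against (the string value "Cumulative" is an assumption mirroring `string(v3.Cumulative)`):

```go
package main

import "fmt"

// normalizeMetricType mirrors the check added above: a cumulative Sum that
// is not monotonic carries no usable rate information, so the explorer
// reports and queries it as a Gauge instead.
func normalizeMetricType(typ, temporality string, isMonotonic bool) string {
	if typ == "Sum" && !isMonotonic && temporality == "Cumulative" {
		return "Gauge"
	}
	return typ
}

func main() {
	fmt.Println(normalizeMetricType("Sum", "Cumulative", false)) // Gauge
	fmt.Println(normalizeMetricType("Sum", "Cumulative", true))  // Sum
	fmt.Println(normalizeMetricType("Sum", "Delta", false))      // Sum
}
```

Centralizing the decision at read/query time is what lets the compare delete the `IsMonotonic` field from `AttributeKey` and `MetricDetail` further down: callers no longer need to carry monotonicity around once the type has been normalized.
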
@@ -3311,13 +3317,17 @@ func (r *ClickHouseReader) GetMeterAggregateAttributes(ctx context.Context, orgI
 			return nil, fmt.Errorf("error while scanning meter name: %s", err.Error())
 		}
 
+		// Non-monotonic cumulative sums are treated as gauges
+		if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
+			typ = "Gauge"
+		}
+
 		// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
 		key := v3.AttributeKey{
-			Key:         name,
-			DataType:    v3.AttributeKeyDataTypeFloat64,
-			Type:        v3.AttributeKeyType(typ),
-			IsMonotonic: isMonotonic,
-			IsColumn:    true,
+			Key:      name,
+			DataType: v3.AttributeKeyDataTypeFloat64,
+			Type:     v3.AttributeKeyType(typ),
+			IsColumn: true,
 		}
 		response.AttributeKeys = append(response.AttributeKeys, key)
 	}

@@ -5409,7 +5419,6 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
 			t.metric_name AS metric_name,
 			ANY_VALUE(t.description) AS description,
 			ANY_VALUE(t.type) AS metric_type,
-			ANY_VALUE(t.is_monotonic) AS metric_is_monotonic,
 			ANY_VALUE(t.unit) AS metric_unit,
 			uniq(t.fingerprint) AS timeseries,
 			uniq(metric_name) OVER() AS total

@@ -5441,7 +5450,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
 
 	for rows.Next() {
 		var metric metrics_explorer.MetricDetail
-		if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.IsMonotonic, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
+		if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
 			zap.L().Error("Error scanning metric row", zap.Error(err))
 			return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
 		}

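Note the invariant these two hunks preserve together: the `SELECT` list and the `rows.Scan` targets must stay positionally aligned, so dropping `metric_is_monotonic` from the projection forces the matching removal of `&metric.IsMonotonic` here. A minimal sketch of that invariant with `database/sql` (table and column names are hypothetical):

```go
package main

import (
	"database/sql"
	"log"
)

func scanMetrics(db *sql.DB) error {
	// Two projected columns, two Scan targets, in the same order. Removing a
	// column from the SELECT without removing its target would shift every
	// subsequent value into the wrong field.
	rows, err := db.Query("SELECT metric_name, unit FROM metrics")
	if err != nil {
		return err
	}
	defer rows.Close()
	for rows.Next() {
		var name, unit string
		if err := rows.Scan(&name, &unit); err != nil {
			return err
		}
		log.Println(name, unit)
	}
	return rows.Err()
}
```
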
@@ -5573,7 +5582,6 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
 			if updatedMetrics.Description != "" {
 				response.Metrics[i].Description = updatedMetrics.Description
 			}
-			response.Metrics[i].IsMonotonic = updatedMetrics.IsMonotonic
 		}
 		if samples, exists := samplesMap[response.Metrics[i].MetricName]; exists {
 			response.Metrics[i].Samples = samples

@@ -444,6 +444,10 @@ func getQueryRangeForRelateMetricsList(metricName string, scores metrics_explore
 		Filters: filters,
 	}
 
+	if scores.MetricType == v3.MetricTypeSum && !scores.IsMonotonic && scores.Temporality == v3.Cumulative {
+		scores.MetricType = v3.MetricTypeGauge
+	}
+
 	switch scores.MetricType {
 	case v3.MetricTypeGauge:
 		query.TimeAggregation = v3.TimeAggregationAvg

@@ -456,9 +460,8 @@ func getQueryRangeForRelateMetricsList(metricName string, scores metrics_explore
 	}
 
 	query.AggregateAttribute = v3.AttributeKey{
-		Key:         metricName,
-		Type:        v3.AttributeKeyType(scores.MetricType),
-		IsMonotonic: scores.IsMonotonic,
+		Key:  metricName,
+		Type: v3.AttributeKeyType(scores.MetricType),
 	}
 
 	query.StepInterval = 60

@@ -549,6 +552,9 @@ func (receiver *SummaryService) GetInspectMetrics(ctx context.Context, params *m
 }
 
 func (receiver *SummaryService) UpdateMetricsMetadata(ctx context.Context, orgID valuer.UUID, params *metrics_explorer.UpdateMetricsMetadataRequest) *model.ApiError {
+	if params.MetricType == v3.MetricTypeSum && !params.IsMonotonic && params.Temporality == v3.Cumulative {
+		params.MetricType = v3.MetricTypeGauge
+	}
 	metadata := model.UpdateMetricsMetadata{
 		MetricName: params.MetricName,
 		MetricType: params.MetricType,

@@ -36,7 +36,6 @@ type MetricDetail struct {
 	TimeSeries   uint64 `json:"timeseries"`
 	Samples      uint64 `json:"samples"`
 	LastReceived int64  `json:"lastReceived"`
-	IsMonotonic  bool   `json:"is_monotonic"`
 }
 
 type TreeMapResponseItem struct {

@@ -381,12 +381,11 @@ func (t AttributeKeyType) String() string {
 }
 
 type AttributeKey struct {
-	Key         string               `json:"key"`
-	DataType    AttributeKeyDataType `json:"dataType"`
-	Type        AttributeKeyType     `json:"type"`
-	IsColumn    bool                 `json:"isColumn"`
-	IsMonotonic bool                 `json:"isMonotonic"`
-	IsJSON      bool                 `json:"isJSON"`
+	Key      string               `json:"key"`
+	DataType AttributeKeyDataType `json:"dataType"`
+	Type     AttributeKeyType     `json:"type"`
+	IsColumn bool                 `json:"isColumn"`
+	IsJSON   bool                 `json:"isJSON"`
 }
 
 func (a AttributeKey) CacheKey() string {

@@ -258,7 +258,7 @@ func TestPrepareLinksToLogsV5(t *testing.T) {
 	ts := time.UnixMilli(1753527163000)
 
 	link := rule.prepareLinksToLogs(context.Background(), ts, labels.Labels{})
-	assert.Contains(t, link, "compositeQuery=%257B%2522queryType%2522%253A%2522builder%2522%252C%2522builder%2522%253A%257B%2522queryData%2522%253A%255B%257B%2522queryName%2522%253A%2522A%2522%252C%2522stepInterval%2522%253A60%252C%2522dataSource%2522%253A%2522logs%2522%252C%2522aggregateOperator%2522%253A%2522noop%2522%252C%2522aggregateAttribute%2522%253A%257B%2522key%2522%253A%2522%2522%252C%2522dataType%2522%253A%2522%2522%252C%2522type%2522%253A%2522%2522%252C%2522isColumn%2522%253Afalse%252C%2522isMonotonic%2522%253Afalse%252C%2522isJSON%2522%253Afalse%257D%252C%2522expression%2522%253A%2522A%2522%252C%2522disabled%2522%253Afalse%252C%2522limit%2522%253A0%252C%2522offset%2522%253A0%252C%2522pageSize%2522%253A0%252C%2522ShiftBy%2522%253A0%252C%2522IsAnomaly%2522%253Afalse%252C%2522QueriesUsedInFormula%2522%253Anull%252C%2522filter%2522%253A%257B%2522expression%2522%253A%2522service.name%2BEXISTS%2522%257D%257D%255D%252C%2522queryFormulas%2522%253A%255B%255D%257D%257D&timeRange=%7B%22start%22%3A1753526700000%2C%22end%22%3A1753527000000%2C%22pageSize%22%3A100%7D&startTime=1753526700000&endTime=1753527000000&options=%7B%22maxLines%22%3A0%2C%22format%22%3A%22%22%2C%22selectColumns%22%3Anull%7D")
+	assert.Contains(t, link, "compositeQuery=%257B%2522queryType%2522%253A%2522builder%2522%252C%2522builder%2522%253A%257B%2522queryData%2522%253A%255B%257B%2522queryName%2522%253A%2522A%2522%252C%2522stepInterval%2522%253A60%252C%2522dataSource%2522%253A%2522logs%2522%252C%2522aggregateOperator%2522%253A%2522noop%2522%252C%2522aggregateAttribute%2522%253A%257B%2522key%2522%253A%2522%2522%252C%2522dataType%2522%253A%2522%2522%252C%2522type%2522%253A%2522%2522%252C%2522isColumn%2522%253Afalse%252C%2522isJSON%2522%253Afalse%257D%252C%2522expression%2522%253A%2522A%2522%252C%2522disabled%2522%253Afalse%252C%2522limit%2522%253A0%252C%2522offset%2522%253A0%252C%2522pageSize%2522%253A0%252C%2522ShiftBy%2522%253A0%252C%2522IsAnomaly%2522%253Afalse%252C%2522QueriesUsedInFormula%2522%253Anull%252C%2522filter%2522%253A%257B%2522expression%2522%253A%2522service.name%2BEXISTS%2522%257D%257D%255D%252C%2522queryFormulas%2522%253A%255B%255D%257D%257D&timeRange=%7B%22start%22%3A1753526700000%2C%22end%22%3A1753527000000%2C%22pageSize%22%3A100%7D&startTime=1753526700000&endTime=1753527000000&options=%7B%22maxLines%22%3A0%2C%22format%22%3A%22%22%2C%22selectColumns%22%3Anull%7D")
 }
 
 func TestPrepareLinksToTracesV5(t *testing.T) {

@@ -26,6 +26,9 @@ const (
 	IncreaseMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`
 
 	OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
+
+	RateWithStartTs     = "multiIf(row_number() OVER rate_window = 1 AND earliest_start_ts < %d, NAN, row_number() OVER rate_window = 1, sum_all_values / (ts - earliest_start_ts), earliest_start_ts = lagInFrame(latest_start_ts, 1) OVER rate_window, (sum_all_values - lagInFrame(latest_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1)), sum_all_values / (ts - lagInFrame(ts, 1))) AS per_series_value"
+	IncreaseWithStartTs = "multiIf(row_number() OVER rate_window = 1 AND earliest_start_ts < %d, NAN, row_number() OVER rate_window = 1, sum_all_values, earliest_start_ts = lagInFrame(latest_start_ts, 1) OVER rate_window, sum_all_values - lagInFrame(latest_value, 1) OVER rate_window, sum_all_values) AS per_series_value"
 )
 
 type MetricQueryStatementBuilder struct {

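The single `%d` in each new constant is filled with the query window start (as `fmt.Sprintf(RateWithStartTs, start)` further down in this compare), so the first row of a series whose `start_ts` predates the window yields `NAN` rather than a spurious rate or increase computed against unseen history. A minimal sketch of the instantiation, with the expression abbreviated:

```go
package main

import "fmt"

// Abbreviated stand-in for RateWithStartTs above; only the start-ts guard
// that the %d fills in is kept (the full expression is in the diff).
const rateWithStartTs = "multiIf(row_number() OVER rate_window = 1 AND earliest_start_ts < %d, NAN, ...) AS per_series_value"

func main() {
	var start uint64 = 1753526700000 // window start in Unix ms, as passed by the builder
	fmt.Printf(rateWithStartTs+"\n", start)
}
```
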
@@ -330,6 +333,9 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggregationCTE(
 	if query.Aggregations[0].Temporality == metrictypes.Delta {
 		return b.buildTemporalAggDelta(ctx, start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
 	} else if query.Aggregations[0].Temporality != metrictypes.Multiple {
+		if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationIncrease || query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
+			return b.buildTemporalAggCumulativeOrUnspecifiedWithStartTs(ctx, start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
+		}
 		return b.buildTemporalAggCumulativeOrUnspecified(ctx, start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
 	}
 	return b.buildTemporalAggForMultipleTemporalities(ctx, start, end, query, timeSeriesCTE, timeSeriesCTEArgs)

@@ -382,6 +388,101 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
 	return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
 }
 
+func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecifiedWithStartTs(
+	ctx context.Context,
+	start, end uint64,
+	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
+	timeSeriesCTE string,
+	timeSeriesCTEArgs []any,
+) (string, []any, error) {
+	stepSec := int64(query.StepInterval.Seconds())
+
+	moreInfoQueryBuilder := sqlbuilder.NewSelectBuilder()
+	moreInfoQueryBuilder.Select("fingerprint")
+	moreInfoQueryBuilder.SelectMore(fmt.Sprintf(
+		"toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(%d)) AS ts",
+		stepSec,
+	))
+	moreInfoQueryBuilder.SelectMore("toDateTime(intDiv(start_timestamp_unix_milli, 1000)) AS start_ts")
+	for _, g := range query.GroupBy {
+		moreInfoQueryBuilder.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
+	}
+
+	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	if err != nil {
+		return "", nil, err
+	}
+	moreInfoQueryBuilder.SelectMore(fmt.Sprintf("%s AS max_value", aggCol))
+
+	colWithLatestValue, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, metrictypes.TimeAggregationLatest, query.Aggregations[0].TableHints)
+	if err != nil {
+		return "", nil, err
+	}
+	moreInfoQueryBuilder.SelectMore(fmt.Sprintf("%s AS latest_value", colWithLatestValue))
+	moreInfoQueryBuilder.SelectMore("max(unix_milli) AS latest_timestamp")
+
+	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	moreInfoQueryBuilder.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
+	moreInfoQueryBuilder.JoinWithOption(sqlbuilder.InnerJoin, timeSeriesCTE, "points.fingerprint = filtered_time_series.fingerprint")
+	moreInfoQueryBuilder.Where(
+		moreInfoQueryBuilder.In("metric_name", query.Aggregations[0].MetricName),
+		moreInfoQueryBuilder.GTE("unix_milli", start),
+		moreInfoQueryBuilder.LT("unix_milli", end),
+	)
+	moreInfoQueryBuilder.GroupBy("fingerprint", "ts", "start_ts")
+	moreInfoQueryBuilder.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
+
+	moreInfoPerRowQuery, moreInfoPerRowArgs := moreInfoQueryBuilder.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
+
+	innerQueryBuilder := sqlbuilder.NewSelectBuilder()
+	innerQueryBuilder.Select("fingerprint")
+	innerQueryBuilder.SelectMore("ts")
+	for _, g := range query.GroupBy {
+		innerQueryBuilder.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
+	}
+	innerQueryBuilder.SelectMore("max(start_ts) AS latest_start_ts")
+	innerQueryBuilder.SelectMore("min(start_ts) AS earliest_start_ts")
+	innerQueryBuilder.SelectMore("sum(max_value) AS sum_all_values")
+	innerQueryBuilder.SelectMore("argMax(latest_value, latest_timestamp) AS latest_value")
+
+	innerQueryBuilder.From(fmt.Sprintf("(%s)", moreInfoPerRowQuery))
+
+	innerQueryBuilder.GroupBy("fingerprint", "ts")
+	innerQueryBuilder.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
+
+	innerQuery, innerQueryArgs := innerQueryBuilder.BuildWithFlavor(sqlbuilder.ClickHouse, moreInfoPerRowArgs...)
+
+	switch query.Aggregations[0].TimeAggregation {
+	case metrictypes.TimeAggregationRate:
+		rateExpr := fmt.Sprintf(RateWithStartTs, start)
+		wrapped := sqlbuilder.NewSelectBuilder()
+		wrapped.Select("ts")
+		wrapped.SelectMore("latest_value")
+		for _, g := range query.GroupBy {
+			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
+		}
+		wrapped.SelectMore(rateExpr)
+		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
+		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, moreInfoPerRowArgs...)
+		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
+
+	case metrictypes.TimeAggregationIncrease:
+		incExpr := fmt.Sprintf(IncreaseWithStartTs, start)
+		wrapped := sqlbuilder.NewSelectBuilder()
+		wrapped.Select("ts")
+		wrapped.SelectMore("latest_value")
+		for _, g := range query.GroupBy {
+			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
+		}
+		wrapped.SelectMore(incExpr)
+		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
+		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, moreInfoPerRowArgs...)
+		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
+	default:
+		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", innerQuery), innerQueryArgs, nil
+	}
+}
+
 func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
 	_ context.Context,
 	start, end uint64,

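For intuition, here is a simplified host-side rendering (assumed semantics, not the ClickHouse SQL) of what `IncreaseWithStartTs` computes for one fingerprint's buckets in `ts` order: the first bucket emits `NaN` when its `start_ts` predates the window, a bucket sharing its predecessor's `start_ts` contributes a delta, and a changed `start_ts` signals a counter reset, so the whole value counts. The real expression compares `earliest_start_ts` against the previous row's `latest_start_ts` and subtracts `lagInFrame(latest_value, 1)`; this sketch collapses those into one `startTs` and the bucket sum:

```go
package main

import (
	"fmt"
	"math"
)

type row struct {
	startTs int64   // stand-in for earliest_start_ts of this bucket
	sum     float64 // stand-in for sum_all_values
}

func increase(rows []row, windowStart int64) []float64 {
	out := make([]float64, len(rows))
	for i, r := range rows {
		switch {
		case i == 0 && r.startTs < windowStart:
			out[i] = math.NaN() // history precedes the window: increase unknown
		case i == 0:
			out[i] = r.sum // series began inside the window: full value counts
		case r.startTs == rows[i-1].startTs:
			out[i] = r.sum - rows[i-1].sum // same segment: plain delta
		default:
			out[i] = r.sum // start_ts changed: counter reset, whole value counts
		}
	}
	return out
}

func main() {
	rows := []row{{100, 5}, {100, 9}, {200, 3}}
	fmt.Println(increase(rows, 100)) // [5 4 3]
}
```
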