Mirror of https://github.com/SigNoz/signoz.git (synced 2026-04-28 06:30:33 +01:00)
Compare commits: nv/4325...issue_4785 (1 commit, afee062eaf)
@@ -627,19 +627,29 @@ func convertTimeSeriesDataToScalar(tsData *qbtypes.TimeSeriesData, queryName str
 		return &qbtypes.ScalarData{QueryName: queryName}
 	}
 
-	columns := []*qbtypes.ColumnDescriptor{}
-
-	// Add group columns from first series
-	if len(tsData.Aggregations[0].Series) > 0 {
-		for _, label := range tsData.Aggregations[0].Series[0].Labels {
-			columns = append(columns, &qbtypes.ColumnDescriptor{
-				TelemetryFieldKey: label.Key,
-				QueryName:         queryName,
-				Type:              qbtypes.ColumnTypeGroup,
-			})
+	// Series can have ragged label sets; build the column schema from the
+	// union of all label keys (first-seen order) and fill rows by key lookup.
+	keyOrder := []telemetrytypes.TelemetryFieldKey{}
+	keyIndex := map[string]int{}
+	for _, series := range tsData.Aggregations[0].Series {
+		for _, label := range series.Labels {
+			if _, ok := keyIndex[label.Key.Name]; ok {
+				continue
+			}
+			keyIndex[label.Key.Name] = len(keyOrder)
+			keyOrder = append(keyOrder, label.Key)
 		}
 	}
 
+	columns := make([]*qbtypes.ColumnDescriptor, 0, len(keyOrder)+len(tsData.Aggregations))
+	for _, key := range keyOrder {
+		columns = append(columns, &qbtypes.ColumnDescriptor{
+			TelemetryFieldKey: key,
+			QueryName:         queryName,
+			Type:              qbtypes.ColumnTypeGroup,
+		})
+	}
+
 	// Add aggregation columns
 	for _, agg := range tsData.Aggregations {
 		name := agg.Alias
@@ -655,18 +665,18 @@ func convertTimeSeriesDataToScalar(tsData *qbtypes.TimeSeriesData, queryName str
 		})
 	}
 
-	// Build rows
+	// Build rows.
+	groupColCount := len(keyOrder)
 	data := [][]any{}
 	for seriesIdx, series := range tsData.Aggregations[0].Series {
 		row := make([]any, len(columns))
 
-		// Add group values
-		for i, label := range series.Labels {
-			row[i] = label.Value
+		// Place each label under its key's column (by lookup, not index).
+		for _, label := range series.Labels {
+			row[keyIndex[label.Key.Name]] = label.Value
 		}
 
 		// Add aggregation values (last value)
-		groupColCount := len(series.Labels)
 		for aggIdx, agg := range tsData.Aggregations {
 			if seriesIdx < len(agg.Series) && len(agg.Series[seriesIdx].Values) > 0 {
 				lastValue := agg.Series[seriesIdx].Values[len(agg.Series[seriesIdx].Values)-1].Value
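For readers skimming the diff, here is a minimal, self-contained sketch of the alignment technique the new code uses. The types and function below (label, series, toScalarRows) are invented, simplified stand-ins for illustration, not the actual qbtypes structs: the column schema is the union of label keys across all series in first-seen order, and each row is filled by key lookup, so a series missing a label leaves a nil cell instead of shifting later values.

// Illustrative sketch only: label, series, and toScalarRows are invented,
// simplified stand-ins for the query-builder types, not SigNoz APIs.
package main

import "fmt"

type label struct{ key, value string }

type series struct {
	labels []label // label sets can be "ragged": different keys per series
	value  float64 // last aggregated value of the series
}

// toScalarRows builds the column order from the union of label keys
// (first-seen order) and fills each row by key lookup, not by position.
func toScalarRows(seriesList []series) ([]string, [][]any) {
	keyOrder := []string{}
	keyIndex := map[string]int{}
	for _, s := range seriesList {
		for _, l := range s.labels {
			if _, ok := keyIndex[l.key]; !ok {
				keyIndex[l.key] = len(keyOrder)
				keyOrder = append(keyOrder, l.key)
			}
		}
	}

	rows := make([][]any, 0, len(seriesList))
	for _, s := range seriesList {
		row := make([]any, len(keyOrder)+1) // group columns + one value column
		for _, l := range s.labels {
			row[keyIndex[l.key]] = l.value // missing keys stay nil instead of shifting values
		}
		row[len(keyOrder)] = s.value
		rows = append(rows, row)
	}
	return append(keyOrder, "__result"), rows
}

func main() {
	cols, rows := toScalarRows([]series{
		{labels: []label{{"label_1", "orphan-0"}}, value: 20},
		{labels: []label{{"label_1", "box-0"}, {"label_2", "rpc-0"}}, value: 10},
	})
	fmt.Println(cols)             // [label_1 label_2 __result]
	fmt.Println(rows[0], rows[1]) // [orphan-0 <nil> 20] [box-0 rpc-0 10]
}

With the same two ragged series used by the new test below, this produces columns [label_1 label_2 __result] and rows [orphan-0 nil 20] and [box-0 rpc-0 10].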
pkg/querier/postprocess_test.go (new file, 53 lines)
@@ -0,0 +1,53 @@
+package querier
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+
+	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
+	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+)
+
+// Multiple series with different number of labels, shouldn't panic and should align labels correctly.
+func TestConvertTimeSeriesDataToScalar_RaggedLabels(t *testing.T) {
+	label := func(name string, value any) *qbtypes.Label {
+		return &qbtypes.Label{
+			Key:   telemetrytypes.TelemetryFieldKey{Name: name},
+			Value: value,
+		}
+	}
+	series := func(labels []*qbtypes.Label, value float64) *qbtypes.TimeSeries {
+		return &qbtypes.TimeSeries{
+			Labels: labels,
+			Values: []*qbtypes.TimeSeriesValue{{Timestamp: 1, Value: value}},
+		}
+	}
+
+	tsData := &qbtypes.TimeSeriesData{
+		QueryName: "A",
+		Aggregations: []*qbtypes.AggregationBucket{{
+			Index: 0,
+			Series: []*qbtypes.TimeSeries{
+				series([]*qbtypes.Label{label("label_1", "orphan-0")}, 20),
+				series([]*qbtypes.Label{label("label_1", "box-0"), label("label_2", "rpc-0")}, 10),
+			},
+		}},
+	}
+
+	var sd *qbtypes.ScalarData
+	require.NotPanics(t, func() {
+		sd = convertTimeSeriesDataToScalar(tsData, "A")
+	})
+
+	require.NotNil(t, sd)
+	require.Len(t, sd.Columns, 3)
+	assert.Equal(t, "label_1", sd.Columns[0].Name)
+	assert.Equal(t, "label_2", sd.Columns[1].Name)
+	assert.Equal(t, "__result_0", sd.Columns[2].Name)
+
+	require.Len(t, sd.Data, 2)
+	assert.Equal(t, []any{"orphan-0", nil, 20.0}, sd.Data[0])
+	assert.Equal(t, []any{"box-0", "rpc-0", 10.0}, sd.Data[1])
+}
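Assuming a standard module checkout of the repository with a working Go toolchain, the new test can be run on its own with: go test ./pkg/querier -run TestConvertTimeSeriesDataToScalar_RaggedLabels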
@@ -426,17 +426,9 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
 	}
 	nonExistentMetrics := []string{}
 	var dormantMetricsWarningMsg string
-	// internal metrics aren't user-controlled — skip errors/warnings for them since users can't act on them
-	isInternalMetric := func(n string) bool { return strings.HasPrefix(n, "signoz.") || strings.HasPrefix(n, "signoz_") }
-	externalMissingMetrics := make([]string, 0, len(missingMetrics))
-	for _, m := range missingMetrics {
-		if !isInternalMetric(m) {
-			externalMissingMetrics = append(externalMissingMetrics, m)
-		}
-	}
-	if len(externalMissingMetrics) > 0 {
-		lastSeenInfo, _ := q.metadataStore.FetchLastSeenInfoMulti(ctx, externalMissingMetrics...)
-		for _, missingMetricName := range externalMissingMetrics {
+	if len(missingMetrics) > 0 {
+		lastSeenInfo, _ := q.metadataStore.FetchLastSeenInfoMulti(ctx, missingMetrics...)
+		for _, missingMetricName := range missingMetrics {
 			if ts, ok := lastSeenInfo[missingMetricName]; ok && ts > 0 {
 				continue
 			}
@@ -448,22 +440,24 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
 			return nil, errors.NewNotFoundf(errors.CodeNotFound, "the following metrics were not found: %s", strings.Join(nonExistentMetrics, ", "))
 		}
 		lastSeenStr := func(name string) string {
-			ts := lastSeenInfo[name]
-			ago := humanize.RelTime(time.UnixMilli(ts), time.Now(), "ago", "from now")
-			return fmt.Sprintf("%s (last seen %s)", name, ago)
+			if ts, ok := lastSeenInfo[name]; ok && ts > 0 {
+				ago := humanize.RelTime(time.UnixMilli(ts), time.Now(), "ago", "from now")
+				return fmt.Sprintf("%s (last seen %s)", name, ago)
+			}
+			return name // this case won't come cuz lastSeenStr is never called for metrics in nonExistentMetrics
 		}
-		if len(externalMissingMetrics) == 1 {
-			dormantMetricsWarningMsg = fmt.Sprintf("no data found for the metric %s in the query time range", lastSeenStr(externalMissingMetrics[0]))
+		if len(missingMetrics) == 1 {
+			dormantMetricsWarningMsg = fmt.Sprintf("no data found for the metric %s in the query time range", lastSeenStr(missingMetrics[0]))
 		} else {
-			parts := make([]string, len(externalMissingMetrics))
-			for i, m := range externalMissingMetrics {
+			parts := make([]string, len(missingMetrics))
+			for i, m := range missingMetrics {
 				parts[i] = lastSeenStr(m)
 			}
 			dormantMetricsWarningMsg = fmt.Sprintf("no data found for the following metrics in the query time range: %s", strings.Join(parts, ", "))
 		}
 	}
 	preseededResults := make(map[string]any)
-	for _, name := range missingMetricQueries {
+	for _, name := range missingMetricQueries { // at this point missing metrics will not have any non existent metrics, only normal ones
 		switch req.RequestType {
 		case qbtypes.RequestTypeTimeSeries:
 			preseededResults[name] = &qbtypes.TimeSeriesData{QueryName: name}
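As a side note on the warning path above, here is a minimal sketch of how the "last seen" suffix is assembled. It mirrors the shape of the lastSeenStr helper in the diff; the metric names and the lastSeen map contents are hypothetical stand-ins for what FetchLastSeenInfoMulti would return.

// Illustrative sketch only: the metric names and the lastSeen map are
// hypothetical stand-ins for what FetchLastSeenInfoMulti would return.
package main

import (
	"fmt"
	"strings"
	"time"

	"github.com/dustin/go-humanize"
)

func main() {
	// Last-seen timestamps (unix milliseconds) keyed by metric name.
	lastSeen := map[string]int64{
		"http_requests_total": time.Now().Add(-3 * time.Hour).UnixMilli(),
	}

	// Same shape as the lastSeenStr helper in the diff: append a relative
	// "last seen" suffix when a timestamp is known, otherwise just the name.
	lastSeenStr := func(name string) string {
		if ts, ok := lastSeen[name]; ok && ts > 0 {
			ago := humanize.RelTime(time.UnixMilli(ts), time.Now(), "ago", "from now")
			return fmt.Sprintf("%s (last seen %s)", name, ago)
		}
		return name
	}

	missing := []string{"http_requests_total", "queue_depth"}
	parts := make([]string, len(missing))
	for i, m := range missing {
		parts[i] = lastSeenStr(m)
	}
	// Prints: no data found for the following metrics in the query time range:
	//   http_requests_total (last seen 3 hours ago), queue_depth
	fmt.Printf("no data found for the following metrics in the query time range: %s\n", strings.Join(parts, ", "))
}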
@@ -640,32 +640,6 @@ def test_non_existent_metrics_returns_404(
     assert get_error_message(response.json()) == "could not find the metric whatevergoennnsgoeshere"
 
 
-def test_non_existent_internal_metrics_returns_no_warning(
-    signoz: types.SigNoz,
-    create_user_admin: None,  # pylint: disable=unused-argument
-    get_token: Callable[[str, str], str],
-) -> None:
-
-    now = datetime.now(tz=UTC).replace(second=0, microsecond=0)
-    metric_name = "signoz_calls_total"
-
-    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
-    query = build_builder_query(
-        "A",
-        metric_name,
-        "doesnotreallymatter",
-        "sum",
-    )
-
-    end_ms = int(now.timestamp() * 1000)
-
-    start_2h = int((now - timedelta(hours=2)).timestamp() * 1000)
-    response = make_query_request(signoz, token, start_2h, end_ms, [query])
-    assert response.status_code == HTTPStatus.OK
-    data = response.json()
-    assert get_all_warnings(data) == []
-
-
 # Verify /api/v1/fields/values filters label values by metricNamespace prefix.
 # Inserts metrics under ns.a and ns.b, then asserts a specific prefix returns
 # only matching values while a common prefix returns both.