Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-09 03:02:20 +00:00)

Compare commits: 22 commits, replace-pr...qb-json-fi
Commits (SHA1): bac0e5f499, c79373314a, 858cd287fa, afdb674068, 30a6721472, 518dfcbe59, 424127c27c, 2dcb817de1, 391e889f96, 9254b879a9, f6f8c78aaf, 3c99dfdfa5, ac62103228, 491bf14bd0, 6ed72519b8, fe910aaa0f, 0d362b3ba8, 51e9ffb847, f497a154a2, 659fa361ef, 84e77182f6, 32619869e7
.github/CODEOWNERS (vendored, 3 lines changed)
@@ -132,3 +132,6 @@
/frontend/src/pages/PublicDashboard/ @SigNoz/pulse-frontend
/frontend/src/container/PublicDashboardContainer/ @SigNoz/pulse-frontend

## UplotV2
/frontend/src/lib/uPlotV2/ @SigNoz/pulse-frontend
README.md (11 lines changed)
@@ -66,6 +66,17 @@ Read [more](https://signoz.io/metrics-and-dashboards/).

### LLM Observability

Monitor and debug your LLM applications with comprehensive observability. Track LLM calls, analyze token usage, monitor performance, and gain insights into your AI application's behavior in production.

SigNoz LLM observability helps you understand how your language models are performing, identify issues with prompts and responses, track token usage and costs, and optimize your AI applications for better performance and reliability.

[Get started with LLM Observability →](https://signoz.io/docs/llm-observability/)

### Alerts

Use alerts in SigNoz to get notified when anything unusual happens in your application. You can set alerts on any type of telemetry signal (logs, metrics, traces), create thresholds and set up a notification channel to get notified. Advanced features like alert history and anomaly detection can help you create smarter alerts.
@@ -176,7 +176,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.110.0
image: signoz/signoz:v0.110.1
command:
- --config=/root/config/prometheus.yml
ports:

@@ -117,7 +117,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:v0.110.0
image: signoz/signoz:v0.110.1
command:
- --config=/root/config/prometheus.yml
ports:

@@ -179,7 +179,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.110.0}
image: signoz/signoz:${VERSION:-v0.110.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml

@@ -111,7 +111,7 @@ services:
# - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
signoz:
!!merge <<: *db-depend
image: signoz/signoz:${VERSION:-v0.110.0}
image: signoz/signoz:${VERSION:-v0.110.1}
container_name: signoz
command:
- --config=/root/config/prometheus.yml
@@ -50,7 +50,7 @@ type GetAnomaliesResponse struct {
//
// ^ ^
// | |
// (rounded value for past peiod) + (seasonal growth)
// (rounded value for past period) + (seasonal growth)
//
// score = abs(value - prediction) / stddev (current_season_query)
type anomalyQueryParams struct {
@@ -74,12 +74,12 @@ type anomalyQueryParams struct {
// : For daily seasonality, this is the query range params for the (now-2d-5m, now-1d)
// : For hourly seasonality, this is the query range params for the (now-2h-5m, now-1h)
PastSeasonQuery *v3.QueryRangeParamsV3
// Past2SeasonQuery is the query range params for past 2 seasonal period to the current season
// Past2SeasonQuery is the query range params for past 2 seasonal periods to the current season
// Example: For weekly seasonality, this is the query range params for the (now-3w-5m, now-2w)
// : For daily seasonality, this is the query range params for the (now-3d-5m, now-2d)
// : For hourly seasonality, this is the query range params for the (now-3h-5m, now-2h)
Past2SeasonQuery *v3.QueryRangeParamsV3
// Past3SeasonQuery is the query range params for past 3 seasonal period to the current season
// Past3SeasonQuery is the query range params for past 3 seasonal periods to the current season
// Example: For weekly seasonality, this is the query range params for the (now-4w-5m, now-3w)
// : For daily seasonality, this is the query range params for the (now-4d-5m, now-3d)
// : For hourly seasonality, this is the query range params for the (now-4h-5m, now-3h)
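The comment above defines the prediction and scoring formulas used by the anomaly detector. A minimal TypeScript sketch of that arithmetic, not taken from this codebase; the function and parameter names are illustrative:

```ts
// Illustrative only: mirrors the formulas in the comment above.
// prediction = (rounded value for the past period) + (seasonal growth)
// score      = abs(value - prediction) / stddev(current season)
function anomalyScore(
	value: number,
	pastPeriodValue: number,
	seasonalGrowth: number,
	currentSeasonStdDev: number,
): number {
	const prediction = Math.round(pastPeriodValue) + seasonalGrowth;
	if (currentSeasonStdDev === 0) {
		return 0; // assumption: avoid dividing by zero when the season is flat
	}
	return Math.abs(value - prediction) / currentSeasonStdDev;
}

// Example: value 120, past period 100, seasonal growth 10, stddev 5 -> score 2
console.log(anomalyScore(120, 100, 10, 5));
```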
@@ -234,6 +234,11 @@ func (r *AnomalyRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID, t
}
}

hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}

var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)

@@ -285,6 +290,11 @@ func (r *AnomalyRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUID,

queryResult := transition.ConvertV5TimeSeriesDataToV4Result(qbResult)

hasData := len(queryResult.AnomalyScores) > 0
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
return ruletypes.Vector{*missingDataAlert}, nil
}

var resultVector ruletypes.Vector

scoresJSON, _ := json.Marshal(queryResult.AnomalyScores)
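Both hunks short-circuit the evaluation through HandleMissingDataAlert before any result vector is built. A hedged TypeScript sketch of the AlertOnAbsent / AbsentFor behaviour these calls imply, as exercised by the tests below (names and types are illustrative, not the actual ruletypes API):

```ts
// Sketch of the missing-data check:
// - alertOnAbsent: fire a "no data" alert when a query returns nothing
// - absentForMinutes: grace period measured from the last eval that had data
interface MissingDataState {
	lastTimestampWithData?: Date;
}

function shouldFireMissingDataAlert(
	state: MissingDataState,
	now: Date,
	hasData: boolean,
	alertOnAbsent: boolean,
	absentForMinutes: number,
): boolean {
	if (hasData) {
		state.lastTimestampWithData = now; // remember when data was last seen
		return false;
	}
	if (!alertOnAbsent) {
		return false;
	}
	if (!state.lastTimestampWithData) {
		return true; // assumption: no data ever seen counts as absent
	}
	const minutesSinceData =
		(now.getTime() - state.lastTimestampWithData.getTime()) / 60000;
	return minutesSinceData > absentForMinutes;
}
```

With absentForMinutes = 5 this matches the test cases below: an empty eval 4 minutes after the last datapoint stays quiet, while one 6 minutes after fires the alert.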
ee/query-service/rules/anomaly_test.go (new file, 268 lines)
@@ -0,0 +1,268 @@
package rules

import (
"context"
"testing"
"time"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/SigNoz/signoz/ee/query-service/anomaly"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/query-service/app/clickhouseReader"
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
"github.com/SigNoz/signoz/pkg/telemetrystore"
"github.com/SigNoz/signoz/pkg/telemetrystore/telemetrystoretest"
"github.com/SigNoz/signoz/pkg/types/ruletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

// mockAnomalyProvider is a mock implementation of anomaly.Provider for testing.
// We need this because the anomaly provider makes 6 different queries for various
// time periods (current, past period, current season, past season, past 2 seasons,
// past 3 seasons), making it cumbersome to create mock data.
type mockAnomalyProvider struct {
responses []*anomaly.GetAnomaliesResponse
callCount int
}

func (m *mockAnomalyProvider) GetAnomalies(ctx context.Context, orgID valuer.UUID, req *anomaly.GetAnomaliesRequest) (*anomaly.GetAnomaliesResponse, error) {
if m.callCount >= len(m.responses) {
return &anomaly.GetAnomaliesResponse{Results: []*v3.Result{}}, nil
}
resp := m.responses[m.callCount]
m.callCount++
return resp, nil
}

func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
// Test basic AlertOnAbsent functionality (without AbsentFor grace period)

baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute
evalTime := baseTime.Add(5 * time.Minute)

target := 500.0

postableRule := ruletypes.PostableRule{
AlertName: "Test anomaly no data",
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(1 * time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
Expression: "A",
DataSource: v3.DataSourceMetrics,
Temporality: v3.Unspecified,
},
},
},
SelectedQuery: "A",
Seasonality: "daily",
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{
Name: "Test anomaly no data",
TargetValue: &target,
MatchType: ruletypes.AtleastOnce,
CompareOp: ruletypes.ValueIsAbove,
}},
},
},
}

responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
},
},
}

cases := []struct {
description string
alertOnAbsent bool
expectAlerts int
}{
{
description: "AlertOnAbsent=false",
alertOnAbsent: false,
expectAlerts: 0,
},
{
description: "AlertOnAbsent=true",
alertOnAbsent: true,
expectAlerts: 1,
},
}

logger := instrumentationtest.New().Logger()

for _, c := range cases {
t.Run(c.description, func(t *testing.T) {
postableRule.RuleCondition.AlertOnAbsent = c.alertOnAbsent

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
options := clickhouseReader.NewOptions("primaryNamespace")
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)

rule, err := NewAnomalyRule(
"test-anomaly-rule",
valuer.GenerateUUID(),
&postableRule,
reader,
nil,
logger,
nil,
)
require.NoError(t, err)

rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseNoData},
}

alertsFound, err := rule.Eval(context.Background(), evalTime)
require.NoError(t, err)
assert.Equal(t, c.expectAlerts, alertsFound)
})
}
}

func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
// Test missing data alert with AbsentFor grace period
// 1. Call Eval with data at time t1, to populate lastTimestampWithDatapoints
// 2. Call Eval without data at time t2
// 3. Alert fires only if t2 - t1 > AbsentFor

baseTime := time.Unix(1700000000, 0)
evalWindow := 5 * time.Minute

// Set target higher than test data so regular threshold alerts don't fire
target := 500.0

postableRule := ruletypes.PostableRule{
AlertName: "Test anomaly no data with AbsentFor",
AlertType: ruletypes.AlertTypeMetric,
RuleType: RuleTypeAnomaly,
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
EvalWindow: ruletypes.Duration(evalWindow),
Frequency: ruletypes.Duration(time.Minute),
}},
RuleCondition: &ruletypes.RuleCondition{
CompareOp: ruletypes.ValueIsAbove,
MatchType: ruletypes.AtleastOnce,
AlertOnAbsent: true,
Target: &target,
CompositeQuery: &v3.CompositeQuery{
QueryType: v3.QueryTypeBuilder,
BuilderQueries: map[string]*v3.BuilderQuery{
"A": {
QueryName: "A",
Expression: "A",
DataSource: v3.DataSourceMetrics,
Temporality: v3.Unspecified,
},
},
},
SelectedQuery: "A",
Seasonality: "daily",
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{
Name: "Test anomaly no data with AbsentFor",
TargetValue: &target,
MatchType: ruletypes.AtleastOnce,
CompareOp: ruletypes.ValueIsAbove,
}},
},
},
}

responseNoData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
AnomalyScores: []*v3.Series{},
},
},
}

cases := []struct {
description string
absentFor uint64
timeBetweenEvals time.Duration
expectAlertOnEval2 int
}{
{
description: "WithinGracePeriod",
absentFor: 5,
timeBetweenEvals: 4 * time.Minute,
expectAlertOnEval2: 0,
},
{
description: "AfterGracePeriod",
absentFor: 5,
timeBetweenEvals: 6 * time.Minute,
expectAlertOnEval2: 1,
},
}

logger := instrumentationtest.New().Logger()

for _, c := range cases {
t.Run(c.description, func(t *testing.T) {
postableRule.RuleCondition.AbsentFor = c.absentFor

t1 := baseTime.Add(5 * time.Minute)
t2 := t1.Add(c.timeBetweenEvals)

responseWithData := &anomaly.GetAnomaliesResponse{
Results: []*v3.Result{
{
QueryName: "A",
AnomalyScores: []*v3.Series{
{
Labels: map[string]string{"test": "label"},
Points: []v3.Point{
{Timestamp: baseTime.UnixMilli(), Value: 1.0},
{Timestamp: baseTime.Add(time.Minute).UnixMilli(), Value: 1.5},
},
},
},
},
},
}

telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, nil)
options := clickhouseReader.NewOptions("primaryNamespace")
reader := clickhouseReader.NewReader(nil, telemetryStore, nil, "", time.Second, nil, nil, options)

rule, err := NewAnomalyRule("test-anomaly-rule", valuer.GenerateUUID(), &postableRule, reader, nil, logger, nil)
require.NoError(t, err)

rule.provider = &mockAnomalyProvider{
responses: []*anomaly.GetAnomaliesResponse{responseWithData, responseNoData},
}

alertsFound1, err := rule.Eval(context.Background(), t1)
require.NoError(t, err)
assert.Equal(t, 0, alertsFound1, "First eval with data should not alert")

alertsFound2, err := rule.Eval(context.Background(), t2)
require.NoError(t, err)
assert.Equal(t, c.expectAlertOnEval2, alertsFound2)
})
}
}
@@ -105,6 +105,7 @@
"i18next": "^21.6.12",
"i18next-browser-languagedetector": "^6.1.3",
"i18next-http-backend": "^1.3.2",
"immer": "11.1.3",
"jest": "^27.5.1",
"js-base64": "^3.7.2",
"less": "^4.1.2",
@@ -28,17 +28,16 @@ import {
QUERY_BUILDER_OPERATORS_BY_KEY_TYPE,
queryOperatorSuggestions,
} from 'constants/antlrQueryConstants';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { useIsDarkMode } from 'hooks/useDarkMode';
import useDebounce from 'hooks/useDebounce';
import { debounce, isNull } from 'lodash-es';
import { Info, TriangleAlert } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import {
IDetailedError,
IQueryContext,
IValidationResult,
} from 'types/antlrQueryTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';
@@ -207,14 +206,9 @@ function QuerySearch({
const lastValueRef = useRef<string>('');
const isMountedRef = useRef<boolean>(true);

const { selectedDashboard } = useDashboard();

const dynamicVariables = useMemo(
() =>
Object.values(selectedDashboard?.data?.variables || {})?.filter(
(variable: IDashboardVariable) => variable.type === 'DYNAMIC',
),
[selectedDashboard],
const dashboardDynamicVariables = useDashboardVariablesByType(
'DYNAMIC',
'values',
);

// Add back the generateOptions function and useEffect
@@ -1069,7 +1063,7 @@ function QuerySearch({
);

// Add dynamic variables suggestions for the current key
const variableName = dynamicVariables?.find(
const variableName = dashboardDynamicVariables?.find(
(variable) => variable?.dynamicVariablesAttribute === keyName,
)?.name;
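This hunk swaps a local useMemo filter for the useDashboardVariablesByType hook. One plausible shape for such a hook, inferred only from its call sites in this diff (the real implementation lives in hooks/dashboard/useDashboardVariablesByType and may differ):

```ts
import { useMemo } from 'react';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

// Hypothetical sketch: return dashboard variables of one type, either as an
// array of values or as an id-keyed map, so components no longer filter
// selectedDashboard.data.variables themselves.
export function useDashboardVariablesByTypeSketch(
	type: IDashboardVariable['type'],
	shape: 'values' | 'map',
): IDashboardVariable[] | Record<string, IDashboardVariable> {
	const { dashboardVariables } = useDashboardVariables();

	return useMemo(() => {
		const entries = Object.entries(dashboardVariables || {}).filter(
			([, variable]) => variable.type === type,
		);
		return shape === 'values'
			? entries.map(([, variable]) => variable)
			: Object.fromEntries(entries);
	}, [dashboardVariables, type, shape]);
}
```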
@@ -21,6 +21,7 @@ import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useGetPublicDashboardMeta } from 'hooks/dashboard/useGetPublicDashboardMeta';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
@@ -44,7 +45,7 @@ import {
import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { sortLayout } from 'providers/Dashboard/util';
import { DashboardData, IDashboardVariable } from 'types/api/dashboard/getAll';
import { DashboardData } from 'types/api/dashboard/getAll';
import { Props } from 'types/api/dashboard/update';
import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
@@ -56,7 +57,11 @@ import { Base64Icons } from '../DashboardSettings/General/utils';
import DashboardVariableSelection from '../DashboardVariablesSelection';
import SettingsDrawer from './SettingsDrawer';
import { VariablesSettingsTab } from './types';
import { DEFAULT_ROW_NAME, downloadObjectAsJson } from './utils';
import {
DEFAULT_ROW_NAME,
downloadObjectAsJson,
sanitizeDashboardData,
} from './utils';

import './Description.styles.scss';

@@ -64,28 +69,6 @@ interface DashboardDescriptionProps {
handle: FullScreenHandle;
}

export function sanitizeDashboardData(
selectedData: DashboardData,
): DashboardData {
if (!selectedData?.variables) {
return selectedData;
}

const updatedVariables = Object.entries(selectedData.variables).reduce(
(acc, [key, value]) => {
const { selectedValue: _selectedValue, ...rest } = value;
acc[key] = rest;
return acc;
},
{} as Record<string, IDashboardVariable>,
);

return {
...selectedData,
variables: updatedVariables,
};
}

// eslint-disable-next-line sonarjs/cognitive-complexity
function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
const { safeNavigate } = useSafeNavigate();
@@ -119,6 +102,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
uuid: selectedDashboard.id,
}
: ({} as DashboardData);
const { dashboardVariables } = useDashboardVariables();

const { title = '', description, tags, image = Base64Icons[0] } =
selectedData || {};
@@ -576,7 +560,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
<section className="dashboard-description-section">{description}</section>
)}

{!isEmpty(selectedData.variables) && (
{!isEmpty(dashboardVariables) && (
<section className="dashboard-variables">
<DashboardVariableSelection />
</section>
@@ -1,3 +1,27 @@
import { DashboardData, IDashboardVariable } from 'types/api/dashboard/getAll';

export function sanitizeDashboardData(
selectedData: DashboardData,
): DashboardData {
if (!selectedData?.variables) {
return selectedData;
}

const updatedVariables = Object.entries(selectedData.variables).reduce(
(acc, [key, value]) => {
const { selectedValue: _selectedValue, ...rest } = value;
acc[key] = rest;
return acc;
},
{} as Record<string, IDashboardVariable>,
);

return {
...selectedData,
variables: updatedVariables,
};
}

export function downloadObjectAsJson(
exportObj: unknown,
exportName: string,
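sanitizeDashboardData, now exported from utils, strips the runtime selectedValue from every variable before a dashboard is exported. A small usage sketch; the dashboard object is a made-up minimal fixture:

```ts
import { DashboardData } from 'types/api/dashboard/getAll';
import { sanitizeDashboardData } from 'container/DashboardContainer/DashboardDescription/utils';

// Hypothetical minimal dashboard: only the fields this example needs.
const dashboard = {
	title: 'My dashboard',
	variables: {
		env: { id: 'env', name: 'env', selectedValue: 'prod' },
	},
} as unknown as DashboardData;

// The sanitized copy keeps the variable definition but drops selectedValue,
// so a user-specific selection is not baked into an exported/shared JSON.
const exportable = sanitizeDashboardData(dashboard);
console.log(exportable.variables?.env); // no selectedValue key on the copy
```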
@@ -14,10 +14,8 @@ import { CustomSelect } from 'components/NewSelect';
import TextToolTip from 'components/TextToolTip';
import { PANEL_GROUP_TYPES } from 'constants/queryBuilder';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import {
createDynamicVariableToWidgetsMap,
getWidgetsHavingDynamicVariableAttribute,
} from 'hooks/dashboard/utils';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import { getWidgetsHavingDynamicVariableAttribute } from 'hooks/dashboard/utils';
import { useGetFieldValues } from 'hooks/dynamicVariables/useGetFieldValues';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { commaValuesParser } from 'lib/dashbaordVariables/customCommaValuesParser';
@@ -243,23 +241,11 @@ function VariableItem({
const [selectedWidgets, setSelectedWidgets] = useState<string[]>([]);

const { selectedDashboard } = useDashboard();
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();

useEffect(() => {
const dynamicVariables = Object.values(
selectedDashboard?.data?.variables || {},
)?.filter((variable: IDashboardVariable) => variable.type === 'DYNAMIC');

const widgets =
selectedDashboard?.data?.widgets?.filter(
(widget) => widget.panelTypes !== PANEL_GROUP_TYPES.ROW,
) || [];
const widgetsHavingDynamicVariables = createDynamicVariableToWidgetsMap(
dynamicVariables,
widgets as Widgets[],
);

if (variableData?.id && variableData.id in widgetsHavingDynamicVariables) {
setSelectedWidgets(widgetsHavingDynamicVariables[variableData.id] || []);
if (variableData?.id && variableData.id in widgetsByDynamicVariableId) {
setSelectedWidgets(widgetsByDynamicVariableId[variableData.id] || []);
} else if (dynamicVariablesSelectedValue?.name) {
const widgets = getWidgetsHavingDynamicVariableAttribute(
dynamicVariablesSelectedValue?.name,
@@ -275,6 +261,7 @@ function VariableItem({
selectedDashboard,
variableData.id,
variableData.name,
widgetsByDynamicVariableId,
]);

useEffect(() => {
@@ -1,4 +1,4 @@
import React, { useEffect, useMemo, useRef, useState } from 'react';
import React, { useEffect, useRef, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { HolderOutlined, PlusOutlined } from '@ant-design/icons';
import type { DragEndEvent, UniqueIdentifier } from '@dnd-kit/core';
@@ -17,11 +17,13 @@ import { RowProps } from 'antd/lib';
import { VariablesSettingsTabHandle } from 'container/DashboardContainer/DashboardDescription/types';
import { convertVariablesToDbFormat } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { useAddDynamicVariableToPanels } from 'hooks/dashboard/useAddDynamicVariableToPanels';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useNotifications } from 'hooks/useNotifications';
import { PenLine, Trash2 } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { TVariableMode } from './types';
import VariableItem from './VariableItem/VariableItem';
@@ -91,13 +93,10 @@ function VariablesSettings({
const { t } = useTranslation(['dashboard']);

const { selectedDashboard, setSelectedDashboard } = useDashboard();
const { dashboardVariables } = useDashboardVariables();

const { notifications } = useNotifications();

const variables = useMemo(() => selectedDashboard?.data?.variables || {}, [
selectedDashboard?.data?.variables,
]);

const [variablesTableData, setVariablesTableData] = useState<any>([]);
const [variblesOrderArr, setVariablesOrderArr] = useState<number[]>([]);
const [existingVariableNamesMap, setExistingVariableNamesMap] = useState<
@@ -147,13 +146,13 @@ function VariablesSettings({
const variableNamesMap = {};

// eslint-disable-next-line no-restricted-syntax
for (const [key, value] of Object.entries(variables)) {
for (const [key, value] of Object.entries(dashboardVariables)) {
const { order, id, name } = value;

tableRowData.push({
key,
name: key,
...variables[key],
...dashboardVariables[key],
id,
});

@@ -174,10 +173,10 @@ function VariablesSettings({
setVariablesTableData(tableRowData);
setVariablesOrderArr(variableOrderArr);
setExistingVariableNamesMap(variableNamesMap);
}, [variables]);
}, [dashboardVariables]);

const updateVariables = (
updatedVariablesData: Dashboard['data']['variables'],
updatedVariablesData: IDashboardVariables,
currentRequestedId?: string,
widgetIds?: string[],
applyToAll?: boolean,
@@ -312,7 +311,7 @@ function VariablesSettings({
currentVariableId?: string,
): boolean => {
// Check if any other dynamic variable already uses this attribute key
const isDuplicateAttributeKey = Object.values(variables).some(
const isDuplicateAttributeKey = Object.values(dashboardVariables).some(
(variable: IDashboardVariable) =>
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute === attributeKey &&
@@ -422,7 +421,7 @@ function VariablesSettings({
{variableViewMode ? (
<VariableItem
variableData={{ ...variableEditData } as IDashboardVariable}
existingVariables={variables}
existingVariables={dashboardVariables}
onSave={onVariableSaveHandler}
onCancel={onDoneVariableViewMode}
validateName={validateVariableName}
@@ -2,6 +2,7 @@ import { memo, useEffect, useState } from 'react';
import { useSelector } from 'react-redux';
import { Row } from 'antd';
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { isEmpty } from 'lodash-es';
import { useDashboard } from 'providers/Dashboard/Dashboard';
@@ -33,9 +34,7 @@ function DashboardVariableSelection(): JSX.Element | null {

const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();

const { data } = selectedDashboard || {};

const { variables } = data || {};
const { dashboardVariables } = useDashboardVariables();

const [variablesTableData, setVariablesTableData] = useState<any>([]);

@@ -48,29 +47,31 @@ function DashboardVariableSelection(): JSX.Element | null {
);

useEffect(() => {
if (variables) {
const tableRowData = [];
const tableRowData = [];

// eslint-disable-next-line no-restricted-syntax
for (const [key, value] of Object.entries(variables)) {
const { id } = value;
// eslint-disable-next-line no-restricted-syntax
for (const [key, value] of Object.entries(dashboardVariables)) {
const { id } = value;

tableRowData.push({
key,
name: key,
...variables[key],
id,
});
}

tableRowData.sort((a, b) => a.order - b.order);

setVariablesTableData(tableRowData);

// Initialize variables with default values if not in URL
initializeDefaultVariables(variables, getUrlVariables, updateUrlVariable);
tableRowData.push({
key,
name: key,
...dashboardVariables[key],
id,
});
}
}, [getUrlVariables, updateUrlVariable, variables]);

tableRowData.sort((a, b) => a.order - b.order);

setVariablesTableData(tableRowData);

// Initialize variables with default values if not in URL
initializeDefaultVariables(
dashboardVariables,
getUrlVariables,
updateUrlVariable,
);
}, [getUrlVariables, updateUrlVariable, dashboardVariables]);

useEffect(() => {
if (variablesTableData.length > 0) {
@@ -94,7 +95,7 @@ function DashboardVariableSelection(): JSX.Element | null {
cycleNodes,
});
}
}, [variables, variablesTableData]);
}, [dashboardVariables, variablesTableData]);

// this handles the case where the dependency order changes i.e. variable list updated via creation or deletion etc. and we need to refetch the variables
// also trigger when the global time changes
@@ -122,7 +123,7 @@ function DashboardVariableSelection(): JSX.Element | null {
if (id) {
// For dynamic variables, only store in localStorage when NOT allSelected
// This makes localStorage much lighter by avoiding storing all individual values
const variable = variables?.[id] || variables?.[name];
const variable = dashboardVariables?.[id] || dashboardVariables?.[name];
const isDynamic = variable?.type === 'DYNAMIC';
updateLocalStorageDashboardVariables(name, value, allSelected, isDynamic);

@@ -185,7 +186,7 @@ function DashboardVariableSelection(): JSX.Element | null {
}
};

if (!variables) {
if (!dashboardVariables) {
return null;
}

@@ -202,7 +203,7 @@ function DashboardVariableSelection(): JSX.Element | null {
variable.type === 'DYNAMIC' ? (
<DynamicVariableSelection
key={`${variable.name}${variable.id}${variable.order}`}
existingVariables={variables}
existingVariables={dashboardVariables}
variableData={{
name: variable.name,
...variable,
@@ -212,7 +213,7 @@ function DashboardVariableSelection(): JSX.Element | null {
) : (
<VariableItem
key={`${variable.name}${variable.id}}${variable.order}`}
existingVariables={variables}
existingVariables={dashboardVariables}
variableData={{
name: variable.name,
...variable,
@@ -3,7 +3,8 @@ import { useCallback } from 'react';
import { useAddDynamicVariableToPanels } from 'hooks/dashboard/useAddDynamicVariableToPanels';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { v4 as uuidv4 } from 'uuid';

import { convertVariablesToDbFormat } from './util';
@@ -27,7 +28,7 @@ interface UseDashboardVariableUpdateReturn {
widgetId?: string,
) => void;
updateVariables: (
updatedVariablesData: Dashboard['data']['variables'],
updatedVariablesData: IDashboardVariables,
currentRequestedId?: string,
widgetIds?: string[],
applyToAll?: boolean,
@@ -106,7 +107,7 @@ export const useDashboardVariableUpdate = (): UseDashboardVariableUpdateReturn =

const updateVariables = useCallback(
(
updatedVariablesData: Dashboard['data']['variables'],
updatedVariablesData: IDashboardVariables,
currentRequestedId?: string,
widgetIds?: string[],
applyToAll?: boolean,
@@ -1,6 +1,7 @@
import { OptionData } from 'components/NewSelect/types';
import { isEmpty, isNull } from 'lodash-es';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

export function areArraysEqual(
a: (string | number | boolean)[],
@@ -21,7 +22,7 @@ export function areArraysEqual(

export const convertVariablesToDbFormat = (
variblesArr: IDashboardVariable[],
): Dashboard['data']['variables'] =>
): IDashboardVariables =>
variblesArr.reduce((result, obj: IDashboardVariable) => {
const { id } = obj;
@@ -0,0 +1,131 @@
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';

export interface ChartDimensions {
width: number;
height: number;
legendWidth: number;
legendHeight: number;
legendsPerSet: number;
}

const AVG_CHAR_WIDTH = 8;
const DEFAULT_AVG_LABEL_LENGTH = 15;
const LEGEND_GAP = 16;
const LEGEND_PADDING = 12;
const LEGEND_LINE_HEIGHT = 36;
const MAX_LEGEND_WIDTH = 400;

/**
 * Average text width from series labels (for legendsPerSet).
 */
export function calculateAverageLegendWidth(legends: string[]): number {
if (legends.length === 0) {
return DEFAULT_AVG_LABEL_LENGTH;
}
const averageLabelLength =
legends.reduce((sum, l) => sum + l.length, 0) / legends.length;
return averageLabelLength * AVG_CHAR_WIDTH;
}

/**
 * Compute how much space to give to the chart area vs. the legend.
 *
 * - For a RIGHT legend, we reserve a vertical column on the right and shrink the chart width.
 * - For a BOTTOM legend, we reserve up to two rows below the chart and shrink the chart height.
 *
 * Implementation details (high level):
 * - Approximates legend item width from label text length, using a fixed average char width.
 * - RIGHT legend:
 *   - `legendWidth` is clamped between 150px and min(MAX_LEGEND_WIDTH, 30% of container width).
 *   - Chart width is `containerWidth - legendWidth`.
 * - BOTTOM legend:
 *   - Computes how many items fit per row, then uses at most 2 rows.
 *   - `legendHeight` is derived from row count, capped by both a fixed pixel max and a % of container height.
 *   - Chart height is `containerHeight - legendHeight`, never below 0.
 * - `legendsPerSet` is the number of legend items that fit horizontally, based on the same text-width approximation.
 *
 * The returned values are the final chart and legend rectangles (width/height),
 * plus `legendsPerSet` which hints how many legend items to show per row.
 */
export function calculateChartDimensions({
containerWidth,
containerHeight,
legendConfig,
seriesLabels,
}: {
containerWidth: number;
containerHeight: number;
legendConfig: LegendConfig;
seriesLabels: string[];
}): ChartDimensions {
// Guard: no space to lay out chart or legend
if (containerWidth <= 0 || containerHeight <= 0) {
return {
width: 0,
height: 0,
legendWidth: 0,
legendHeight: 0,
legendsPerSet: 0,
};
}

// Approximate width of a single legend item based on label text.
const approxLegendItemWidth = calculateAverageLegendWidth(seriesLabels);
const legendItemCount = seriesLabels.length;

if (legendConfig.position === LegendPosition.RIGHT) {
const maxRightLegendWidth = Math.min(MAX_LEGEND_WIDTH, containerWidth * 0.3);
const rightLegendWidth = Math.min(
Math.max(150, approxLegendItemWidth),
maxRightLegendWidth,
);

return {
width: Math.max(0, containerWidth - rightLegendWidth),
height: containerHeight,
legendWidth: rightLegendWidth,
legendHeight: containerHeight,
// Single vertical list on the right.
legendsPerSet: 1,
};
}

const legendRowHeight = LEGEND_LINE_HEIGHT + LEGEND_PADDING;

const legendItemWidth = Math.min(approxLegendItemWidth, 400);
const legendItemsPerRow = Math.max(
1,
Math.floor((containerWidth - LEGEND_PADDING * 2) / legendItemWidth),
);

const legendRowCount = Math.min(
2,
Math.ceil(legendItemCount / legendItemsPerRow),
);

const idealBottomLegendHeight =
legendRowCount > 1
? legendRowCount * legendRowHeight - LEGEND_PADDING
: legendRowHeight;

const maxAllowedLegendHeight = Math.min(2 * legendRowHeight, 80);

const bottomLegendHeight = Math.min(
idealBottomLegendHeight,
maxAllowedLegendHeight,
);

// How many legend items per row in the Legend component.
const legendsPerSet = Math.ceil(
(containerWidth + LEGEND_GAP) /
(Math.min(MAX_LEGEND_WIDTH, approxLegendItemWidth) + LEGEND_GAP),
);

return {
width: containerWidth,
height: Math.max(0, containerHeight - bottomLegendHeight),
legendWidth: containerWidth,
legendHeight: bottomLegendHeight,
legendsPerSet,
};
}
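A usage sketch for the helper above, with made-up container numbers. The LegendConfig literal is narrowed to just a position and cast, and LegendPosition.BOTTOM is assumed to exist alongside RIGHT; neither detail is confirmed by this diff:

```ts
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
import { calculateChartDimensions } from 'container/DashboardContainer/visualization/charts/utils';

// Hypothetical 800x400 panel with a bottom legend and three short series labels.
const dims = calculateChartDimensions({
	containerWidth: 800,
	containerHeight: 400,
	// Assumption: BOTTOM is a valid LegendPosition and position is enough here.
	legendConfig: { position: LegendPosition.BOTTOM } as LegendConfig,
	seriesLabels: ['p50', 'p90', 'p99'],
});

// With three short labels everything fits in one legend row, so the chart keeps
// the full width and gives up one legend row of height at the bottom.
console.log(dims.width, dims.height, dims.legendHeight, dims.legendsPerSet);
```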
@@ -0,0 +1,22 @@
.chart-layout {
position: relative;
display: flex;
width: 100%;
height: 100%;
flex-direction: column;

&--legend-right {
flex-direction: row;

.chart-layout__legend-wrapper {
padding-left: 0 !important;
padding-right: 12px !important;
}
}

&__legend-wrapper {
padding-left: 12px;
padding-bottom: 12px;
overflow: auto;
}
}
@@ -0,0 +1,74 @@
import { useMemo } from 'react';
import cx from 'classnames';
import { calculateChartDimensions } from 'container/DashboardContainer/visualization/charts/utils';
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

import './ChartLayout.styles.scss';

export interface ChartLayoutProps {
legendComponent: (legendPerSet: number) => React.ReactNode;
children: (props: {
chartWidth: number;
chartHeight: number;
}) => React.ReactNode;
layoutChildren?: React.ReactNode;
containerWidth: number;
containerHeight: number;
legendConfig: LegendConfig;
config: UPlotConfigBuilder;
}
export default function ChartLayout({
legendComponent,
children,
layoutChildren,
containerWidth,
containerHeight,
legendConfig,
config,
}: ChartLayoutProps): JSX.Element {
const chartDimensions = useMemo(
() => {
const legendItemsMap = config.getLegendItems();
const seriesLabels = Object.values(legendItemsMap)
.map((item) => item.label)
.filter((label): label is string => label !== undefined);
return calculateChartDimensions({
containerWidth,
containerHeight,
legendConfig,
seriesLabels,
});
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[containerWidth, containerHeight, legendConfig],
);

return (
<div className="chart-layout__container">
<div
className={cx('chart-layout', {
'chart-layout--legend-right':
legendConfig.position === LegendPosition.RIGHT,
})}
>
<div className="chart-layout__content">
{children({
chartWidth: chartDimensions.width,
chartHeight: chartDimensions.height,
})}
</div>
<div
className="chart-layout__legend-wrapper"
style={{
height: chartDimensions.legendHeight,
width: chartDimensions.legendWidth,
}}
>
{legendComponent(chartDimensions.legendsPerSet)}
</div>
</div>
{layoutChildren}
</div>
);
}
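ChartLayout uses a render-prop contract: the chart body receives the computed chartWidth/chartHeight while the legend callback gets legendsPerSet. A hedged JSX sketch of wiring it up; the ChartLayout import path, the LegendConfig literal, and the Plot/Legend components are all illustrative placeholders, not parts of this diff:

```tsx
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
// Path assumed for illustration; the new file's location is not shown in this diff.
import ChartLayout from 'container/DashboardContainer/visualization/charts/ChartLayout/ChartLayout';

// Placeholder stand-ins for the real uPlotV2 plot and legend components.
function Plot({ width, height }: { width: number; height: number }): JSX.Element {
	return <canvas width={width} height={height} />;
}
function Legend({ itemsPerRow }: { itemsPerRow: number }): JSX.Element {
	return <div data-items-per-row={itemsPerRow} />;
}

function PanelWithLegend({
	config,
	width,
	height,
}: {
	config: UPlotConfigBuilder;
	width: number;
	height: number;
}): JSX.Element {
	// Assumption: a position is enough to satisfy LegendConfig for this sketch.
	const legendConfig = { position: LegendPosition.BOTTOM } as LegendConfig;

	return (
		<ChartLayout
			config={config}
			containerWidth={width}
			containerHeight={height}
			legendConfig={legendConfig}
			legendComponent={(legendsPerSet) => <Legend itemsPerRow={legendsPerSet} />}
		>
			{({ chartWidth, chartHeight }) => (
				<Plot width={chartWidth} height={chartHeight} />
			)}
		</ChartLayout>
	);
}
```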
@@ -0,0 +1,15 @@
/**
 * Represents the visibility state of a single series in a graph
 */
export interface SeriesVisibilityItem {
label: string;
show: boolean;
}

/**
 * Represents the stored visibility state for a widget/graph
 */
export interface GraphVisibilityState {
name: string;
dataIndex: SeriesVisibilityItem[];
}
@@ -0,0 +1,74 @@
import { LOCALSTORAGE } from 'constants/localStorage';

import { GraphVisibilityState, SeriesVisibilityItem } from '../types';

/**
 * Retrieves the visibility map for a specific widget from localStorage
 * @param widgetId - The unique identifier of the widget
 * @returns A Map of series labels to their visibility state, or null if not found
 */
export function getStoredSeriesVisibility(
widgetId: string,
): Map<string, boolean> | null {
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);

if (!storedData) {
return null;
}

const visibilityStates: GraphVisibilityState[] = JSON.parse(storedData);
const widgetState = visibilityStates.find((state) => state.name === widgetId);

if (!widgetState?.dataIndex?.length) {
return null;
}

return new Map(widgetState.dataIndex.map((item) => [item.label, item.show]));
} catch {
// Silently handle parsing errors - fall back to default visibility
return null;
}
}

export function updateSeriesVisibilityToLocalStorage(
widgetId: string,
seriesVisibility: SeriesVisibilityItem[],
): void {
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);

let visibilityStates: GraphVisibilityState[];

if (!storedData) {
visibilityStates = [
{
name: widgetId,
dataIndex: seriesVisibility,
},
];
} else {
visibilityStates = JSON.parse(storedData);
}
const widgetState = visibilityStates.find((state) => state.name === widgetId);

if (!widgetState) {
visibilityStates = [
...visibilityStates,
{
name: widgetId,
dataIndex: seriesVisibility,
},
];
} else {
widgetState.dataIndex = seriesVisibility;
}

localStorage.setItem(
LOCALSTORAGE.GRAPH_VISIBILITY_STATES,
JSON.stringify(visibilityStates),
);
} catch {
// Silently handle parsing errors - fall back to default visibility
}
}
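A sketch of how the two helpers round-trip a widget's legend state; the widget id and series labels are made up, and the import path is assumed because the new module's location is not shown in this diff:

```ts
// Import path assumed for illustration only.
import {
	getStoredSeriesVisibility,
	updateSeriesVisibilityToLocalStorage,
} from 'lib/uPlotV2/utils/seriesVisibility';

const widgetId = 'widget-123'; // made-up id

// Persist the state after the user toggles series in the legend.
updateSeriesVisibilityToLocalStorage(widgetId, [
	{ label: 'p50', show: true },
	{ label: 'p99', show: false },
]);

// On the next render, seed each series' `show` flag from the stored map,
// defaulting to visible for series that were never toggled.
const stored = getStoredSeriesVisibility(widgetId);
const isVisible = (label: string): boolean => stored?.get(label) ?? true;

console.log(isVisible('p99')); // false
console.log(isVisible('p75')); // true (not stored, default visible)
```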
@@ -25,6 +25,7 @@ import {
} from 'container/NewWidget/RightContainer/timeItems';
import PanelWrapper from 'container/PanelWrapper/PanelWrapper';
import RightToolbarActions from 'container/QueryBuilder/components/ToolbarActions/RightToolbarActions';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useChartMutable } from 'hooks/useChartMutable';
@@ -79,6 +80,7 @@ function FullView({
}, [setCurrentGraphRef]);

const { selectedDashboard, isDashboardLocked } = useDashboard();
const { dashboardVariables } = useDashboardVariables();
const { user } = useAppContext();

const [editWidget] = useComponentPermission(['edit_widget'], user.role);
@@ -114,7 +116,7 @@ function FullView({
graphType: getGraphType(selectedPanelType),
query: updatedQuery,
globalSelectedInterval: globalSelectedTime,
variables: getDashboardVariables(selectedDashboard?.data.variables),
variables: getDashboardVariables(dashboardVariables),
fillGaps: widget.fillSpans,
formatForWeb: selectedPanelType === PANEL_TYPES.TABLE,
originalGraphType: selectedPanelType,
@@ -125,7 +127,7 @@ function FullView({
graphType: PANEL_TYPES.LIST,
selectedTime: widget?.timePreferance || 'GLOBAL_TIME',
globalSelectedInterval: globalSelectedTime,
variables: getDashboardVariables(selectedDashboard?.data.variables),
variables: getDashboardVariables(dashboardVariables),
tableParams: {
pagination: {
offset: 0,
@@ -53,7 +53,7 @@ function GridCardGraph({
customOnRowClick,
customTimeRangeWindowForCoRelation,
enableDrillDown,
widgetsHavingDynamicVariables,
widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
@@ -226,8 +226,8 @@ function GridCardGraph({
? Object.entries(variables).reduce((acc, [id, variable]) => {
if (
variable.type !== 'DYNAMIC' ||
(widgetsHavingDynamicVariables?.[variable.id] &&
widgetsHavingDynamicVariables?.[variable.id].includes(widget.id))
(widgetsByDynamicVariableId?.[variable.id] &&
widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
) {
return { ...acc, [id]: variable.selectedValue };
}
@@ -4,8 +4,9 @@ import { ToggleGraphProps } from 'components/Graph/types';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
import { SuccessResponse } from 'types/api';
import { Dashboard, Widgets } from 'types/api/dashboard/getAll';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';
import uPlot from 'uplot';
@@ -50,7 +51,7 @@ export interface GridCardGraphProps {
headerMenuList?: WidgetGraphComponentProps['headerMenuList'];
onClickHandler?: OnClickPluginOpts['onClick'];
isQueryEnabled: boolean;
variables?: Dashboard['data']['variables'];
variables?: IDashboardVariables;
version?: string;
onDragSelect: (start: number, end: number) => void;
customOnDragSelect?: (start: number, end: number) => void;
@@ -71,7 +72,7 @@ export interface GridCardGraphProps {
customOnRowClick?: (record: RowData) => void;
customTimeRangeWindowForCoRelation?: string | undefined;
enableDrillDown?: boolean;
widgetsHavingDynamicVariables?: Record<string, string[]>;
widgetsByDynamicVariableId?: Record<string, string[]>;
}

export interface GetGraphVisibilityStateOnLegendClickProps {
@@ -14,8 +14,9 @@ import { QueryParams } from 'constants/query';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { createDynamicVariableToWidgetsMap } from 'hooks/dashboard/utils';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -34,7 +35,7 @@ import { useAppContext } from 'providers/App/App';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { sortLayout } from 'providers/Dashboard/util';
import { UpdateTimeInterval } from 'store/actions';
import { IDashboardVariable, Widgets } from 'types/api/dashboard/getAll';
import { Widgets } from 'types/api/dashboard/getAll';
import { Props } from 'types/api/dashboard/update';
import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
@@ -79,7 +80,9 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
const { pathname } = useLocation();
const dispatch = useDispatch();

const { widgets, variables } = data || {};
const { widgets } = data || {};

const { dashboardVariables } = useDashboardVariables();

const { user } = useAppContext();

@@ -99,21 +102,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});

const widgetsHavingDynamicVariables = useMemo(() => {
const dynamicVariables = Object.values(
selectedDashboard?.data?.variables || {},
)?.filter((variable: IDashboardVariable) => variable.type === 'DYNAMIC');

const widgets =
selectedDashboard?.data?.widgets?.filter(
(widget) => widget.panelTypes !== PANEL_GROUP_TYPES.ROW,
) || [];

return createDynamicVariableToWidgetsMap(
dynamicVariables,
widgets as Widgets[],
);
}, [selectedDashboard]);
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();

useEffect(() => {
setCurrentPanelMap(panelMap);
@@ -178,11 +167,11 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
dashboardId: selectedDashboard?.id,
dashboardName: data.title,
numberOfPanels: data.widgets?.length,
numberOfVariables: Object.keys(data?.variables || {}).length || 0,
numberOfVariables: Object.keys(dashboardVariables).length || 0,
});
logEventCalledRef.current = true;
}
}, [data, selectedDashboard?.id]);
}, [dashboardVariables, data, selectedDashboard?.id]);

const onSaveHandler = (): void => {
if (!selectedDashboard) {
@@ -622,13 +611,13 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
<GridCard
widget={(currentWidget as Widgets) || ({ id, query: {} } as Widgets)}
headerMenuList={widgetActions}
variables={variables}
variables={dashboardVariables}
// version={selectedDashboard?.data?.version}
version={ENTITY_VERSION_V5}
onDragSelect={onDragSelect}
dataAvailable={checkIfDataExists}
enableDrillDown={enableDrillDown}
widgetsHavingDynamicVariables={widgetsHavingDynamicVariables}
widgetsByDynamicVariableId={widgetsByDynamicVariableId}
/>
</Card>
</CardContainer>
@@ -1,15 +1,14 @@
import { useCallback, useMemo } from 'react';
import { useCallback } from 'react';
import { useMutation } from 'react-query';
import { useSelector } from 'react-redux';
import { getSubstituteVars } from 'api/dashboard/substitute_vars';
import { prepareQueryRangePayloadV5 } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { timePreferenceType } from 'container/NewWidget/RightContainer/timeItems';
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { GlobalReducer } from 'types/reducer/globalTime';
import { getGraphType } from 'utils/getGraphType';
@@ -36,14 +35,9 @@ function useUpdatedQuery(): UseUpdatedQueryResult {

const queryRangeMutation = useMutation(getSubstituteVars);

const { selectedDashboard } = useDashboard();

const dynamicVariables = useMemo(
() =>
Object.values(selectedDashboard?.data?.variables || {})?.filter(
(variable: IDashboardVariable) => variable.type === 'DYNAMIC',
),
[selectedDashboard],
const dashboardDynamicVariables = useDashboardVariablesByType(
'DYNAMIC',
'values',
);

const getUpdatedQuery = useCallback(
@@ -59,7 +53,7 @@ function useUpdatedQuery(): UseUpdatedQueryResult {
globalSelectedInterval,
variables: getDashboardVariables(selectedDashboard?.data?.variables),
originalGraphType: widgetConfig.panelTypes,
dynamicVariables,
dynamicVariables: dashboardDynamicVariables,
});

// Execute query and process results
@@ -68,7 +62,7 @@ function useUpdatedQuery(): UseUpdatedQueryResult {
// Map query data from API response
return mapQueryDataFromApi(queryResult.data.compositeQuery);
},
[dynamicVariables, globalSelectedInterval, queryRangeMutation],
[dashboardDynamicVariables, globalSelectedInterval, queryRangeMutation],
);

return {
@@ -39,8 +39,10 @@ import cx from 'classnames';
import { ENTITY_VERSION_V5 } from 'constants/app';
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
import ROUTES from 'constants/routes';
import { sanitizeDashboardData } from 'container/DashboardContainer/DashboardDescription';
import { downloadObjectAsJson } from 'container/DashboardContainer/DashboardDescription/utils';
import {
downloadObjectAsJson,
sanitizeDashboardData,
} from 'container/DashboardContainer/DashboardDescription/utils';
import { Base64Icons } from 'container/DashboardContainer/DashboardSettings/General/utils';
import dayjs from 'dayjs';
import { useGetAllDashboard } from 'hooks/dashboard/useGetAllDashboard';
@@ -1,4 +1,7 @@
.dashboard-navigation {
.run-query-dashboard-btn {
min-width: 180px;
}
.ant-tabs-tab {
border: none !important;
margin-left: 0px !important;
@@ -1,4 +1,5 @@
import { useCallback, useEffect, useMemo } from 'react';
import { QueryKey } from 'react-query';
import { Color } from '@signozhq/design-tokens';
import { Button, Tabs, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
@@ -35,8 +36,11 @@ import ClickHouseQueryContainer from './QueryBuilder/clickHouse';
import PromQLQueryContainer from './QueryBuilder/promQL';

import './QuerySection.styles.scss';

function QuerySection({ selectedGraph }: QueryProps): JSX.Element {
function QuerySection({
selectedGraph,
queryRangeKey,
isLoadingQueries,
}: QueryProps): JSX.Element {
const {
currentQuery,
handleRunQuery: handleRunQueryFromQueryBuilder,
@@ -237,7 +241,13 @@ function QuerySection({ selectedGraph }: QueryProps): JSX.Element {
tabBarExtraContent={
<span style={{ display: 'flex', gap: '1rem', alignItems: 'center' }}>
<TextToolTip text="This will temporarily save the current query and graph state. This will persist across tab change" />
<RunQueryBtn label="Stage & Run Query" onStageRunQuery={handleRunQuery} />
<RunQueryBtn
className="run-query-dashboard-btn"
label="Stage & Run Query"
onStageRunQuery={handleRunQuery}
isLoadingQueries={isLoadingQueries}
queryRangeKey={queryRangeKey}
/>
</span>
}
items={items}
@@ -248,6 +258,8 @@ function QuerySection({ selectedGraph }: QueryProps): JSX.Element {

interface QueryProps {
selectedGraph: PANEL_TYPES;
queryRangeKey?: QueryKey;
isLoadingQueries?: boolean;
}

export default QuerySection;
@@ -1,4 +1,5 @@
|
||||
import { memo, useEffect } from 'react';
|
||||
import { useMemo } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
@@ -24,8 +25,8 @@ function LeftContainer({
|
||||
setSelectedTracesFields,
|
||||
selectedWidget,
|
||||
requestData,
|
||||
setRequestData,
|
||||
isLoadingPanelData,
|
||||
setRequestData,
|
||||
setQueryResponse,
|
||||
enableDrillDown = false,
|
||||
}: WidgetGraphProps): JSX.Element {
|
||||
@@ -35,15 +36,20 @@ function LeftContainer({
|
||||
AppState,
|
||||
GlobalReducer
|
||||
>((state) => state.globalTime);
|
||||
const queryResponse = useGetQueryRange(requestData, ENTITY_VERSION_V5, {
|
||||
enabled: !!stagedQuery,
|
||||
queryKey: [
|
||||
const queryRangeKey = useMemo(
|
||||
() => [
|
||||
REACT_QUERY_KEY.GET_QUERY_RANGE,
|
||||
globalSelectedInterval,
|
||||
requestData,
|
||||
minTime,
|
||||
maxTime,
|
||||
],
|
||||
[globalSelectedInterval, requestData, minTime, maxTime],
|
||||
);
|
||||
const queryResponse = useGetQueryRange(requestData, ENTITY_VERSION_V5, {
|
||||
enabled: !!stagedQuery,
|
||||
queryKey: queryRangeKey,
|
||||
keepPreviousData: true,
|
||||
});
|
||||
|
||||
// Update parent component with query response for legend colors
|
||||
@@ -64,7 +70,11 @@ function LeftContainer({
|
||||
enableDrillDown={enableDrillDown}
|
||||
/>
|
||||
<QueryContainer className="query-section-left-container">
|
||||
<QuerySection selectedGraph={selectedGraph} />
|
||||
<QuerySection
|
||||
selectedGraph={selectedGraph}
|
||||
queryRangeKey={queryRangeKey}
|
||||
isLoadingQueries={queryResponse.isFetching}
|
||||
/>
|
||||
{selectedGraph === PANEL_TYPES.LIST && (
|
||||
<ExplorerColumnsRenderer
|
||||
selectedLogFields={selectedLogFields}
|
||||
|
||||
@@ -26,6 +26,7 @@ import { PANEL_TYPES, PanelDisplay } from 'constants/queryBuilder';
|
||||
import GraphTypes, {
|
||||
ItemsProps,
|
||||
} from 'container/DashboardContainer/ComponentsSlider/menuItems';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import useCreateAlerts from 'hooks/queryBuilder/useCreateAlerts';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import {
|
||||
@@ -35,7 +36,6 @@ import {
|
||||
Spline,
|
||||
SquareArrowOutUpRight,
|
||||
} from 'lucide-react';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import {
|
||||
ColumnUnit,
|
||||
@@ -131,7 +131,7 @@ function RightContainer({
|
||||
enableDrillDown = false,
|
||||
isNewDashboard,
|
||||
}: RightContainerProps): JSX.Element {
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
const [inputValue, setInputValue] = useState(title);
|
||||
const [autoCompleteOpen, setAutoCompleteOpen] = useState(false);
|
||||
const [cursorPos, setCursorPos] = useState(0);
|
||||
@@ -173,16 +173,12 @@ function RightContainer({
|
||||
|
||||
const [graphTypes, setGraphTypes] = useState<ItemsProps[]>(GraphTypes);
|
||||
|
||||
// Get dashboard variables
|
||||
const dashboardVariables = useMemo<VariableOption[]>(() => {
|
||||
if (!selectedDashboard?.data?.variables) {
|
||||
return [];
|
||||
}
|
||||
return Object.entries(selectedDashboard.data.variables).map(([, value]) => ({
|
||||
const dashboardVariableOptions = useMemo<VariableOption[]>(() => {
|
||||
return Object.entries(dashboardVariables).map(([, value]) => ({
|
||||
value: value.name || '',
|
||||
label: value.name || '',
|
||||
}));
|
||||
}, [selectedDashboard?.data?.variables]);
|
||||
}, [dashboardVariables]);
|
||||
|
||||
const updateCursorAndDropdown = (value: string, pos: number): void => {
|
||||
setCursorPos(pos);
|
||||
@@ -274,7 +270,7 @@ function RightContainer({
|
||||
<section className="name-description">
|
||||
<Typography.Text className="typography">Name</Typography.Text>
|
||||
<AutoComplete
|
||||
options={dashboardVariables}
|
||||
options={dashboardVariableOptions}
|
||||
value={inputValue}
|
||||
onChange={onInputChange}
|
||||
onSelect={onSelect}
|
||||
|
||||
@@ -19,6 +19,7 @@ import {
|
||||
import ROUTES from 'constants/routes';
|
||||
import { DashboardShortcuts } from 'constants/shortcuts/DashboardShortcuts';
|
||||
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
|
||||
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
@@ -89,6 +90,8 @@ function NewWidget({
|
||||
columnWidths,
|
||||
} = useDashboard();
|
||||
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
|
||||
const { t } = useTranslation(['dashboard']);
|
||||
|
||||
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
|
||||
@@ -377,7 +380,7 @@ function NewWidget({
|
||||
graphType: PANEL_TYPES.LIST,
|
||||
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
|
||||
globalSelectedInterval: customGlobalSelectedInterval,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
variables: getDashboardVariables(dashboardVariables),
|
||||
tableParams: {
|
||||
pagination: {
|
||||
offset: 0,
|
||||
@@ -394,7 +397,7 @@ function NewWidget({
|
||||
formatForWeb:
|
||||
getGraphTypeForFormat(selectedGraph || selectedWidget.panelTypes) ===
|
||||
PANEL_TYPES.TABLE,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
variables: getDashboardVariables(dashboardVariables),
|
||||
originalGraphType: selectedGraph || selectedWidget?.panelTypes,
|
||||
};
|
||||
}
|
||||
@@ -408,7 +411,7 @@ function NewWidget({
|
||||
graphType: selectedGraph,
|
||||
selectedTime: selectedTime.enum || 'GLOBAL_TIME',
|
||||
globalSelectedInterval: customGlobalSelectedInterval,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
variables: getDashboardVariables(dashboardVariables),
|
||||
};
|
||||
});
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ import logEvent from 'api/common/logEvent';
|
||||
import LaunchChatSupport from 'components/LaunchChatSupport/LaunchChatSupport';
|
||||
import { DOCS_BASE_URL } from 'constants/app';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useGetGlobalConfig } from 'hooks/globalConfig/useGetGlobalConfig';
|
||||
import useDebouncedFn from 'hooks/useDebouncedFunction';
|
||||
import history from 'lib/history';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
@@ -148,6 +149,8 @@ function OnboardingAddDataSource(): JSX.Element {
|
||||
|
||||
const { org } = useAppContext();
|
||||
|
||||
const { data: globalConfig } = useGetGlobalConfig();
|
||||
|
||||
const [setupStepItems, setSetupStepItems] = useState(setupStepItemsBase);
|
||||
|
||||
const [searchQuery, setSearchQuery] = useState<string>('');
|
||||
@@ -233,6 +236,16 @@ function OnboardingAddDataSource(): JSX.Element {
|
||||
urlObj.searchParams.set('environment', selectedEnvironment);
|
||||
}
|
||||
|
||||
const ingestionUrl = globalConfig?.data?.ingestion_url;
|
||||
|
||||
if (ingestionUrl) {
|
||||
const parts = ingestionUrl.split('.');
|
||||
if (parts?.length > 1 && parts[0]?.includes('ingest')) {
|
||||
const region = parts[1];
|
||||
urlObj.searchParams.set('region', region);
|
||||
}
|
||||
}
|
||||
|
||||
// Step 3: Return the updated URL as a string
|
||||
const updatedUrl = urlObj.toString();
|
||||
|
||||
|
||||
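For illustration, assuming an ingestion URL of the shape 'ingest.<region>.signoz.cloud' (the exact hostname format is an assumption, not taken from this diff), the region extraction above works like this:

// Assumed hostname shape: 'ingest.us.signoz.cloud' -> region 'us'
const parts = 'ingest.us.signoz.cloud'.split('.');
const region = parts.length > 1 && parts[0].includes('ingest') ? parts[1] : undefined; // 'us'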
@@ -1,7 +1,17 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { screen } from '@testing-library/react';
|
||||
import { screen, within } from '@testing-library/react';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
|
||||
import { findByText, fireEvent, render, waitFor } from 'tests/test-utils';
|
||||
import {
|
||||
findByText,
|
||||
fireEvent,
|
||||
render,
|
||||
userEvent,
|
||||
waitFor,
|
||||
} from 'tests/test-utils';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import { pipelineApiResponseMockData } from '../mocks/pipeline';
|
||||
import PipelineListsView from '../PipelineListsView';
|
||||
@@ -75,7 +85,20 @@ jest.mock('providers/preferences/sync/usePreferenceSync', () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
const BASE_URL = ENVIRONMENT.baseURL;
|
||||
const attributeKeysURL = `${BASE_URL}/api/v3/autocomplete/attribute_keys`;
|
||||
|
||||
describe('PipelinePage container test', () => {
|
||||
beforeAll(() => {
|
||||
server.listen();
|
||||
});
|
||||
afterEach(() => {
|
||||
server.resetHandlers();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
afterAll(() => {
|
||||
server.close();
|
||||
});
|
||||
it('should render PipelineListsView section', () => {
|
||||
const { getByText, container } = render(
|
||||
<PreferenceContextProvider>
|
||||
@@ -272,6 +295,7 @@ describe('PipelinePage container test', () => {
|
||||
});
|
||||
|
||||
it('should have populated form fields when edit pipeline is clicked', async () => {
|
||||
const user = userEvent.setup({ pointerEventsCheck: 0 });
|
||||
render(
|
||||
<PreferenceContextProvider>
|
||||
<PipelineListsView
|
||||
@@ -301,5 +325,52 @@ describe('PipelinePage container test', () => {
|
||||
|
||||
// to have length 2
|
||||
expect(screen.queryAllByText('source = nginx').length).toBe(2);
|
||||
|
||||
server.use(
|
||||
rest.get(attributeKeysURL, (_req, res, ctx) =>
|
||||
res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
status: 'success',
|
||||
data: {
|
||||
attributeKeys: [
|
||||
{
|
||||
key: 'otelServiceName',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
},
|
||||
{
|
||||
key: 'service.instance.id',
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
{
|
||||
key: 'service.name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'resource',
|
||||
},
|
||||
{
|
||||
key: 'service.name',
|
||||
dataType: DataTypes.String,
|
||||
type: 'tag',
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
// Open the filter input to trigger suggestions
|
||||
const filterSelect = screen.getByTestId('qb-search-select');
|
||||
const input = within(filterSelect).getByRole('combobox') as HTMLInputElement;
|
||||
|
||||
await user.click(input);
|
||||
await waitFor(() =>
|
||||
expect(screen.getByText('otelServiceName')).toBeInTheDocument(),
|
||||
);
|
||||
|
||||
const serviceNameOccurences = await screen.findAllByText('service.name');
|
||||
expect(serviceNameOccurences.length).toBeGreaterThanOrEqual(2);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import { useCallback } from 'react';
|
||||
import { QueryKey, useIsFetching, useQueryClient } from 'react-query';
|
||||
import { Button } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import {
|
||||
ChevronUp,
|
||||
Command,
|
||||
@@ -9,35 +12,56 @@ import {
|
||||
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';
|
||||
|
||||
import './RunQueryBtn.scss';
|
||||
|
||||
interface RunQueryBtnProps {
|
||||
className?: string;
|
||||
label?: string;
|
||||
isLoadingQueries?: boolean;
|
||||
handleCancelQuery?: () => void;
|
||||
onStageRunQuery?: () => void;
|
||||
queryRangeKey?: QueryKey;
|
||||
}
|
||||
|
||||
function RunQueryBtn({
|
||||
className,
|
||||
label,
|
||||
isLoadingQueries,
|
||||
handleCancelQuery,
|
||||
onStageRunQuery,
|
||||
queryRangeKey,
|
||||
}: RunQueryBtnProps): JSX.Element {
|
||||
const isMac = getUserOperatingSystem() === UserOperatingSystem.MACOS;
|
||||
return isLoadingQueries ? (
|
||||
const queryClient = useQueryClient();
|
||||
const isKeyFetchingCount = useIsFetching(
|
||||
queryRangeKey as QueryKey | undefined,
|
||||
);
|
||||
const isLoading =
|
||||
typeof isLoadingQueries === 'boolean'
|
||||
? isLoadingQueries
|
||||
: isKeyFetchingCount > 0;
|
||||
|
||||
const onCancel = useCallback(() => {
|
||||
if (handleCancelQuery) {
|
||||
return handleCancelQuery();
|
||||
}
|
||||
if (queryRangeKey) {
|
||||
queryClient.cancelQueries(queryRangeKey);
|
||||
}
|
||||
}, [handleCancelQuery, queryClient, queryRangeKey]);
|
||||
|
||||
return isLoading ? (
|
||||
<Button
|
||||
type="default"
|
||||
icon={<Loader2 size={14} className="loading-icon animate-spin" />}
|
||||
className="cancel-query-btn periscope-btn danger"
|
||||
onClick={handleCancelQuery}
|
||||
className={cx('cancel-query-btn periscope-btn danger', className)}
|
||||
onClick={onCancel}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
) : (
|
||||
<Button
|
||||
type="primary"
|
||||
className="run-query-btn periscope-btn primary"
|
||||
disabled={isLoadingQueries || !onStageRunQuery}
|
||||
className={cx('run-query-btn periscope-btn primary', className)}
|
||||
disabled={isLoading || !onStageRunQuery}
|
||||
onClick={onStageRunQuery}
|
||||
icon={<Play size={14} />}
|
||||
>
|
||||
|
||||
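In short, the button now derives its loading state from the explicit isLoadingQueries prop when it is a boolean, otherwise from React Query's fetch count for queryRangeKey, and cancellation falls back to queryClient.cancelQueries(queryRangeKey) when no handleCancelQuery is passed. A minimal usage sketch, using the handleRunQuery and queryRangeKey values wired up in the dashboard containers above:

<RunQueryBtn
  className="run-query-dashboard-btn"
  label="Stage & Run Query"
  onStageRunQuery={handleRunQuery}
  queryRangeKey={queryRangeKey}
/>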
@@ -3,6 +3,16 @@ import { fireEvent, render, screen } from '@testing-library/react';
|
||||
|
||||
import RunQueryBtn from '../RunQueryBtn';
|
||||
|
||||
jest.mock('react-query', () => {
|
||||
const actual = jest.requireActual('react-query');
|
||||
return {
|
||||
...actual,
|
||||
useIsFetching: jest.fn(),
|
||||
useQueryClient: jest.fn(),
|
||||
};
|
||||
});
|
||||
import { useIsFetching, useQueryClient } from 'react-query';
|
||||
|
||||
// Mock OS util
|
||||
jest.mock('utils/getUserOS', () => ({
|
||||
getUserOperatingSystem: jest.fn(),
|
||||
@@ -11,10 +21,43 @@ jest.mock('utils/getUserOS', () => ({
|
||||
import { getUserOperatingSystem, UserOperatingSystem } from 'utils/getUserOS';
|
||||
|
||||
describe('RunQueryBtn', () => {
|
||||
test('renders run state and triggers on click', () => {
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
(getUserOperatingSystem as jest.Mock).mockReturnValue(
|
||||
UserOperatingSystem.MACOS,
|
||||
);
|
||||
(useIsFetching as jest.Mock).mockReturnValue(0);
|
||||
(useQueryClient as jest.Mock).mockReturnValue({
|
||||
cancelQueries: jest.fn(),
|
||||
});
|
||||
});
|
||||
|
||||
test('uses isLoadingQueries prop over useIsFetching', () => {
|
||||
// Simulate fetching but prop forces not loading
|
||||
(useIsFetching as jest.Mock).mockReturnValue(1);
|
||||
const onRun = jest.fn();
|
||||
render(<RunQueryBtn onStageRunQuery={onRun} isLoadingQueries={false} />);
|
||||
// Should show "Run Query" (not cancel)
|
||||
const runBtn = screen.getByRole('button', { name: /run query/i });
|
||||
expect(runBtn).toBeInTheDocument();
|
||||
expect(runBtn).toBeEnabled();
|
||||
});
|
||||
|
||||
test('fallback cancel: uses handleCancelQuery when no key provided', () => {
|
||||
(useIsFetching as jest.Mock).mockReturnValue(0);
|
||||
const cancelQueries = jest.fn();
|
||||
(useQueryClient as jest.Mock).mockReturnValue({ cancelQueries });
|
||||
|
||||
const onCancel = jest.fn();
|
||||
render(<RunQueryBtn isLoadingQueries handleCancelQuery={onCancel} />);
|
||||
|
||||
const cancelBtn = screen.getByRole('button', { name: /cancel/i });
|
||||
fireEvent.click(cancelBtn);
|
||||
expect(onCancel).toHaveBeenCalledTimes(1);
|
||||
expect(cancelQueries).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('renders run state and triggers on click', () => {
|
||||
const onRun = jest.fn();
|
||||
render(<RunQueryBtn onStageRunQuery={onRun} />);
|
||||
const btn = screen.getByRole('button', { name: /run query/i });
|
||||
@@ -24,17 +67,11 @@ describe('RunQueryBtn', () => {
|
||||
});
|
||||
|
||||
test('disabled when onStageRunQuery is undefined', () => {
|
||||
(getUserOperatingSystem as jest.Mock).mockReturnValue(
|
||||
UserOperatingSystem.MACOS,
|
||||
);
|
||||
render(<RunQueryBtn />);
|
||||
expect(screen.getByRole('button', { name: /run query/i })).toBeDisabled();
|
||||
});
|
||||
|
||||
test('shows cancel state and calls handleCancelQuery', () => {
|
||||
(getUserOperatingSystem as jest.Mock).mockReturnValue(
|
||||
UserOperatingSystem.MACOS,
|
||||
);
|
||||
const onCancel = jest.fn();
|
||||
render(<RunQueryBtn isLoadingQueries handleCancelQuery={onCancel} />);
|
||||
const cancel = screen.getByRole('button', { name: /cancel/i });
|
||||
@@ -42,10 +79,24 @@ describe('RunQueryBtn', () => {
|
||||
expect(onCancel).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('derives loading from queryKey via useIsFetching and cancels via queryClient', () => {
|
||||
(useIsFetching as jest.Mock).mockReturnValue(1);
|
||||
const cancelQueries = jest.fn();
|
||||
(useQueryClient as jest.Mock).mockReturnValue({ cancelQueries });
|
||||
|
||||
const queryKey = ['GET_QUERY_RANGE', '1h', { some: 'req' }, 1, 2];
|
||||
render(<RunQueryBtn queryRangeKey={queryKey} />);
|
||||
|
||||
// Button switches to cancel state
|
||||
const cancelBtn = screen.getByRole('button', { name: /cancel/i });
|
||||
expect(cancelBtn).toBeInTheDocument();
|
||||
|
||||
// Clicking cancel calls cancelQueries with the key
|
||||
fireEvent.click(cancelBtn);
|
||||
expect(cancelQueries).toHaveBeenCalledWith(queryKey);
|
||||
});
|
||||
|
||||
test('shows Command + CornerDownLeft on mac', () => {
|
||||
(getUserOperatingSystem as jest.Mock).mockReturnValue(
|
||||
UserOperatingSystem.MACOS,
|
||||
);
|
||||
const { container } = render(
|
||||
<RunQueryBtn onStageRunQuery={(): void => {}} />,
|
||||
);
|
||||
@@ -70,9 +121,6 @@ describe('RunQueryBtn', () => {
|
||||
});
|
||||
|
||||
test('renders custom label when provided', () => {
|
||||
(getUserOperatingSystem as jest.Mock).mockReturnValue(
|
||||
UserOperatingSystem.MACOS,
|
||||
);
|
||||
const onRun = jest.fn();
|
||||
render(<RunQueryBtn onStageRunQuery={onRun} label="Stage & Run Query" />);
|
||||
expect(
|
||||
|
||||
@@ -356,7 +356,10 @@ function QueryBuilderSearch({
|
||||
|
||||
// conditional change: use a separate component to render the example queries based on the option group label
|
||||
const customRendererForLogsExplorer = options.map((option) => (
|
||||
<Select.Option key={option.label} value={option.value}>
|
||||
<Select.Option
|
||||
key={`${option.label}-${option.type || ''}-${option.dataType || ''}`}
|
||||
value={option.value}
|
||||
>
|
||||
<OptionRendererForLogs
|
||||
label={option.label}
|
||||
value={option.value}
|
||||
@@ -371,6 +374,7 @@ function QueryBuilderSearch({
|
||||
return (
|
||||
<div className="query-builder-search-container">
|
||||
<Select
|
||||
data-testid={'qb-search-select'}
|
||||
ref={selectRef}
|
||||
getPopupContainer={popupContainer}
|
||||
transitionName=""
|
||||
@@ -488,7 +492,10 @@ function QueryBuilderSearch({
|
||||
{isLogsExplorerPage
|
||||
? customRendererForLogsExplorer
|
||||
: options.map((option) => (
|
||||
<Select.Option key={option.label} value={option.value}>
|
||||
<Select.Option
|
||||
key={`${option.label}-${option.type || ''}-${option.dataType || ''}`}
|
||||
value={option.value}
|
||||
>
|
||||
<OptionRenderer
|
||||
label={option.label}
|
||||
value={option.value}
|
||||
|
||||
@@ -19,6 +19,7 @@ import {
|
||||
} from 'constants/queryBuilder';
|
||||
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
|
||||
import { LogsExplorerShortcuts } from 'constants/shortcuts/logsExplorerShortcuts';
|
||||
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
|
||||
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { WhereClauseConfig } from 'hooks/queryBuilder/useAutoComplete';
|
||||
import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
|
||||
@@ -38,9 +39,7 @@ import {
|
||||
unset,
|
||||
} from 'lodash-es';
|
||||
import { ChevronDown, ChevronUp } from 'lucide-react';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import type { BaseSelectRef } from 'rc-select';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
@@ -248,14 +247,9 @@ function QueryBuilderSearchV2(
|
||||
return false;
|
||||
}, [currentState, query.aggregateAttribute?.dataType, query.dataSource]);
|
||||
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const dynamicVariables = useMemo(
|
||||
() =>
|
||||
Object.values(selectedDashboard?.data?.variables || {})?.filter(
|
||||
(variable: IDashboardVariable) => variable.type === 'DYNAMIC',
|
||||
),
|
||||
[selectedDashboard],
|
||||
const dashboardDynamicVariables = useDashboardVariablesByType(
|
||||
'DYNAMIC',
|
||||
'values',
|
||||
);
|
||||
|
||||
const { data, isFetching } = useGetAggregateKeys(
|
||||
@@ -806,7 +800,7 @@ function QueryBuilderSearchV2(
|
||||
values.push(...(attributeValues?.payload?.[key] || []));
|
||||
|
||||
// suggest the variable name matching this key; we go over the dynamic variables to find it
|
||||
const variableName = dynamicVariables?.find(
|
||||
const variableName = dashboardDynamicVariables?.find(
|
||||
(variable) =>
|
||||
variable?.dynamicVariablesAttribute === currentFilterItem?.key?.key,
|
||||
)?.name;
|
||||
@@ -837,7 +831,7 @@ function QueryBuilderSearchV2(
|
||||
suggestionsData?.payload?.attributes,
|
||||
operatorConfigKey,
|
||||
currentFilterItem?.key?.key,
|
||||
dynamicVariables,
|
||||
dashboardDynamicVariables,
|
||||
]);
|
||||
|
||||
// keep the query in sync with the selected tags in logs explorer page
|
||||
|
||||
@@ -12,7 +12,9 @@ import {
|
||||
initialQueriesMap,
|
||||
initialQueryBuilderFormValues,
|
||||
} from 'constants/queryBuilder';
|
||||
import { IUseDashboardVariablesReturn } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { QueryBuilderContext } from 'providers/QueryBuilder';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
@@ -145,27 +147,23 @@ jest.mock('hooks/useSafeNavigate', () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
// Mock dashboard provider with dynamic variables
|
||||
const mockDashboard = {
|
||||
data: {
|
||||
variables: {
|
||||
service: {
|
||||
id: 'service',
|
||||
name: 'service',
|
||||
type: 'DYNAMIC',
|
||||
dynamicVariablesAttribute: 'service.name',
|
||||
description: '',
|
||||
sort: 'DISABLED',
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
},
|
||||
},
|
||||
// Mock dashboard variables
|
||||
const dashboardVariables = {
|
||||
service: {
|
||||
id: 'service',
|
||||
name: 'service',
|
||||
type: 'DYNAMIC' as IDashboardVariable['type'],
|
||||
dynamicVariablesAttribute: 'service.name',
|
||||
description: '',
|
||||
sort: 'DISABLED' as IDashboardVariable['sort'],
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
},
|
||||
};
|
||||
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: (): any => ({
|
||||
selectedDashboard: mockDashboard,
|
||||
jest.mock('hooks/dashboard/useDashboardVariables', () => ({
|
||||
useDashboardVariables: (): IUseDashboardVariablesReturn => ({
|
||||
dashboardVariables: dashboardVariables,
|
||||
}),
|
||||
}));
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
|
||||
import { ArrowLeft, Plus, Settings, X } from 'lucide-react';
|
||||
import ContextMenu from 'periscope/components/ContextMenu';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
// import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
@@ -33,17 +33,9 @@ const useDashboardVarConfig = ({
|
||||
};
|
||||
// contextItems: React.ReactNode;
|
||||
} => {
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const dashboardDynamicVariables = useDashboardVariablesByType('DYNAMIC');
|
||||
const { onValueUpdate, createVariable } = useDashboardVariableUpdate();
|
||||
|
||||
const dynamicDashboardVariables = useMemo(
|
||||
(): [string, IDashboardVariable][] =>
|
||||
Object.entries(selectedDashboard?.data?.variables || {}).filter(
|
||||
([, value]) => value.name && value.type === 'DYNAMIC',
|
||||
),
|
||||
[selectedDashboard],
|
||||
);
|
||||
|
||||
// Function to determine the source from query data
|
||||
const getSourceFromQuery = useCallback(():
|
||||
| 'logs'
|
||||
@@ -116,7 +108,7 @@ const useDashboardVarConfig = ({
|
||||
<>
|
||||
{' '}
|
||||
{Object.entries(fieldVariables).map(([fieldName, value]) => {
|
||||
const dashboardVar = dynamicDashboardVariables.find(
|
||||
const dashboardVar = dashboardDynamicVariables.find(
|
||||
([, dynamicValue]) =>
|
||||
dynamicValue.dynamicVariablesAttribute === fieldName,
|
||||
);
|
||||
@@ -178,7 +170,7 @@ const useDashboardVarConfig = ({
|
||||
),
|
||||
[
|
||||
fieldVariables,
|
||||
dynamicDashboardVariables,
|
||||
dashboardDynamicVariables,
|
||||
handleSetVariable,
|
||||
handleUnsetVariable,
|
||||
handleCreateVariable,
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
import React from 'react';
|
||||
import { renderHook } from '@testing-library/react';
|
||||
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
|
||||
|
||||
import useGetResolvedText from '../useGetResolvedText';
|
||||
|
||||
// Mock the useDashboard hook
|
||||
jest.mock('providers/Dashboard/Dashboard', () => ({
|
||||
useDashboard: function useDashboardMock(): any {
|
||||
return {
|
||||
selectedDashboard: null,
|
||||
};
|
||||
},
|
||||
// Create a mock function that we can modify per test
|
||||
let mockDashboardVariables: IDashboardVariables = {};
|
||||
|
||||
// Mock the useDashboardVariables hook
|
||||
jest.mock('hooks/dashboard/useDashboardVariables', () => ({
|
||||
useDashboardVariables: jest.fn(() => ({
|
||||
dashboardVariables: mockDashboardVariables,
|
||||
})),
|
||||
}));
|
||||
|
||||
describe('useGetResolvedText', () => {
|
||||
@@ -20,13 +22,35 @@ describe('useGetResolvedText', () => {
|
||||
const TRUNCATED_SERVICE = 'test, app +2';
|
||||
const TEXT_TEMPLATE = 'Logs count in $service.name in $severity';
|
||||
|
||||
const renderHookWithProps = (props: {
|
||||
text: string | React.ReactNode;
|
||||
variables?: Record<string, string | number | boolean>;
|
||||
dashboardVariables?: Record<string, any>;
|
||||
maxLength?: number;
|
||||
matcher?: string;
|
||||
}): any => renderHook(() => useGetResolvedText(props));
|
||||
const renderHookWithProps = (
|
||||
props: {
|
||||
text: string | React.ReactNode;
|
||||
maxLength?: number;
|
||||
matcher?: string;
|
||||
},
|
||||
variables?: Record<string, string | number | boolean>,
|
||||
): any => {
|
||||
if (variables) {
|
||||
mockDashboardVariables = Object.entries(
|
||||
variables,
|
||||
).reduce<IDashboardVariables>((acc, [key, value]) => {
|
||||
acc[key] = {
|
||||
id: key,
|
||||
name: key,
|
||||
description: '',
|
||||
type: 'CUSTOM' as const,
|
||||
sort: 'DISABLED' as const,
|
||||
multiSelect: false,
|
||||
showALLOption: false,
|
||||
selectedValue: value,
|
||||
};
|
||||
return acc;
|
||||
}, {});
|
||||
} else {
|
||||
mockDashboardVariables = {};
|
||||
}
|
||||
return renderHook(() => useGetResolvedText(props));
|
||||
};
|
||||
|
||||
it('should resolve variables with truncated and full text', () => {
|
||||
const text = TEXT_TEMPLATE;
|
||||
@@ -35,7 +59,7 @@ describe('useGetResolvedText', () => {
|
||||
severity: SEVERITY_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.truncatedText).toBe(
|
||||
`Logs count in ${TRUNCATED_SERVICE} in DEBUG, INFO`,
|
||||
@@ -50,7 +74,7 @@ describe('useGetResolvedText', () => {
|
||||
severity: SEVERITY_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables, maxLength: 20 });
|
||||
const { result } = renderHookWithProps({ text, maxLength: 20 }, variables);
|
||||
|
||||
expect(result.current.truncatedText).toBe('Logs count in test, a...');
|
||||
expect(result.current.fullText).toBe(EXPECTED_FULL_TEXT);
|
||||
@@ -62,7 +86,7 @@ describe('useGetResolvedText', () => {
|
||||
'service.name': SERVICE_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.truncatedText).toBe(
|
||||
'Logs count in test, app +2 and test, app +2',
|
||||
@@ -80,7 +104,7 @@ describe('useGetResolvedText', () => {
|
||||
'$dyn-service.name': 'dyn-1, dyn-2',
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.truncatedText).toBe(
|
||||
'Logs in test, app +2, test, app +2, test, app +2 - dyn-1, dyn-2',
|
||||
@@ -97,7 +121,7 @@ describe('useGetResolvedText', () => {
|
||||
severity: SEVERITY_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables, matcher: '#' });
|
||||
const { result } = renderHookWithProps({ text, matcher: '#' }, variables);
|
||||
|
||||
expect(result.current.truncatedText).toBe(
|
||||
'Logs count in test, app +2 in DEBUG, INFO',
|
||||
@@ -112,7 +136,7 @@ describe('useGetResolvedText', () => {
|
||||
active: true,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.fullText).toBe('Count: 42, Active: true');
|
||||
expect(result.current.truncatedText).toBe('Count: 42, Active: true');
|
||||
@@ -124,7 +148,7 @@ describe('useGetResolvedText', () => {
|
||||
'service.name': SERVICE_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.truncatedText).toBe(
|
||||
'Logs count in test, app +2 in $unknown',
|
||||
@@ -140,10 +164,12 @@ describe('useGetResolvedText', () => {
|
||||
'service.name': SERVICE_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({
|
||||
text: reactNodeText,
|
||||
const { result } = renderHookWithProps(
|
||||
{
|
||||
text: reactNodeText,
|
||||
},
|
||||
variables,
|
||||
});
|
||||
);
|
||||
|
||||
// Should return the ReactNode unchanged
|
||||
expect(result.current.fullText).toBe(reactNodeText);
|
||||
@@ -156,10 +182,12 @@ describe('useGetResolvedText', () => {
|
||||
'service.name': SERVICE_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({
|
||||
text,
|
||||
const { result } = renderHookWithProps(
|
||||
{
|
||||
text,
|
||||
},
|
||||
variables,
|
||||
});
|
||||
);
|
||||
|
||||
// Should return the number unchanged
|
||||
expect(result.current.fullText).toBe(text);
|
||||
@@ -172,10 +200,12 @@ describe('useGetResolvedText', () => {
|
||||
'service.name': SERVICE_VAR,
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({
|
||||
text,
|
||||
const { result } = renderHookWithProps(
|
||||
{
|
||||
text,
|
||||
},
|
||||
variables,
|
||||
});
|
||||
);
|
||||
|
||||
// Should return the boolean unchanged
|
||||
expect(result.current.fullText).toBe(text);
|
||||
@@ -189,7 +219,7 @@ describe('useGetResolvedText', () => {
|
||||
'config.database.host': 'localhost:5432',
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.fullText).toBe('API: /users Config: localhost:5432');
|
||||
expect(result.current.truncatedText).toBe(
|
||||
@@ -204,7 +234,7 @@ describe('useGetResolvedText', () => {
|
||||
'error.type': 'timeout',
|
||||
};
|
||||
|
||||
const { result } = renderHookWithProps({ text, variables });
|
||||
const { result } = renderHookWithProps({ text }, variables);
|
||||
|
||||
expect(result.current.fullText).toBe('Status: web-api, Error: timeout;');
|
||||
expect(result.current.truncatedText).toBe('Status: web-api, Error: timeout;');
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { useMemo } from 'react';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
|
||||
@@ -38,20 +38,17 @@ interface ResolvedTextUtilsResult {
|
||||
|
||||
function useContextVariables({
|
||||
maxValues = 2,
|
||||
// ! Note: this customVariables is not the same as Dashboard Custom Variables
|
||||
customVariables,
|
||||
}: UseContextVariablesProps): UseContextVariablesResult {
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
const globalTime = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
|
||||
// Extract dashboard variables
|
||||
const dashboardVariables = useMemo(() => {
|
||||
if (!selectedDashboard?.data?.variables) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return Object.entries(selectedDashboard.data.variables)
|
||||
const processedDashboardVariables = useMemo(() => {
|
||||
return Object.entries(dashboardVariables)
|
||||
.filter(([, value]) => value.name)
|
||||
.map(([, value]) => {
|
||||
let processedValue: string | number | boolean;
|
||||
@@ -74,7 +71,7 @@ function useContextVariables({
|
||||
originalValue: value.selectedValue,
|
||||
};
|
||||
});
|
||||
}, [selectedDashboard]);
|
||||
}, [dashboardVariables]);
|
||||
|
||||
// Extract global variables
|
||||
const globalVariables = useMemo(
|
||||
@@ -111,8 +108,12 @@ function useContextVariables({
|
||||
|
||||
// Combine all variables
|
||||
const allVariables = useMemo(
|
||||
() => [...dashboardVariables, ...globalVariables, ...customVariablesList],
|
||||
[dashboardVariables, globalVariables, customVariablesList],
|
||||
() => [
|
||||
...processedDashboardVariables,
|
||||
...globalVariables,
|
||||
...customVariablesList,
|
||||
],
|
||||
[processedDashboardVariables, globalVariables, customVariablesList],
|
||||
);
|
||||
|
||||
// Create processed variables with truncation logic
|
||||
|
||||
21 frontend/src/hooks/dashboard/useDashboardVariables.ts Normal file
@@ -0,0 +1,21 @@
|
||||
import { useSyncExternalStore } from 'react';
|
||||
|
||||
import {
|
||||
dashboardVariablesStore,
|
||||
IDashboardVariables,
|
||||
} from '../../providers/Dashboard/store/dashboardVariablesStore';
|
||||
|
||||
export interface IUseDashboardVariablesReturn {
|
||||
dashboardVariables: IDashboardVariables;
|
||||
}
|
||||
|
||||
export const useDashboardVariables = (): IUseDashboardVariablesReturn => {
|
||||
const dashboardVariables = useSyncExternalStore(
|
||||
dashboardVariablesStore.subscribe,
|
||||
dashboardVariablesStore.getSnapshot,
|
||||
);
|
||||
|
||||
return {
|
||||
dashboardVariables,
|
||||
};
|
||||
};
|
||||
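For context, the new hook can be consumed as in the sketch below; DashboardVariableCount is a hypothetical consumer, and useSyncExternalStore re-renders it whenever the dashboardVariablesStore snapshot changes.

import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';

// Hypothetical consumer: re-renders whenever the dashboard variables store publishes a new snapshot.
function DashboardVariableCount(): JSX.Element {
  const { dashboardVariables } = useDashboardVariables();
  return <span>{Object.keys(dashboardVariables).length} variables</span>;
}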
30 frontend/src/hooks/dashboard/useDashboardVariablesByType.ts Normal file
@@ -0,0 +1,30 @@
|
||||
import { useMemo } from 'react';
|
||||
import {
|
||||
IDashboardVariable,
|
||||
TVariableQueryType,
|
||||
} from 'types/api/dashboard/getAll';
|
||||
|
||||
import { useDashboardVariables } from './useDashboardVariables';
|
||||
|
||||
export function useDashboardVariablesByType(
|
||||
variableType: TVariableQueryType,
|
||||
returnType: 'values',
|
||||
): IDashboardVariable[];
|
||||
export function useDashboardVariablesByType(
|
||||
variableType: TVariableQueryType,
|
||||
returnType?: 'entries',
|
||||
): [string, IDashboardVariable][];
|
||||
export function useDashboardVariablesByType(
|
||||
variableType: TVariableQueryType,
|
||||
returnType?: 'values' | 'entries',
|
||||
): IDashboardVariable[] | [string, IDashboardVariable][] {
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
|
||||
return useMemo(() => {
|
||||
const entries = Object.entries(dashboardVariables || {}).filter(
|
||||
(entry): entry is [string, IDashboardVariable] =>
|
||||
Boolean(entry[1].name) && entry[1].type === variableType,
|
||||
);
|
||||
return returnType === 'values' ? entries.map(([, value]) => value) : entries;
|
||||
}, [dashboardVariables, variableType, returnType]);
|
||||
}
|
||||
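A rough sketch of the two overloads as they are used elsewhere in this diff (both calls assume a React component or hook body):

// Returns IDashboardVariable[] — the form used by the search, alert, and query-range hooks.
const dynamicVariables = useDashboardVariablesByType('DYNAMIC', 'values');

// Returns [string, IDashboardVariable][] — the form used by useDashboardVarConfig.
const dynamicEntries = useDashboardVariablesByType('DYNAMIC');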
@@ -5,7 +5,7 @@
|
||||
// return value should be a full text string, and a truncated text string (if max length is provided)
|
||||
|
||||
import { ReactNode, useCallback, useMemo } from 'react';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
|
||||
interface UseGetResolvedTextProps {
|
||||
text: string | ReactNode;
|
||||
@@ -23,23 +23,15 @@ interface ResolvedTextResult {
|
||||
// eslint-disable-next-line sonarjs/cognitive-complexity
|
||||
function useGetResolvedText({
|
||||
text,
|
||||
variables,
|
||||
maxLength,
|
||||
matcher = '$',
|
||||
maxValues = 2, // Default to showing 2 values before +n more
|
||||
}: UseGetResolvedTextProps): ResolvedTextResult {
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
const isString = typeof text === 'string';
|
||||
|
||||
const processedDashboardVariables = useMemo(() => {
|
||||
if (variables) {
|
||||
return variables;
|
||||
}
|
||||
if (!selectedDashboard?.data.variables) {
|
||||
return {};
|
||||
}
|
||||
|
||||
return Object.entries(selectedDashboard.data.variables).reduce<
|
||||
return Object.entries(dashboardVariables).reduce<
|
||||
Record<string, string | number | boolean>
|
||||
>((acc, [, value]) => {
|
||||
if (!value.name) {
|
||||
@@ -54,7 +46,7 @@ function useGetResolvedText({
|
||||
}
|
||||
return acc;
|
||||
}, {});
|
||||
}, [variables, selectedDashboard?.data.variables]);
|
||||
}, [dashboardVariables]);
|
||||
|
||||
// Process array values to add +n more notation for truncated text
|
||||
const processedVariables = useMemo(() => {
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
import { useMemo } from 'react';
|
||||
import { PANEL_GROUP_TYPES } from 'constants/queryBuilder';
|
||||
import { createDynamicVariableToWidgetsMap } from 'hooks/dashboard/utils';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { useDashboardVariablesByType } from './useDashboardVariablesByType';
|
||||
|
||||
/**
|
||||
* Hook to get a map of dynamic variable IDs to widget IDs that use them.
|
||||
* This is useful for determining which widgets need to be refreshed when a dynamic variable changes.
|
||||
*/
|
||||
export function useWidgetsByDynamicVariableId(): Record<string, string[]> {
|
||||
const dynamicVariables = useDashboardVariablesByType('DYNAMIC', 'values');
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
return useMemo(() => {
|
||||
const widgets =
|
||||
selectedDashboard?.data?.widgets?.filter(
|
||||
(widget) => widget.panelTypes !== PANEL_GROUP_TYPES.ROW,
|
||||
) || [];
|
||||
|
||||
return createDynamicVariableToWidgetsMap(
|
||||
dynamicVariables,
|
||||
widgets as Widgets[],
|
||||
);
|
||||
}, [selectedDashboard, dynamicVariables]);
|
||||
}
|
||||
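A minimal usage sketch, assuming a changedVariableId supplied by a variable-change handler (the variable name is hypothetical):

// Hypothetical: find the widgets that reference a dynamic variable that just changed.
const widgetsByVariableId = useWidgetsByDynamicVariableId();
const widgetIdsToRefresh = widgetsByVariableId[changedVariableId] ?? [];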
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useMemo } from 'react';
|
||||
import { useCallback } from 'react';
|
||||
import { useMutation } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
@@ -10,13 +10,15 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { MenuItemKeys } from 'container/GridCardLayout/WidgetHeader/contants';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { mapQueryDataFromApi } from 'lib/newQueryBuilder/queryBuilderMappers/mapQueryDataFromApi';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { IDashboardVariable, Widgets } from 'types/api/dashboard/getAll';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { getGraphType } from 'utils/getGraphType';
|
||||
|
||||
@@ -32,12 +34,10 @@ const useCreateAlerts = (widget?: Widgets, caller?: string): VoidFunction => {
|
||||
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const dynamicVariables = useMemo(
|
||||
() =>
|
||||
Object.values(selectedDashboard?.data?.variables || {})?.filter(
|
||||
(variable: IDashboardVariable) => variable.type === 'DYNAMIC',
|
||||
),
|
||||
[selectedDashboard],
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
const dashboardDynamicVariables = useDashboardVariablesByType(
|
||||
'DYNAMIC',
|
||||
'values',
|
||||
);
|
||||
|
||||
return useCallback(() => {
|
||||
@@ -68,9 +68,9 @@ const useCreateAlerts = (widget?: Widgets, caller?: string): VoidFunction => {
|
||||
globalSelectedInterval,
|
||||
graphType: getGraphType(widget.panelTypes),
|
||||
selectedTime: widget.timePreferance,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
variables: getDashboardVariables(dashboardVariables),
|
||||
originalGraphType: widget.panelTypes,
|
||||
dynamicVariables,
|
||||
dynamicVariables: dashboardDynamicVariables,
|
||||
});
|
||||
queryRangeMutation.mutate(queryPayload, {
|
||||
onSuccess: (data) => {
|
||||
@@ -104,10 +104,10 @@ const useCreateAlerts = (widget?: Widgets, caller?: string): VoidFunction => {
|
||||
globalSelectedInterval,
|
||||
notifications,
|
||||
queryRangeMutation,
|
||||
selectedDashboard?.data.variables,
|
||||
dashboardVariables,
|
||||
dashboardDynamicVariables,
|
||||
selectedDashboard?.data.version,
|
||||
widget,
|
||||
dynamicVariables,
|
||||
]);
|
||||
};
|
||||
|
||||
|
||||
@@ -4,14 +4,13 @@ import { isAxiosError } from 'axios';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { updateBarStepInterval } from 'container/GridCardLayout/utils';
|
||||
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
|
||||
import {
|
||||
GetMetricQueryRange,
|
||||
GetQueryResultsProps,
|
||||
} from 'lib/dashboard/getQueryResults';
|
||||
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { SuccessResponse, Warning } from 'types/api';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import APIError from 'types/api/error';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
@@ -43,14 +42,9 @@ export const useGetQueryRange: UseGetQueryRange = (
|
||||
headers,
|
||||
publicQueryMeta,
|
||||
) => {
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const dynamicVariables = useMemo(
|
||||
() =>
|
||||
Object.values(selectedDashboard?.data?.variables || {})?.filter(
|
||||
(variable: IDashboardVariable) => variable.type === 'DYNAMIC',
|
||||
),
|
||||
[selectedDashboard],
|
||||
const dashboardDynamicVariables = useDashboardVariablesByType(
|
||||
'DYNAMIC',
|
||||
'values',
|
||||
);
|
||||
|
||||
const newRequestData: GetQueryResultsProps = useMemo(() => {
|
||||
@@ -159,7 +153,7 @@ export const useGetQueryRange: UseGetQueryRange = (
|
||||
GetMetricQueryRange(
|
||||
modifiedRequestData,
|
||||
version,
|
||||
dynamicVariables,
|
||||
dashboardDynamicVariables,
|
||||
signal,
|
||||
headers,
|
||||
undefined,
|
||||
|
||||
@@ -2,9 +2,9 @@ import { UseQueryOptions, UseQueryResult } from 'react-query';
|
||||
import { useSelector } from 'react-redux';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
|
||||
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
|
||||
import { getDashboardVariables } from 'lib/dashbaordVariables/getDashboardVariables';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { SuccessResponse } from 'types/api';
|
||||
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
|
||||
@@ -28,7 +28,7 @@ export const useGetWidgetQueryRange = (
|
||||
|
||||
const { stagedQuery } = useQueryBuilder();
|
||||
|
||||
const { selectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
|
||||
return useGetQueryRange(
|
||||
{
|
||||
@@ -36,7 +36,7 @@ export const useGetWidgetQueryRange = (
|
||||
selectedTime,
|
||||
globalSelectedInterval,
|
||||
query: stagedQuery || initialQueriesMap.metrics,
|
||||
variables: getDashboardVariables(selectedDashboard?.data.variables),
|
||||
variables: getDashboardVariables(dashboardVariables),
|
||||
},
|
||||
version,
|
||||
{
|
||||
|
||||
@@ -4,9 +4,8 @@ import {
|
||||
getTagToken,
|
||||
} from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
|
||||
import { Option } from 'container/QueryBuilder/type';
|
||||
import { useDashboardVariablesByType } from 'hooks/dashboard/useDashboardVariablesByType';
|
||||
import { isEmpty } from 'lodash-es';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
|
||||
import { WhereClauseConfig } from './useAutoComplete';
|
||||
@@ -32,16 +31,12 @@ export const useOptions = (
|
||||
const operators = useOperators(key, keys);
|
||||
|
||||
// get matching dynamic variables to suggest
|
||||
const { selectedDashboard } = useDashboard();
|
||||
|
||||
const dynamicVariables = useMemo(
|
||||
() =>
|
||||
Object.values(selectedDashboard?.data?.variables || {})?.filter(
|
||||
(variable: IDashboardVariable) => variable.type === 'DYNAMIC',
|
||||
),
|
||||
[selectedDashboard],
|
||||
const dashboardDynamicVariables = useDashboardVariablesByType(
|
||||
'DYNAMIC',
|
||||
'values',
|
||||
);
|
||||
const variableName = dynamicVariables?.find(
|
||||
|
||||
const variableName = dashboardDynamicVariables?.find(
|
||||
(variable) => variable?.dynamicVariablesAttribute === key,
|
||||
)?.name;
|
||||
|
||||
@@ -193,7 +188,11 @@ export const useOptions = (
|
||||
(option, index, self) =>
|
||||
index ===
|
||||
self.findIndex(
|
||||
(o) => o.label === option.label && o.value === option.value, // to remove duplicate & empty options from list
|
||||
(o) =>
|
||||
o.label === option.label &&
|
||||
o.value === option.value &&
|
||||
(o.type || '') === (option.type || '') &&
|
||||
(o.dataType || '') === (option.dataType || ''), // keep entries with same key but different type/dataType
|
||||
) && option.value !== '',
|
||||
) || []
|
||||
).map((option) => {
|
||||
|
||||
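As a small illustration of the relaxed de-duplication (the option values are hypothetical): two options that share a label and value but differ in type, such as 'service.name' as a tag and as a resource attribute, previously collapsed into one entry; the updated predicate also compares type and dataType, so both are kept, matching the pipeline test above.

// Hypothetical options: the same key as tag and as resource are now both retained.
const options = [
  { label: 'service.name', value: 'service.name', type: 'tag', dataType: 'string' },
  { label: 'service.name', value: 'service.name', type: 'resource', dataType: 'string' },
];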
@@ -16,13 +16,19 @@ export function useResizeObserver<T extends HTMLElement>(
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
const handleResize = debounce((entries: ResizeObserverEntry[]) => {
|
||||
const entry = entries[0];
|
||||
if (entry) {
|
||||
const { width, height } = entry.contentRect;
|
||||
setSize({ width, height });
|
||||
}
|
||||
}, debounceTime);
|
||||
const handleResize = debounce(
|
||||
(entries: ResizeObserverEntry[]) => {
|
||||
const entry = entries[0];
|
||||
if (entry) {
|
||||
const { width, height } = entry.contentRect;
|
||||
setSize({ width, height });
|
||||
}
|
||||
},
|
||||
debounceTime,
|
||||
{
|
||||
leading: true,
|
||||
},
|
||||
);
|
||||
|
||||
const ro = new ResizeObserver(handleResize);
|
||||
const referenceNode = ref.current;
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
|
||||
import { IDashboardVariables } from 'providers/Dashboard/store/dashboardVariablesStore';
|
||||
import store from 'store';
|
||||
import { Dashboard } from 'types/api/dashboard/getAll';
|
||||
|
||||
export const getDashboardVariables = (
|
||||
variables?: Dashboard['data']['variables'],
|
||||
variables?: IDashboardVariables,
|
||||
): Record<string, unknown> => {
|
||||
if (!variables) {
|
||||
return {};
|
||||
|
||||
119 frontend/src/lib/uPlotV2/components/Legend/Legend.styles.scss Normal file
@@ -0,0 +1,119 @@
|
||||
.legend-container {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
|
||||
&:has(.legend-item-focused) .legend-item {
|
||||
opacity: 0.3;
|
||||
}
|
||||
|
||||
&:has(.legend-item-focused) .legend-item.legend-item-focused {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.legend-virtuoso-container {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
|
||||
&::-webkit-scrollbar {
|
||||
width: 0.3rem;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-track {
|
||||
background: transparent;
|
||||
}
|
||||
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--bg-slate-100);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.legend-row {
|
||||
padding: 4px 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
gap: 8px 16px;
|
||||
|
||||
&.legend-single-row {
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
&.legend-row-right {
|
||||
flex-direction: column;
|
||||
align-items: flex-start;
|
||||
justify-content: flex-start;
|
||||
}
|
||||
&.legend-row-bottom {
|
||||
flex-direction: row;
|
||||
}
|
||||
}
|
||||
|
||||
.legend-item {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 6px;
|
||||
padding: 4px 8px;
|
||||
border-radius: 4px;
|
||||
max-width: min(400px, 100%);
|
||||
cursor: pointer;
|
||||
|
||||
&.legend-item-off {
|
||||
opacity: 0.3;
|
||||
text-decoration: line-through;
|
||||
text-decoration-thickness: 1px;
|
||||
}
|
||||
|
||||
&.legend-item-focused {
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.legend-marker {
|
||||
border-width: 2px;
|
||||
border-radius: 50%;
|
||||
min-width: 11px;
|
||||
min-height: 11px;
|
||||
width: 11px;
|
||||
height: 11px;
|
||||
flex-shrink: 0;
|
||||
cursor: pointer;
|
||||
transition: transform 0.2s ease;
|
||||
position: relative;
|
||||
|
||||
&:hover {
|
||||
transform: scale(1.2);
|
||||
box-shadow: 0 0 0 2px rgba(255, 255, 255, 0.3);
|
||||
}
|
||||
|
||||
&:active {
|
||||
transform: scale(0.9);
|
||||
}
|
||||
}
|
||||
|
||||
.legend-label {
|
||||
flex: 1;
|
||||
font-size: 12px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
background: rgba(255, 255, 255, 0.05);
|
||||
}
|
||||
}
|
||||
|
||||
.lightMode {
|
||||
.legend-container {
|
||||
.legend-virtuoso-container {
|
||||
&::-webkit-scrollbar-thumb {
|
||||
background: var(--bg-vanilla-400);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
99 frontend/src/lib/uPlotV2/components/Legend/Legend.tsx Normal file
@@ -0,0 +1,99 @@
|
||||
import { useCallback, useMemo, useRef } from 'react';
|
||||
import { Virtuoso } from 'react-virtuoso';
|
||||
import { Tooltip as AntdTooltip } from 'antd';
|
||||
import cx from 'classnames';
|
||||
import { LegendItem } from 'lib/uPlotV2/config/types';
|
||||
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';
|
||||
import { LegendPosition } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { LegendProps } from '../types';
|
||||
import { useLegendActions } from './useLegendActions';
|
||||
|
||||
import './Legend.styles.scss';
|
||||
|
||||
const LEGENDS_PER_SET_DEFAULT = 5;
|
||||
|
||||
export default function Legend({
|
||||
position = LegendPosition.BOTTOM,
|
||||
config,
|
||||
legendsPerSet = LEGENDS_PER_SET_DEFAULT,
|
||||
}: LegendProps): JSX.Element {
|
||||
const {
|
||||
legendItemsMap,
|
||||
focusedSeriesIndex,
|
||||
setFocusedSeriesIndex,
|
||||
} = useLegendsSync({ config });
|
||||
const {
|
||||
onLegendClick,
|
||||
onLegendMouseMove,
|
||||
onLegendMouseLeave,
|
||||
} = useLegendActions({
|
||||
setFocusedSeriesIndex,
|
||||
focusedSeriesIndex,
|
||||
});
|
||||
const legendContainerRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
// Chunk legend items into rows of legendsPerSet items each
|
||||
const legendRows = useMemo(() => {
|
||||
const legendItems = Object.values(legendItemsMap);
|
||||
|
||||
return legendItems.reduce((acc: LegendItem[][], curr, i) => {
|
||||
if (i % legendsPerSet === 0) {
|
||||
acc.push([]);
|
||||
}
|
||||
acc[acc.length - 1].push(curr);
|
||||
return acc;
|
||||
}, [] as LegendItem[][]);
|
||||
}, [legendItemsMap, legendsPerSet]);
|
||||
|
||||
const renderLegendRow = useCallback(
|
||||
(rowIndex: number, row: LegendItem[]): JSX.Element => (
|
||||
<div
|
||||
key={rowIndex}
|
||||
className={cx(
|
||||
'legend-row',
|
||||
`legend-row-${position.toLowerCase()}`,
|
||||
legendRows.length === 1 && position === LegendPosition.BOTTOM
|
||||
? 'legend-single-row'
|
||||
: '',
|
||||
)}
|
||||
>
|
||||
{row.map((item) => (
|
||||
<AntdTooltip key={item.seriesIndex} title={item.label}>
|
||||
<div
|
||||
data-legend-item-id={item.seriesIndex}
|
||||
className={cx('legend-item', {
|
||||
'legend-item-off': !item.show,
|
||||
'legend-item-focused': focusedSeriesIndex === item.seriesIndex,
|
||||
})}
|
||||
>
|
||||
<div
|
||||
className="legend-marker"
|
||||
style={{ borderColor: String(item.color) }}
|
||||
data-is-legend-marker={true}
|
||||
/>
|
||||
<span className="legend-label">{item.label}</span>
|
||||
</div>
|
||||
</AntdTooltip>
|
||||
))}
|
||||
</div>
|
||||
),
|
||||
[focusedSeriesIndex, position, legendRows],
|
||||
);
|
||||
|
||||
return (
|
||||
<div
|
||||
ref={legendContainerRef}
|
||||
className="legend-container"
|
||||
onClick={onLegendClick}
|
||||
onMouseMove={onLegendMouseMove}
|
||||
onMouseLeave={onLegendMouseLeave}
|
||||
>
|
||||
<Virtuoso
|
||||
className="legend-virtuoso-container"
|
||||
data={legendRows}
|
||||
itemContent={(index, row): JSX.Element => renderLegendRow(index, row)}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
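
The `legendRows` memo in Legend.tsx groups legend items into fixed-size rows before handing them to Virtuoso. A minimal standalone sketch of that chunking step (the function and example values here are illustrative, not part of the diff):

// Illustrative sketch of the row-chunking used by `legendRows` above.
// `Item` stands in for LegendItem; `perRow` mirrors the `legendsPerSet` prop.
function chunkIntoRows<Item>(items: Item[], perRow: number): Item[][] {
  return items.reduce((rows: Item[][], item, i) => {
    if (i % perRow === 0) {
      rows.push([]); // start a new row every `perRow` items
    }
    rows[rows.length - 1].push(item);
    return rows;
  }, [] as Item[][]);
}

// chunkIntoRows(['a', 'b', 'c', 'd', 'e', 'f', 'g'], 5)
// => [['a', 'b', 'c', 'd', 'e'], ['f', 'g']]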
frontend/src/lib/uPlotV2/components/Legend/useLegendActions.ts (new file, 118 lines)
@@ -0,0 +1,118 @@
import {
|
||||
Dispatch,
|
||||
SetStateAction,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext';
|
||||
|
||||
export function useLegendActions({
|
||||
setFocusedSeriesIndex,
|
||||
focusedSeriesIndex,
|
||||
}: {
|
||||
setFocusedSeriesIndex: Dispatch<SetStateAction<number | null>>;
|
||||
focusedSeriesIndex: number | null;
|
||||
}): {
|
||||
onLegendClick: (e: React.MouseEvent<HTMLDivElement>) => void;
|
||||
onFocusSeries: (seriesIndex: number | null) => void;
|
||||
onLegendMouseMove: (e: React.MouseEvent<HTMLDivElement>) => void;
|
||||
onLegendMouseLeave: () => void;
|
||||
} {
|
||||
const {
|
||||
onFocusSeries: onFocusSeriesPlot,
|
||||
onToggleSeriesOnOff,
|
||||
onToggleSeriesVisibility,
|
||||
} = usePlotContext();
|
||||
|
||||
const rafId = useRef<number | null>(null); // requestAnimationFrame id
|
||||
|
||||
const getLegendItemIdFromEvent = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement>): string | undefined => {
|
||||
const target = e.target as HTMLElement | null;
|
||||
if (!target) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const legendItemElement = target.closest<HTMLElement>(
|
||||
'[data-legend-item-id]',
|
||||
);
|
||||
|
||||
return legendItemElement?.dataset.legendItemId;
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const onLegendClick = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement>): void => {
|
||||
const legendItemId = getLegendItemIdFromEvent(e);
|
||||
if (!legendItemId) {
|
||||
return;
|
||||
}
|
||||
const isLegendMarker = (e.target as HTMLElement).dataset.isLegendMarker;
|
||||
const seriesIndex = Number(legendItemId);
|
||||
|
||||
if (isLegendMarker) {
|
||||
onToggleSeriesOnOff(seriesIndex);
|
||||
return;
|
||||
}
|
||||
|
||||
onToggleSeriesVisibility(seriesIndex);
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[onToggleSeriesVisibility, onToggleSeriesOnOff, getLegendItemIdFromEvent],
|
||||
);
|
||||
|
||||
const onFocusSeries = useCallback(
|
||||
(seriesIndex: number | null): void => {
|
||||
if (rafId.current != null) {
|
||||
cancelAnimationFrame(rafId.current);
|
||||
}
|
||||
rafId.current = requestAnimationFrame(() => {
|
||||
setFocusedSeriesIndex(seriesIndex);
|
||||
onFocusSeriesPlot(seriesIndex);
|
||||
});
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[onFocusSeriesPlot],
|
||||
);
|
||||
|
||||
const onLegendMouseMove = (e: React.MouseEvent<HTMLDivElement>): void => {
|
||||
const legendItemId = getLegendItemIdFromEvent(e);
|
||||
const seriesIndex = legendItemId ? Number(legendItemId) : null;
|
||||
if (seriesIndex === focusedSeriesIndex) {
|
||||
return;
|
||||
}
|
||||
onFocusSeries(seriesIndex);
|
||||
};
|
||||
|
||||
const onLegendMouseLeave = useCallback(
|
||||
(): void => {
|
||||
// Cancel any pending RAF from onFocusSeries to prevent a race condition
|
||||
if (rafId.current != null) {
|
||||
cancelAnimationFrame(rafId.current);
|
||||
rafId.current = null;
|
||||
}
|
||||
setFocusedSeriesIndex(null);
|
||||
onFocusSeries(null);
|
||||
},
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
[onFocusSeries],
|
||||
);
|
||||
|
||||
// Cleanup pending animation frames on unmount
|
||||
useEffect(
|
||||
() => (): void => {
|
||||
if (rafId.current != null) {
|
||||
cancelAnimationFrame(rafId.current);
|
||||
}
|
||||
},
|
||||
[],
|
||||
);
|
||||
return {
|
||||
onLegendClick,
|
||||
onFocusSeries,
|
||||
onLegendMouseMove,
|
||||
onLegendMouseLeave,
|
||||
};
|
||||
}
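
useLegendActions resolves the clicked series through event delegation: one listener on the legend container walks up to the nearest `[data-legend-item-id]` element. A minimal sketch of that lookup outside React, assuming the same data attributes (the helper name is illustrative):

// Illustrative: resolve a series index from a delegated DOM event,
// mirroring getLegendItemIdFromEvent above.
function seriesIndexFromEvent(e: MouseEvent): number | null {
  const target = e.target as HTMLElement | null;
  const itemEl = target?.closest<HTMLElement>('[data-legend-item-id]');
  const id = itemEl?.dataset.legendItemId;
  return id ? Number(id) : null;
}

// document.querySelector('.legend-container')?.addEventListener('click', (e) => {
//   const idx = seriesIndexFromEvent(e as MouseEvent);
//   if (idx !== null) console.log('clicked series', idx);
// });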
@@ -0,0 +1,59 @@
.uplot-tooltip-container {
  font-family: 'Inter';
  font-size: 12px;
  background: var(--bg-ink-300);
  -webkit-font-smoothing: antialiased;
  color: var(--bg-vanilla-100);
  border-radius: 6px;
  padding: 1rem 1rem 0.5rem 1rem;
  border: 1px solid var(--bg-ink-100);
  display: flex;
  flex-direction: column;
  gap: 8px;

  &.lightMode {
    background: var(--bg-vanilla-100);
    color: var(--bg-ink-500);
    border: 1px solid var(--bg-vanilla-300);
  }

  .uplot-tooltip-header {
    font-size: 13px;
    font-weight: 500;
  }

  .uplot-tooltip-list-container {
    height: 100%;
    .uplot-tooltip-list {
      &::-webkit-scrollbar {
        width: 0.3rem;
      }
      &::-webkit-scrollbar-corner {
        background: transparent;
      }
      &::-webkit-scrollbar-thumb {
        background: rgb(136, 136, 136);
      }
    }
  }

  .uplot-tooltip-item {
    display: flex;
    align-items: center;
    gap: 8px;
    margin-bottom: 4px;

    .uplot-tooltip-item-marker {
      border-radius: 50%;
      border-width: 2px;
      width: 12px;
      height: 12px;
      flex-shrink: 0;
    }

    .uplot-tooltip-item-content {
      white-space: wrap;
      word-break: break-all;
    }
  }
}
frontend/src/lib/uPlotV2/components/Tooltip/Tooltip.tsx (new file, 94 lines)
@@ -0,0 +1,94 @@
import { useMemo } from 'react';
|
||||
import { Virtuoso } from 'react-virtuoso';
|
||||
import cx from 'classnames';
|
||||
import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
|
||||
import dayjs from 'dayjs';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
|
||||
import { TooltipContentItem, TooltipProps } from '../types';
|
||||
import { buildTooltipContent } from './utils';
|
||||
|
||||
import './Tooltip.styles.scss';
|
||||
|
||||
const TOOLTIP_LIST_MAX_HEIGHT = 330;
|
||||
const TOOLTIP_ITEM_HEIGHT = 38;
|
||||
|
||||
export default function Tooltip({
|
||||
seriesIndex,
|
||||
dataIndexes,
|
||||
uPlotInstance,
|
||||
timezone,
|
||||
yAxisUnit = '',
|
||||
decimalPrecision,
|
||||
}: TooltipProps): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const headerTitle = useMemo(() => {
|
||||
const data = uPlotInstance.data;
|
||||
const cursorIdx = uPlotInstance.cursor.idx;
|
||||
if (cursorIdx == null) {
|
||||
return null;
|
||||
}
|
||||
return dayjs(data[0][cursorIdx] * 1000)
|
||||
.tz(timezone)
|
||||
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
|
||||
}, [timezone, uPlotInstance.data, uPlotInstance.cursor.idx]);
|
||||
|
||||
const content = useMemo(
|
||||
(): TooltipContentItem[] =>
|
||||
buildTooltipContent({
|
||||
data: uPlotInstance.data,
|
||||
series: uPlotInstance.series,
|
||||
dataIndexes,
|
||||
activeSeriesIdx: seriesIndex,
|
||||
uPlotInstance,
|
||||
yAxisUnit,
|
||||
decimalPrecision,
|
||||
}),
|
||||
[uPlotInstance, seriesIndex, dataIndexes, yAxisUnit, decimalPrecision],
|
||||
);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cx(
|
||||
'uplot-tooltip-container',
|
||||
isDarkMode ? 'darkMode' : 'lightMode',
|
||||
)}
|
||||
>
|
||||
<div className="uplot-tooltip-header">
|
||||
<span>{headerTitle}</span>
|
||||
</div>
|
||||
<div
|
||||
style={{
|
||||
height: Math.min(
|
||||
content.length * TOOLTIP_ITEM_HEIGHT,
|
||||
TOOLTIP_LIST_MAX_HEIGHT,
|
||||
),
|
||||
minHeight: 0,
|
||||
}}
|
||||
>
|
||||
{content.length > 0 ? (
|
||||
<Virtuoso
|
||||
className="uplot-tooltip-list"
|
||||
data={content}
|
||||
defaultItemHeight={TOOLTIP_ITEM_HEIGHT}
|
||||
itemContent={(_, item): JSX.Element => (
|
||||
<div className="uplot-tooltip-item">
|
||||
<div
|
||||
className="uplot-tooltip-item-marker"
|
||||
style={{ borderColor: item.color }}
|
||||
data-is-legend-marker={true}
|
||||
/>
|
||||
<div
|
||||
className="uplot-tooltip-item-content"
|
||||
style={{ color: item.color, fontWeight: item.isActive ? 700 : 400 }}
|
||||
>
|
||||
{item.label}: {item.tooltipValue}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
/>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
frontend/src/lib/uPlotV2/components/Tooltip/utils.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import { PrecisionOption } from 'components/Graph/types';
import { getToolTipValue } from 'components/Graph/yAxisConfig';
import uPlot, { AlignedData, Series } from 'uplot';

import { TooltipContentItem } from '../types';

const FALLBACK_SERIES_COLOR = '#000000';

export function resolveSeriesColor(
  stroke: Series.Stroke | undefined,
  u: uPlot,
  seriesIdx: number,
): string {
  if (typeof stroke === 'function') {
    return String(stroke(u, seriesIdx));
  }
  if (typeof stroke === 'string') {
    return stroke;
  }
  return FALLBACK_SERIES_COLOR;
}

export function buildTooltipContent({
  data,
  series,
  dataIndexes,
  activeSeriesIdx,
  uPlotInstance,
  yAxisUnit,
  decimalPrecision,
}: {
  data: AlignedData;
  series: Series[];
  dataIndexes: Array<number | null>;
  activeSeriesIdx: number | null;
  uPlotInstance: uPlot;
  yAxisUnit: string;
  decimalPrecision?: PrecisionOption;
}): TooltipContentItem[] {
  const active: TooltipContentItem[] = [];
  const rest: TooltipContentItem[] = [];

  for (let idx = 1; idx < series.length; idx += 1) {
    const s = series[idx];
    if (!s?.show) {
      continue;
    }

    const dataIdx = dataIndexes[idx];
    // Skip series with no data at the current cursor position
    if (dataIdx === null) {
      continue;
    }

    const raw = data[idx]?.[dataIdx];
    const value = Number(raw);
    const displayValue = Number.isNaN(value) ? 0 : value;
    const isActive = idx === activeSeriesIdx;

    const item: TooltipContentItem = {
      label: String(s.label ?? ''),
      value: displayValue,
      tooltipValue: getToolTipValue(displayValue, yAxisUnit, decimalPrecision),
      color: resolveSeriesColor(s.stroke, uPlotInstance, idx),
      isActive,
    };

    if (isActive) {
      active.push(item);
    } else {
      rest.push(item);
    }
  }

  return [...active, ...rest];
}
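
A hedged usage sketch of `buildTooltipContent`, showing the active-series-first ordering; the data, series, and stubbed uPlot instance below are assumptions for illustration only:

// Illustrative only: the uPlot instance is stubbed because resolveSeriesColor
// consults it only when a series stroke is a function.
import uPlot, { AlignedData, Series } from 'uplot';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';

const data: AlignedData = [
  [1700000000, 1700000060], // timestamps
  [10, 12], // series A
  [3, null], // series B
];
const series = [
  {}, // placeholder for the x (timestamp) series
  { label: 'A', show: true, stroke: '#7190f9' },
  { label: 'B', show: true, stroke: '#f4a261' },
] as Series[];

const items = buildTooltipContent({
  data,
  series,
  dataIndexes: [null, 1, 1], // cursor data index per series
  activeSeriesIdx: 2,
  uPlotInstance: ({} as unknown) as uPlot, // stub; see note above
  yAxisUnit: '',
});
// items[0] is series B (the active one), even though it appears later in `series`.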
frontend/src/lib/uPlotV2/components/UPlotChart.tsx (new file, 199 lines)
@@ -0,0 +1,199 @@
import { useCallback, useEffect, useMemo, useRef } from 'react';
|
||||
import * as Sentry from '@sentry/react';
|
||||
import { Typography } from 'antd';
|
||||
import { isEqual } from 'lodash-es';
|
||||
import { LineChart } from 'lucide-react';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import uPlot, { AlignedData, Options } from 'uplot';
|
||||
|
||||
import { UPlotConfigBuilder } from '../config/UPlotConfigBuilder';
|
||||
import { usePlotContext } from '../context/PlotContext';
|
||||
import { UPlotChartProps } from './types';
|
||||
|
||||
/**
|
||||
* Check if dimensions have changed
|
||||
*/
|
||||
function sameDimensions(prev: UPlotChartProps, next: UPlotChartProps): boolean {
|
||||
return next.width === prev.width && next.height === prev.height;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if data has changed (value equality)
|
||||
*/
|
||||
function sameData(prev: UPlotChartProps, next: UPlotChartProps): boolean {
|
||||
return isEqual(next.data, prev.data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if config builder has changed (value equality)
|
||||
*/
|
||||
function sameConfig(prev: UPlotChartProps, next: UPlotChartProps): boolean {
|
||||
return isEqual(next.config, prev.config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Plot component for rendering uPlot charts using the builder pattern
|
||||
* Manages uPlot instance lifecycle and handles updates efficiently
|
||||
*/
|
||||
export default function UPlotChart({
|
||||
config,
|
||||
data,
|
||||
width,
|
||||
height,
|
||||
plotRef,
|
||||
onDestroy,
|
||||
children,
|
||||
'data-testid': testId = 'uplot-main-div',
|
||||
}: UPlotChartProps): JSX.Element {
|
||||
const { setPlotContextInitialState } = usePlotContext();
|
||||
const containerRef = useRef<HTMLDivElement>(null);
|
||||
const plotInstanceRef = useRef<uPlot | null>(null);
|
||||
const prevPropsRef = useRef<UPlotChartProps | null>(null);
|
||||
const configUsedForPlotRef = useRef<UPlotConfigBuilder | null>(null);
|
||||
|
||||
/**
|
||||
* Destroy the existing plot instance if present.
|
||||
*/
|
||||
const destroyPlot = useCallback((): void => {
|
||||
if (plotInstanceRef.current) {
|
||||
onDestroy?.(plotInstanceRef.current);
|
||||
// Clean up the config builder that was used to create this plot (not the current prop)
|
||||
if (configUsedForPlotRef.current) {
|
||||
configUsedForPlotRef.current.destroy();
|
||||
}
|
||||
configUsedForPlotRef.current = null;
|
||||
|
||||
plotInstanceRef.current.destroy();
|
||||
plotInstanceRef.current = null;
|
||||
setPlotContextInitialState({ uPlotInstance: null });
|
||||
plotRef?.(null);
|
||||
}
|
||||
}, [onDestroy, plotRef, setPlotContextInitialState]);
|
||||
|
||||
/**
|
||||
* Initialize or reinitialize the plot
|
||||
*/
|
||||
const createPlot = useCallback(() => {
|
||||
// Destroy existing plot first
|
||||
destroyPlot();
|
||||
|
||||
if (!containerRef.current || width === 0 || height === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Build configuration from builder
|
||||
const configOptions = config.getConfig();
|
||||
|
||||
// Merge with dimensions
|
||||
const plotConfig: Options = {
|
||||
width: Math.floor(width),
|
||||
height: Math.floor(height),
|
||||
...configOptions,
|
||||
} as Options;
|
||||
|
||||
// Create new plot instance
|
||||
const plot = new uPlot(plotConfig, data as AlignedData, containerRef.current);
|
||||
|
||||
if (plotRef) {
|
||||
plotRef(plot);
|
||||
}
|
||||
setPlotContextInitialState({
|
||||
uPlotInstance: plot,
|
||||
widgetId: config.getWidgetId(),
|
||||
});
|
||||
|
||||
plotInstanceRef.current = plot;
|
||||
configUsedForPlotRef.current = config;
|
||||
}, [
|
||||
config,
|
||||
data,
|
||||
width,
|
||||
height,
|
||||
plotRef,
|
||||
destroyPlot,
|
||||
setPlotContextInitialState,
|
||||
]);
|
||||
|
||||
/**
|
||||
* Destroy plot when data becomes empty to prevent memory leaks.
|
||||
* When the "No Data" UI is shown, the container div is unmounted,
|
||||
* but without this effect the plot instance would remain in memory.
|
||||
*/
|
||||
const isDataEmpty = useMemo(() => {
|
||||
return !!(data && data[0] && data[0].length === 0);
|
||||
}, [data]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isDataEmpty) {
|
||||
destroyPlot();
|
||||
}
|
||||
}, [isDataEmpty, destroyPlot]);
|
||||
|
||||
/**
|
||||
* Handle initialization and prop changes
|
||||
*/
|
||||
useEffect(() => {
|
||||
const prevProps = prevPropsRef.current;
|
||||
const currentProps = { config, data, width, height };
|
||||
|
||||
// First render - initialize
|
||||
if (!prevProps) {
|
||||
createPlot();
|
||||
prevPropsRef.current = currentProps;
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if the plot instance's container has been unmounted (e.g., after "No Data" state)
|
||||
// If so, we need to recreate the plot with the new container
|
||||
const isPlotOrphaned =
|
||||
plotInstanceRef.current &&
|
||||
plotInstanceRef.current.root !== containerRef.current;
|
||||
|
||||
// Update dimensions without reinitializing if only size changed
|
||||
if (
|
||||
!sameDimensions(prevProps, currentProps) &&
|
||||
plotInstanceRef.current &&
|
||||
!isPlotOrphaned
|
||||
) {
|
||||
plotInstanceRef.current.setSize({
|
||||
width: Math.floor(width),
|
||||
height: Math.floor(height),
|
||||
});
|
||||
}
|
||||
|
||||
// Reinitialize if config changed or if the plot was orphaned (container changed)
|
||||
if (!sameConfig(prevProps, currentProps) || isPlotOrphaned) {
|
||||
createPlot();
|
||||
}
|
||||
// Update data if only data changed
|
||||
else if (!sameData(prevProps, currentProps) && plotInstanceRef.current) {
|
||||
plotInstanceRef.current.setData(data as AlignedData);
|
||||
}
|
||||
|
||||
prevPropsRef.current = currentProps;
|
||||
}, [config, data, width, height, createPlot]);
|
||||
|
||||
if (isDataEmpty) {
|
||||
return (
|
||||
<div
|
||||
className="uplot-no-data not-found"
|
||||
style={{
|
||||
width: `${width}px`,
|
||||
height: `${height}px`,
|
||||
}}
|
||||
>
|
||||
<LineChart size={48} strokeWidth={0.5} />
|
||||
<Typography>No Data</Typography>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<div style={{ position: 'relative' }}>
|
||||
<div ref={containerRef} data-testid={testId} />
|
||||
{children}
|
||||
</div>
|
||||
</Sentry.ErrorBoundary>
|
||||
);
|
||||
}
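
A minimal rendering sketch for `UPlotChart`, assuming a prebuilt `UPlotConfigBuilder`, fixed dimensions, and a surrounding `PlotContextProvider` (the wrapper component and prop names are illustrative):

// Illustrative usage of UPlotChart; `builder` and `alignedData` are assumptions.
import { AlignedData } from 'uplot';
import UPlotChart from 'lib/uPlotV2/components/UPlotChart';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

function ExamplePanel({
  builder,
  alignedData,
}: {
  builder: UPlotConfigBuilder;
  alignedData: AlignedData;
}): JSX.Element {
  return (
    <UPlotChart
      config={builder}
      data={alignedData}
      width={600}
      height={300}
      plotRef={(u): void => {
        // u is the live uPlot instance on create, null after destroy
        if (u) console.log('plot ready', u.width, u.height);
      }}
    />
  );
}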
frontend/src/lib/uPlotV2/components/types.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { ReactNode } from 'react';
|
||||
import { PrecisionOption } from 'components/Graph/types';
|
||||
import uPlot from 'uplot';
|
||||
|
||||
import { UPlotConfigBuilder } from '../config/UPlotConfigBuilder';
|
||||
|
||||
/**
|
||||
* Props for the Plot component
|
||||
*/
|
||||
export interface UPlotChartProps {
|
||||
/**
|
||||
* uPlot configuration builder
|
||||
*/
|
||||
config: UPlotConfigBuilder;
|
||||
|
||||
/**
|
||||
* Chart data in uPlot.AlignedData format
|
||||
*/
|
||||
data: uPlot.AlignedData;
|
||||
|
||||
/**
|
||||
* Chart width in pixels
|
||||
*/
|
||||
width: number;
|
||||
|
||||
/**
|
||||
* Chart height in pixels
|
||||
*/
|
||||
height: number;
|
||||
|
||||
/**
|
||||
* Optional callback when plot instance is created or destroyed.
|
||||
* Called with the uPlot instance on create, and with null when the plot is destroyed.
|
||||
*/
|
||||
plotRef?: (u: uPlot | null) => void;
|
||||
|
||||
/**
|
||||
* Optional callback when plot is destroyed
|
||||
*/
|
||||
onDestroy?: (u: uPlot) => void;
|
||||
|
||||
/**
|
||||
* Children elements (typically plugins)
|
||||
*/
|
||||
children?: ReactNode;
|
||||
|
||||
/**
|
||||
* Test ID for the container div
|
||||
*/
|
||||
'data-testid'?: string;
|
||||
}
|
||||
|
||||
export interface TooltipRenderArgs {
|
||||
uPlotInstance: uPlot;
|
||||
dataIndexes: Array<number | null>;
|
||||
seriesIndex: number | null;
|
||||
isPinned: boolean;
|
||||
dismiss: () => void;
|
||||
viaSync: boolean;
|
||||
}
|
||||
|
||||
export type TooltipProps = TooltipRenderArgs & {
|
||||
timezone: string;
|
||||
yAxisUnit?: string;
|
||||
decimalPrecision?: PrecisionOption;
|
||||
};
|
||||
|
||||
export enum LegendPosition {
|
||||
BOTTOM = 'bottom',
|
||||
RIGHT = 'right',
|
||||
}
|
||||
export interface LegendConfig {
|
||||
position: LegendPosition;
|
||||
}
|
||||
export interface LegendProps {
|
||||
position?: LegendPosition;
|
||||
config: UPlotConfigBuilder;
|
||||
legendsPerSet?: number;
|
||||
}
|
||||
|
||||
export interface TooltipContentItem {
|
||||
label: string;
|
||||
value: number;
|
||||
tooltipValue: string;
|
||||
color: string;
|
||||
isActive: boolean;
|
||||
}
frontend/src/lib/uPlotV2/config/UPlotAxisBuilder.ts (new file, 284 lines)
@@ -0,0 +1,284 @@
import { getToolTipValue } from 'components/Graph/yAxisConfig';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import uPlot, { Axis } from 'uplot';
|
||||
|
||||
import { uPlotXAxisValuesFormat } from '../../uPlotLib/utils/constants';
|
||||
import getGridColor from '../../uPlotLib/utils/getGridColor';
|
||||
import { AxisProps, ConfigBuilder } from './types';
|
||||
|
||||
const PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT = [
|
||||
PANEL_TYPES.TIME_SERIES,
|
||||
PANEL_TYPES.BAR,
|
||||
PANEL_TYPES.PIE,
|
||||
];
|
||||
|
||||
/**
|
||||
* Builder for uPlot axis configuration
|
||||
* Handles creation and merging of axis settings
|
||||
* Based on getAxes utility function patterns
|
||||
*/
|
||||
export class UPlotAxisBuilder extends ConfigBuilder<AxisProps, Axis> {
|
||||
/**
|
||||
* Build grid configuration based on theme and scale type.
|
||||
* Supports partial grid config: provided values override defaults.
|
||||
*/
|
||||
private buildGridConfig(): uPlot.Axis.Grid | undefined {
|
||||
const { grid, isDarkMode, isLogScale } = this.props;
|
||||
|
||||
const defaultStroke = getGridColor(isDarkMode ?? false);
|
||||
const defaultWidth = isLogScale ? 0.1 : 0.2;
|
||||
const defaultShow = true;
|
||||
|
||||
// Merge partial or full grid config with defaults
|
||||
if (grid) {
|
||||
return {
|
||||
stroke: grid.stroke ?? defaultStroke,
|
||||
width: grid.width ?? defaultWidth,
|
||||
show: grid.show ?? defaultShow,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
stroke: defaultStroke,
|
||||
width: defaultWidth,
|
||||
show: defaultShow,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build ticks configuration
|
||||
*/
|
||||
private buildTicksConfig(): uPlot.Axis.Ticks | undefined {
|
||||
const { ticks } = this.props;
|
||||
|
||||
// If explicit ticks config provided, use it
|
||||
if (ticks) {
|
||||
return ticks;
|
||||
}
|
||||
|
||||
// Build default ticks config
|
||||
return {
|
||||
width: 0.3,
|
||||
show: true,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build values formatter for X-axis (time)
|
||||
*/
|
||||
private buildXAxisValuesFormatter(): uPlot.Axis.Values | undefined {
|
||||
const { panelType } = this.props;
|
||||
|
||||
if (
|
||||
panelType &&
|
||||
PANEL_TYPES_WITH_X_AXIS_DATETIME_FORMAT.includes(panelType)
|
||||
) {
|
||||
return uPlotXAxisValuesFormat as uPlot.Axis.Values;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build values formatter for Y-axis (values with units)
|
||||
*/
|
||||
private buildYAxisValuesFormatter(): uPlot.Axis.Values {
|
||||
const { yAxisUnit, decimalPrecision } = this.props;
|
||||
|
||||
return (_, t): string[] =>
|
||||
t.map((v) => {
|
||||
if (v === null || v === undefined || Number.isNaN(v)) {
|
||||
return '';
|
||||
}
|
||||
const value = getToolTipValue(v.toString(), yAxisUnit, decimalPrecision);
|
||||
return `${value}`;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Build values formatter based on axis type and props
|
||||
*/
|
||||
private buildValuesFormatter(): uPlot.Axis.Values | undefined {
|
||||
const { values, scaleKey } = this.props;
|
||||
|
||||
// If explicit values formatter provided, use it
|
||||
if (values) {
|
||||
return values;
|
||||
}
|
||||
|
||||
// Route to appropriate formatter based on scale key
|
||||
return scaleKey === 'x'
|
||||
? this.buildXAxisValuesFormatter()
|
||||
: scaleKey === 'y'
|
||||
? this.buildYAxisValuesFormatter()
|
||||
: undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate axis size from existing size property
|
||||
*/
|
||||
private getExistingAxisSize(
|
||||
self: uPlot,
|
||||
axis: Axis,
|
||||
values: string[] | undefined,
|
||||
axisIdx: number,
|
||||
cycleNum: number,
|
||||
): number {
|
||||
const internalSize = (axis as { _size?: number })._size;
|
||||
if (internalSize !== undefined) {
|
||||
return internalSize;
|
||||
}
|
||||
|
||||
const existingSize = axis.size;
|
||||
if (typeof existingSize === 'function') {
|
||||
return existingSize(self, values ?? [], axisIdx, cycleNum);
|
||||
}
|
||||
|
||||
return existingSize ?? 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate text width for longest value
|
||||
*/
|
||||
private calculateTextWidth(
|
||||
self: uPlot,
|
||||
axis: Axis,
|
||||
values: string[] | undefined,
|
||||
): number {
|
||||
if (!values || values.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Find longest value
|
||||
const longestVal = values.reduce(
|
||||
(acc, val) => (val.length > acc.length ? val : acc),
|
||||
'',
|
||||
);
|
||||
|
||||
if (longestVal === '' || !axis.font?.[0]) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line prefer-destructuring, no-param-reassign
|
||||
self.ctx.font = axis.font[0];
|
||||
return self.ctx.measureText(longestVal).width / devicePixelRatio;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build Y-axis dynamic size calculator
|
||||
*/
|
||||
private buildYAxisSizeCalculator(): uPlot.Axis.Size {
|
||||
return (
|
||||
self: uPlot,
|
||||
values: string[] | undefined,
|
||||
axisIdx: number,
|
||||
cycleNum: number,
|
||||
): number => {
|
||||
const axis = self.axes[axisIdx];
|
||||
|
||||
// After the first sizing cycle, reuse the existing size to force convergence
|
||||
if (cycleNum > 1) {
|
||||
return this.getExistingAxisSize(self, axis, values, axisIdx, cycleNum);
|
||||
}
|
||||
|
||||
const gap = this.props.gap ?? 5;
|
||||
let axisSize = (axis.ticks?.size ?? 0) + gap;
|
||||
axisSize += this.calculateTextWidth(self, axis, values);
|
||||
|
||||
return Math.ceil(axisSize);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build dynamic size calculator for Y-axis
|
||||
*/
|
||||
private buildSizeCalculator(): uPlot.Axis.Size | undefined {
|
||||
const { size, scaleKey } = this.props;
|
||||
|
||||
// If explicit size calculator provided, use it
|
||||
if (size) {
|
||||
return size;
|
||||
}
|
||||
|
||||
// Y-axis needs dynamic sizing based on text width
|
||||
if (scaleKey === 'y') {
|
||||
return this.buildYAxisSizeCalculator();
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build stroke color based on props
|
||||
*/
|
||||
private buildStrokeColor(): string | undefined {
|
||||
const { stroke, isDarkMode } = this.props;
|
||||
|
||||
if (stroke !== undefined) {
|
||||
return stroke;
|
||||
}
|
||||
|
||||
if (isDarkMode !== undefined) {
|
||||
return isDarkMode ? 'white' : 'black';
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
getConfig(): Axis {
|
||||
const {
|
||||
scaleKey,
|
||||
label,
|
||||
show = true,
|
||||
side = 2, // bottom by default
|
||||
space,
|
||||
gap = 5, // default gap is 5
|
||||
} = this.props;
|
||||
|
||||
const grid = this.buildGridConfig();
|
||||
const ticks = this.buildTicksConfig();
|
||||
const values = this.buildValuesFormatter();
|
||||
const size = this.buildSizeCalculator();
|
||||
const stroke = this.buildStrokeColor();
|
||||
|
||||
const axisConfig: Axis = {
|
||||
scale: scaleKey,
|
||||
show,
|
||||
side,
|
||||
};
|
||||
|
||||
// Add properties conditionally
|
||||
if (label) {
|
||||
axisConfig.label = label;
|
||||
}
|
||||
if (stroke) {
|
||||
axisConfig.stroke = stroke;
|
||||
}
|
||||
if (grid) {
|
||||
axisConfig.grid = grid;
|
||||
}
|
||||
if (ticks) {
|
||||
axisConfig.ticks = ticks;
|
||||
}
|
||||
if (values) {
|
||||
axisConfig.values = values;
|
||||
}
|
||||
if (gap !== undefined) {
|
||||
axisConfig.gap = gap;
|
||||
}
|
||||
if (space !== undefined) {
|
||||
axisConfig.space = space;
|
||||
}
|
||||
if (size) {
|
||||
axisConfig.size = size;
|
||||
}
|
||||
|
||||
return axisConfig;
|
||||
}
|
||||
|
||||
merge(props: Partial<AxisProps>): void {
|
||||
this.props = { ...this.props, ...props };
|
||||
}
|
||||
}
|
||||
|
||||
export type { AxisProps };
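
A hedged sketch of registering axes through `AxisProps`; the widget id, theme flag, and unit string are placeholders:

// Illustrative: registering X (time) and Y (value) axes on a builder.
import { PANEL_TYPES } from 'constants/queryBuilder';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

const builder = new UPlotConfigBuilder({ widgetId: 'example-widget' });

builder.addAxis({
  scaleKey: 'x',
  isDarkMode: true,
  panelType: PANEL_TYPES.TIME_SERIES, // picks the datetime tick formatter
});

builder.addAxis({
  scaleKey: 'y',
  isDarkMode: true,
  yAxisUnit: 'ms', // placeholder unit string
  // the 'y' scale gets the dynamic size calculator (tick size + gap + measured text width)
});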
frontend/src/lib/uPlotV2/config/UPlotConfigBuilder.ts (new file, 293 lines)
@@ -0,0 +1,293 @@
import { getStoredSeriesVisibility } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
|
||||
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
|
||||
import { thresholdsDrawHook } from 'lib/uPlotV2/hooks/useThresholdsDrawHook';
|
||||
import { merge } from 'lodash-es';
|
||||
import noop from 'lodash-es/noop';
|
||||
import uPlot, { Cursor, Hooks, Options } from 'uplot';
|
||||
|
||||
import {
|
||||
ConfigBuilder,
|
||||
ConfigBuilderProps,
|
||||
DEFAULT_CURSOR_CONFIG,
|
||||
DEFAULT_PLOT_CONFIG,
|
||||
LegendItem,
|
||||
} from './types';
|
||||
import { AxisProps, UPlotAxisBuilder } from './UPlotAxisBuilder';
|
||||
import { ScaleProps, UPlotScaleBuilder } from './UPlotScaleBuilder';
|
||||
import { SeriesProps, UPlotSeriesBuilder } from './UPlotSeriesBuilder';
|
||||
|
||||
/**
|
||||
* Type definitions for uPlot option objects
|
||||
*/
|
||||
type LegendConfig = {
|
||||
show?: boolean;
|
||||
live?: boolean;
|
||||
isolate?: boolean;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
|
||||
/**
|
||||
* Main builder orchestrator for uPlot configuration
|
||||
* Manages axes, scales, series, and hooks in a composable way
|
||||
*/
|
||||
export class UPlotConfigBuilder extends ConfigBuilder<
|
||||
ConfigBuilderProps,
|
||||
Partial<Options>
|
||||
> {
|
||||
series: UPlotSeriesBuilder[] = [];
|
||||
|
||||
private axes: Record<string, UPlotAxisBuilder> = {};
|
||||
|
||||
readonly scales: UPlotScaleBuilder[] = [];
|
||||
|
||||
private bands: uPlot.Band[] = [];
|
||||
|
||||
private cursor: Cursor | undefined;
|
||||
|
||||
private hooks: Hooks.Arrays = {};
|
||||
|
||||
private plugins: uPlot.Plugin[] = [];
|
||||
|
||||
private padding: [number, number, number, number] | undefined;
|
||||
|
||||
private legend: LegendConfig | undefined;
|
||||
|
||||
private focus: uPlot.Focus | undefined;
|
||||
|
||||
private select: uPlot.Select | undefined;
|
||||
|
||||
private thresholds: Record<string, ThresholdsDrawHookOptions> = {};
|
||||
|
||||
private tzDate: ((timestamp: number) => Date) | undefined;
|
||||
|
||||
private widgetId: string | undefined;
|
||||
|
||||
private onDragSelect: (startTime: number, endTime: number) => void;
|
||||
|
||||
private cleanups: Array<() => void> = [];
|
||||
|
||||
constructor(args?: ConfigBuilderProps) {
|
||||
super(args ?? {});
|
||||
const { widgetId, onDragSelect, tzDate } = args ?? {};
|
||||
if (widgetId) {
|
||||
this.widgetId = widgetId;
|
||||
}
|
||||
|
||||
if (tzDate) {
|
||||
this.tzDate = tzDate;
|
||||
}
|
||||
|
||||
this.onDragSelect = noop;
|
||||
|
||||
if (onDragSelect) {
|
||||
this.onDragSelect = onDragSelect;
|
||||
// Add a hook to handle the select event
|
||||
const cleanup = this.addHook('setSelect', (self: uPlot): void => {
|
||||
const selection = self.select;
|
||||
// Only trigger onDragSelect when there's an actual drag range (width > 0)
|
||||
// A click without dragging produces width === 0, which should be ignored
|
||||
if (selection && selection.width > 0) {
|
||||
const startTime = self.posToVal(selection.left, 'x');
|
||||
const endTime = self.posToVal(selection.left + selection.width, 'x');
|
||||
this.onDragSelect(startTime * 1000, endTime * 1000);
|
||||
}
|
||||
});
|
||||
this.cleanups.push(cleanup);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add or merge an axis configuration
|
||||
*/
|
||||
addAxis(props: AxisProps): void {
|
||||
const { scaleKey } = props;
|
||||
if (this.axes[scaleKey]) {
|
||||
this.axes[scaleKey].merge?.(props);
|
||||
return;
|
||||
}
|
||||
this.axes[scaleKey] = new UPlotAxisBuilder(props);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add or merge a scale configuration
|
||||
*/
|
||||
addScale(props: ScaleProps): void {
|
||||
const current = this.scales.find((v) => v.props.scaleKey === props.scaleKey);
|
||||
if (current) {
|
||||
current.merge?.(props);
|
||||
return;
|
||||
}
|
||||
this.scales.push(new UPlotScaleBuilder(props));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a series configuration
|
||||
*/
|
||||
addSeries(props: SeriesProps): void {
|
||||
this.series.push(new UPlotSeriesBuilder(props));
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a hook for extensibility
|
||||
*/
|
||||
addHook<T extends keyof Hooks.Defs>(type: T, hook: Hooks.Defs[T]): () => void {
|
||||
if (!this.hooks[type]) {
|
||||
this.hooks[type] = [];
|
||||
}
|
||||
(this.hooks[type] as Hooks.Defs[T][]).push(hook);
|
||||
|
||||
// Return a function to remove the hook when the component unmounts
|
||||
return (): void => {
|
||||
const idx = (this.hooks[type] as Hooks.Defs[T][]).indexOf(hook);
|
||||
if (idx !== -1) {
|
||||
(this.hooks[type] as Hooks.Defs[T][]).splice(idx, 1);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a plugin
|
||||
*/
|
||||
addPlugin(plugin: uPlot.Plugin): void {
|
||||
this.plugins.push(plugin);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add thresholds configuration
|
||||
*/
|
||||
addThresholds(options: ThresholdsDrawHookOptions): void {
|
||||
if (!this.thresholds[options.scaleKey]) {
|
||||
this.thresholds[options.scaleKey] = options;
|
||||
const cleanup = this.addHook('draw', thresholdsDrawHook(options));
|
||||
this.cleanups.push(cleanup);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set bands for stacked charts
|
||||
*/
|
||||
setBands(bands: uPlot.Band[]): void {
|
||||
this.bands = bands;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set cursor configuration
|
||||
*/
|
||||
setCursor(cursor: Cursor): void {
|
||||
this.cursor = merge({}, this.cursor, cursor);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set padding
|
||||
*/
|
||||
setPadding(padding: [number, number, number, number]): void {
|
||||
this.padding = padding;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set legend configuration
|
||||
*/
|
||||
setLegend(legend: LegendConfig): void {
|
||||
this.legend = legend;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set focus configuration
|
||||
*/
|
||||
setFocus(focus: uPlot.Focus): void {
|
||||
this.focus = focus;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set select configuration
|
||||
*/
|
||||
setSelect(select: uPlot.Select): void {
|
||||
this.select = select;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set timezone date function
|
||||
*/
|
||||
setTzDate(tzDate: (timestamp: number) => Date): void {
|
||||
this.tzDate = tzDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get legend items with visibility state restored from localStorage if available
|
||||
*/
|
||||
getLegendItems(): Record<number, LegendItem> {
|
||||
const visibilityMap = this.widgetId
|
||||
? getStoredSeriesVisibility(this.widgetId)
|
||||
: null;
|
||||
return this.series.reduce((acc, s: UPlotSeriesBuilder, index: number) => {
|
||||
const seriesConfig = s.getConfig();
|
||||
const label = seriesConfig.label ?? '';
|
||||
const seriesIndex = index + 1; // +1 because the first series is the timestamp
|
||||
|
||||
// Priority: stored visibility > series config > default (true)
|
||||
const show = visibilityMap?.get(label) ?? seriesConfig.show ?? true;
|
||||
|
||||
acc[seriesIndex] = {
|
||||
seriesIndex,
|
||||
color: seriesConfig.stroke,
|
||||
label,
|
||||
show,
|
||||
};
|
||||
|
||||
return acc;
|
||||
}, {} as Record<number, LegendItem>);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all hooks and cleanup functions
|
||||
*/
|
||||
destroy(): void {
|
||||
this.cleanups.forEach((cleanup) => cleanup());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the widget id
|
||||
*/
|
||||
getWidgetId(): string | undefined {
|
||||
return this.widgetId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the final uPlot.Options configuration
|
||||
*/
|
||||
getConfig(): Partial<Options> {
|
||||
const config: Partial<Options> = {
|
||||
...DEFAULT_PLOT_CONFIG,
|
||||
};
|
||||
|
||||
config.series = [
|
||||
{ value: (): string => '' }, // Base series for timestamp
|
||||
...this.series.map((s) => s.getConfig()),
|
||||
];
|
||||
config.axes = Object.values(this.axes).map((a) => a.getConfig());
|
||||
config.scales = this.scales.reduce(
|
||||
(acc, s) => ({ ...acc, ...s.getConfig() }),
|
||||
{} as Record<string, uPlot.Scale>,
|
||||
);
|
||||
|
||||
config.hooks = this.hooks;
|
||||
config.select = this.select;
|
||||
|
||||
config.cursor = merge({}, DEFAULT_CURSOR_CONFIG, this.cursor);
|
||||
config.tzDate = this.tzDate;
|
||||
config.plugins = this.plugins.length > 0 ? this.plugins : undefined;
|
||||
config.bands = this.bands.length > 0 ? this.bands : undefined;
|
||||
|
||||
if (Array.isArray(this.padding)) {
|
||||
config.padding = this.padding;
|
||||
}
|
||||
if (this.legend) {
|
||||
config.legend = this.legend;
|
||||
}
|
||||
if (this.focus) {
|
||||
config.focus = this.focus;
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
}
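
A hedged end-to-end sketch of composing a configuration with this builder and reading the resulting options; all values are illustrative:

// Illustrative composition of the builder API shown above.
import { DrawStyle } from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

const config = new UPlotConfigBuilder({
  widgetId: 'example-widget',
  onDragSelect: (start, end): void => {
    // the builder multiplies the selected x values by 1000 before calling back
    console.log('zoom to', start, end);
  },
});

config.addScale({ scaleKey: 'x', time: true, min: 1700000000, max: 1700003600 });
config.addScale({ scaleKey: 'y' });
config.addSeries({
  scaleKey: 'y',
  label: 'p99 latency',
  colorMapping: {},
  drawStyle: DrawStyle.Line,
  isDarkMode: true,
});

const options = config.getConfig();
// options.series has one extra leading entry for the timestamp column,
// and options.scales is keyed by scaleKey ('x', 'y').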
frontend/src/lib/uPlotV2/config/UPlotScaleBuilder.ts (new file, 157 lines)
@@ -0,0 +1,157 @@
import { Scale } from 'uplot';
|
||||
|
||||
import {
|
||||
adjustSoftLimitsWithThresholds,
|
||||
createRangeFunction,
|
||||
getDistributionConfig,
|
||||
getFallbackMinMaxTimeStamp,
|
||||
getRangeConfig,
|
||||
normalizeLogScaleLimits,
|
||||
} from '../utils/scale';
|
||||
import { ConfigBuilder, ScaleProps } from './types';
|
||||
|
||||
/**
|
||||
* Builder for uPlot scale configuration
|
||||
* Handles creation and merging of scale settings
|
||||
*/
|
||||
export class UPlotScaleBuilder extends ConfigBuilder<
|
||||
ScaleProps,
|
||||
Record<string, Scale>
|
||||
> {
|
||||
private softMin: number | null;
|
||||
private softMax: number | null;
|
||||
private min: number | null;
|
||||
private max: number | null;
|
||||
|
||||
constructor(props: ScaleProps) {
|
||||
super(props);
|
||||
// New widgets default both softMin and softMax to 0, so treat that pair as "not set" rather than a real soft range
|
||||
const isDefaultSoftMinMax = props.softMin === 0 && props.softMax === 0;
|
||||
this.softMin = isDefaultSoftMinMax ? null : props.softMin ?? null;
|
||||
this.softMax = isDefaultSoftMinMax ? null : props.softMax ?? null;
|
||||
this.min = props.min ?? null;
|
||||
this.max = props.max ?? null;
|
||||
}
|
||||
|
||||
getConfig(): Record<string, Scale> {
|
||||
const {
|
||||
scaleKey,
|
||||
time,
|
||||
range,
|
||||
thresholds,
|
||||
logBase = 10,
|
||||
padMinBy = 0.1,
|
||||
padMaxBy = 0.1,
|
||||
} = this.props;
|
||||
|
||||
// Special handling for time scales (X axis)
|
||||
if (time) {
|
||||
let minTime = this.min ?? 0;
|
||||
let maxTime = this.max ?? 0;
|
||||
|
||||
// Fallback when min/max are not provided
|
||||
if (!minTime || !maxTime) {
|
||||
const { fallbackMin, fallbackMax } = getFallbackMinMaxTimeStamp();
|
||||
minTime = fallbackMin;
|
||||
maxTime = fallbackMax;
|
||||
}
|
||||
|
||||
// Align max time to "endTime - 1 minute", rounded down to minute precision
|
||||
// This matches legacy getXAxisScale behavior and avoids empty space at the right edge
|
||||
const oneMinuteAgoTimestamp = (maxTime - 60) * 1000;
|
||||
const currentDate = new Date(oneMinuteAgoTimestamp);
|
||||
|
||||
currentDate.setSeconds(0);
|
||||
currentDate.setMilliseconds(0);
|
||||
|
||||
const unixTimestampSeconds = Math.floor(currentDate.getTime() / 1000);
|
||||
maxTime = unixTimestampSeconds;
|
||||
|
||||
return {
|
||||
[scaleKey]: {
|
||||
time: true,
|
||||
auto: false,
|
||||
range: [minTime, maxTime],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const distr = this.props.distribution;
|
||||
|
||||
// Adjust softMin/softMax to include threshold values
|
||||
// This ensures threshold lines are visible within the scale range
|
||||
const thresholdList = thresholds?.thresholds;
|
||||
const {
|
||||
softMin: adjustedSoftMin,
|
||||
softMax: adjustedSoftMax,
|
||||
} = adjustSoftLimitsWithThresholds(
|
||||
this.softMin,
|
||||
this.softMax,
|
||||
thresholdList,
|
||||
thresholds?.yAxisUnit,
|
||||
);
|
||||
|
||||
const { min, max, softMin, softMax } = normalizeLogScaleLimits({
|
||||
distr,
|
||||
logBase,
|
||||
limits: {
|
||||
min: this.min,
|
||||
max: this.max,
|
||||
softMin: adjustedSoftMin,
|
||||
softMax: adjustedSoftMax,
|
||||
},
|
||||
});
|
||||
|
||||
const distribution = getDistributionConfig({
|
||||
time,
|
||||
distr,
|
||||
logBase,
|
||||
});
|
||||
|
||||
const {
|
||||
rangeConfig,
|
||||
hardMinOnly,
|
||||
hardMaxOnly,
|
||||
hasFixedRange,
|
||||
} = getRangeConfig(min, max, softMin, softMax, padMinBy, padMaxBy);
|
||||
|
||||
const rangeFn = createRangeFunction({
|
||||
rangeConfig,
|
||||
hardMinOnly,
|
||||
hardMaxOnly,
|
||||
hasFixedRange,
|
||||
min,
|
||||
max,
|
||||
});
|
||||
|
||||
let auto = this.props.auto;
|
||||
auto ??= !time && !hasFixedRange;
|
||||
|
||||
return {
|
||||
[scaleKey]: {
|
||||
time,
|
||||
auto,
|
||||
range: range ?? rangeFn,
|
||||
...distribution,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
merge(props: Partial<ScaleProps>): void {
|
||||
this.props = { ...this.props, ...props };
|
||||
if (props.softMin !== undefined) {
|
||||
this.softMin = props.softMin ?? null;
|
||||
}
|
||||
if (props.softMax !== undefined) {
|
||||
this.softMax = props.softMax ?? null;
|
||||
}
|
||||
if (props.min !== undefined) {
|
||||
this.min = props.min ?? null;
|
||||
}
|
||||
if (props.max !== undefined) {
|
||||
this.max = props.max ?? null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export type { ScaleProps };
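
A hedged sketch of supplying soft limits and thresholds to a scale; per the comments above, a softMin/softMax pair of 0/0 is treated as "not set", and threshold values widen the soft range so threshold lines stay visible:

// Illustrative: a Y scale with soft limits and a threshold.
import { UPlotScaleBuilder } from 'lib/uPlotV2/config/UPlotScaleBuilder';

const yScale = new UPlotScaleBuilder({
  scaleKey: 'y',
  softMin: 0,
  softMax: 100, // not both zero, so the pair is kept as real soft limits
  thresholds: {
    scaleKey: 'y',
    thresholds: [{ thresholdValue: 150 }], // nudges the soft range so the line stays on screen
  },
});

const scaleConfig = yScale.getConfig();
// scaleConfig.y.range comes from createRangeFunction unless an explicit `range` was provided.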
frontend/src/lib/uPlotV2/config/UPlotSeriesBuilder.ts (new file, 232 lines)
@@ -0,0 +1,232 @@
import { themeColors } from 'constants/theme';
|
||||
import { generateColor } from 'lib/uPlotLib/utils/generateColor';
|
||||
import uPlot, { Series } from 'uplot';
|
||||
|
||||
import {
|
||||
ConfigBuilder,
|
||||
DrawStyle,
|
||||
FillStyle,
|
||||
LineInterpolation,
|
||||
SeriesProps,
|
||||
VisibilityMode,
|
||||
} from './types';
|
||||
|
||||
/**
|
||||
* Builder for uPlot series configuration
|
||||
* Handles creation of series settings
|
||||
*/
|
||||
export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
|
||||
private buildLineConfig(
|
||||
lineColor: string,
|
||||
lineWidth?: number,
|
||||
lineStyle?: { fill?: FillStyle; dash?: number[] },
|
||||
): Partial<Series> {
|
||||
const lineConfig: Partial<Series> = {
|
||||
stroke: lineColor,
|
||||
width: lineWidth ?? 2,
|
||||
};
|
||||
|
||||
if (lineStyle && lineStyle.fill !== FillStyle.Solid) {
|
||||
if (lineStyle.fill === FillStyle.Dot) {
|
||||
lineConfig.cap = 'round';
|
||||
}
|
||||
lineConfig.dash = lineStyle.dash ?? [10, 10];
|
||||
}
|
||||
|
||||
return lineConfig;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build path configuration
|
||||
*/
|
||||
private buildPathConfig({
|
||||
pathBuilder,
|
||||
drawStyle,
|
||||
lineInterpolation,
|
||||
}: {
|
||||
pathBuilder?: Series.PathBuilder | null;
|
||||
drawStyle: DrawStyle;
|
||||
lineInterpolation?: LineInterpolation;
|
||||
}): Partial<Series> {
|
||||
if (pathBuilder) {
|
||||
return { paths: pathBuilder };
|
||||
}
|
||||
|
||||
if (drawStyle === DrawStyle.Points) {
|
||||
return { paths: (): null => null };
|
||||
}
|
||||
|
||||
if (drawStyle !== null) {
|
||||
return {
|
||||
paths: (
|
||||
self: uPlot,
|
||||
seriesIdx: number,
|
||||
idx0: number,
|
||||
idx1: number,
|
||||
): Series.Paths | null => {
|
||||
const pathsBuilder = getPathBuilder(drawStyle, lineInterpolation);
|
||||
|
||||
return pathsBuilder(self, seriesIdx, idx0, idx1);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build points configuration
|
||||
*/
|
||||
private buildPointsConfig({
|
||||
lineColor,
|
||||
lineWidth,
|
||||
pointSize,
|
||||
pointsBuilder,
|
||||
pointsFilter,
|
||||
drawStyle,
|
||||
showPoints,
|
||||
}: {
|
||||
lineColor: string;
|
||||
lineWidth?: number;
|
||||
pointSize?: number;
|
||||
pointsBuilder: Series.Points.Show | null;
|
||||
pointsFilter: Series.Points.Filter | null;
|
||||
drawStyle: DrawStyle;
|
||||
showPoints?: VisibilityMode;
|
||||
}): Partial<Series.Points> {
|
||||
const pointsConfig: Partial<Series.Points> = {
|
||||
stroke: lineColor,
|
||||
fill: lineColor,
|
||||
size: !pointSize || pointSize < (lineWidth ?? 2) ? undefined : pointSize,
|
||||
filter: pointsFilter || undefined,
|
||||
};
|
||||
|
||||
if (pointsBuilder) {
|
||||
pointsConfig.show = pointsBuilder;
|
||||
} else if (drawStyle === DrawStyle.Points) {
|
||||
pointsConfig.show = true;
|
||||
} else if (showPoints === VisibilityMode.Never) {
|
||||
pointsConfig.show = false;
|
||||
} else if (showPoints === VisibilityMode.Always) {
|
||||
pointsConfig.show = true;
|
||||
}
|
||||
|
||||
return pointsConfig;
|
||||
}
|
||||
|
||||
private getLineColor(): string {
|
||||
const { colorMapping, label, lineColor, isDarkMode } = this.props;
|
||||
if (!label) {
|
||||
return lineColor ?? (isDarkMode ? themeColors.white : themeColors.black);
|
||||
}
|
||||
return (
|
||||
lineColor ??
|
||||
colorMapping[label] ??
|
||||
generateColor(
|
||||
label,
|
||||
isDarkMode ? themeColors.chartcolors : themeColors.lightModeColor,
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
getConfig(): Series {
|
||||
const {
|
||||
drawStyle,
|
||||
pathBuilder,
|
||||
pointsBuilder,
|
||||
pointsFilter,
|
||||
lineInterpolation,
|
||||
lineWidth,
|
||||
lineStyle,
|
||||
showPoints,
|
||||
pointSize,
|
||||
scaleKey,
|
||||
label,
|
||||
spanGaps,
|
||||
show = true,
|
||||
} = this.props;
|
||||
|
||||
const lineColor = this.getLineColor();
|
||||
|
||||
const lineConfig = this.buildLineConfig(lineColor, lineWidth, lineStyle);
|
||||
const pathConfig = this.buildPathConfig({
|
||||
pathBuilder,
|
||||
drawStyle,
|
||||
lineInterpolation,
|
||||
});
|
||||
const pointsConfig = this.buildPointsConfig({
|
||||
lineColor,
|
||||
lineWidth,
|
||||
pointSize,
|
||||
pointsBuilder: pointsBuilder ?? null,
|
||||
pointsFilter: pointsFilter ?? null,
|
||||
drawStyle,
|
||||
showPoints,
|
||||
});
|
||||
|
||||
return {
|
||||
scale: scaleKey,
|
||||
label,
|
||||
spanGaps: typeof spanGaps === 'boolean' ? spanGaps : false,
|
||||
value: (): string => '',
|
||||
pxAlign: true,
|
||||
show,
|
||||
...lineConfig,
|
||||
...pathConfig,
|
||||
points: Object.keys(pointsConfig).length > 0 ? pointsConfig : undefined,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
interface PathBuilders {
|
||||
linear: Series.PathBuilder;
|
||||
spline: Series.PathBuilder;
|
||||
stepBefore: Series.PathBuilder;
|
||||
stepAfter: Series.PathBuilder;
|
||||
[key: string]: Series.PathBuilder;
|
||||
}
|
||||
|
||||
let builders: PathBuilders | null = null;
|
||||
|
||||
/**
|
||||
* Get path builder based on draw style and interpolation
|
||||
*/
|
||||
function getPathBuilder(
|
||||
style: DrawStyle,
|
||||
lineInterpolation?: LineInterpolation,
|
||||
): Series.PathBuilder {
|
||||
const pathBuilders = uPlot.paths;
|
||||
|
||||
if (!builders) {
|
||||
const linearBuilder = pathBuilders.linear;
|
||||
const splineBuilder = pathBuilders.spline;
|
||||
const steppedBuilder = pathBuilders.stepped;
|
||||
|
||||
if (!linearBuilder || !splineBuilder || !steppedBuilder) {
|
||||
throw new Error('Required uPlot path builders are not available');
|
||||
}
|
||||
|
||||
builders = {
|
||||
linear: linearBuilder(),
|
||||
spline: splineBuilder(),
|
||||
stepBefore: steppedBuilder({ align: -1 }),
|
||||
stepAfter: steppedBuilder({ align: 1 }),
|
||||
};
|
||||
}
|
||||
|
||||
if (style === DrawStyle.Line) {
|
||||
if (lineInterpolation === LineInterpolation.StepBefore) {
|
||||
return builders.stepBefore;
|
||||
}
|
||||
if (lineInterpolation === LineInterpolation.StepAfter) {
|
||||
return builders.stepAfter;
|
||||
}
|
||||
if (lineInterpolation === LineInterpolation.Linear) {
|
||||
return builders.linear;
|
||||
}
|
||||
}
|
||||
|
||||
return builders.spline;
|
||||
}
|
||||
|
||||
export type { SeriesProps };
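
A brief, hedged sketch of two series shapes this builder supports, points-only and a dashed stepped line; labels and sizes are illustrative:

// Illustrative SeriesProps combinations for UPlotSeriesBuilder.
import {
  DrawStyle,
  FillStyle,
  LineInterpolation,
  VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotSeriesBuilder } from 'lib/uPlotV2/config/UPlotSeriesBuilder';

// Points only: paths are suppressed and point markers are forced on.
const scatter = new UPlotSeriesBuilder({
  scaleKey: 'y',
  label: 'errors',
  colorMapping: {},
  drawStyle: DrawStyle.Points,
  pointSize: 6,
});

// Dashed step-after line with points hidden.
const stepped = new UPlotSeriesBuilder({
  scaleKey: 'y',
  label: 'requests',
  colorMapping: {},
  drawStyle: DrawStyle.Line,
  lineInterpolation: LineInterpolation.StepAfter,
  lineStyle: { fill: FillStyle.Dash, dash: [4, 4] },
  showPoints: VisibilityMode.Never,
});

const [scatterSeries, steppedSeries] = [scatter.getConfig(), stepped.getConfig()];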
frontend/src/lib/uPlotV2/config/types.ts (new file, 200 lines)
@@ -0,0 +1,200 @@
import { PrecisionOption } from 'components/Graph/types';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import uPlot, { Cursor, Options, Series } from 'uplot';
|
||||
|
||||
import { ThresholdsDrawHookOptions } from '../hooks/types';
|
||||
|
||||
/**
|
||||
* Base abstract class for all configuration builders
|
||||
* Provides a common interface for building uPlot configuration components
|
||||
*/
|
||||
export abstract class ConfigBuilder<P, T> {
|
||||
constructor(public props: P) {}
|
||||
|
||||
/**
|
||||
* Builds and returns the configuration object
|
||||
*/
|
||||
abstract getConfig(): T;
|
||||
|
||||
/**
|
||||
* Merges additional properties into the existing configuration
|
||||
*/
|
||||
merge?(props: Partial<P>): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for configuring the uPlot config builder
|
||||
*/
|
||||
export interface ConfigBuilderProps {
|
||||
widgetId?: string;
|
||||
onDragSelect?: (startTime: number, endTime: number) => void;
|
||||
tzDate?: uPlot.LocalDateFromUnix;
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for configuring an axis
|
||||
*/
|
||||
export interface AxisProps {
|
||||
scaleKey: string;
|
||||
label?: string;
|
||||
show?: boolean;
|
||||
side?: 0 | 1 | 2 | 3; // top, right, bottom, left
|
||||
stroke?: string;
|
||||
grid?: {
|
||||
stroke?: string;
|
||||
width?: number;
|
||||
show?: boolean;
|
||||
};
|
||||
ticks?: {
|
||||
stroke?: string;
|
||||
width?: number;
|
||||
show?: boolean;
|
||||
size?: number;
|
||||
};
|
||||
values?: uPlot.Axis.Values;
|
||||
gap?: number;
|
||||
size?: uPlot.Axis.Size;
|
||||
formatValue?: (v: number) => string;
|
||||
space?: number; // Space for log scale axes
|
||||
isDarkMode?: boolean;
|
||||
isLogScale?: boolean;
|
||||
yAxisUnit?: string;
|
||||
panelType?: PANEL_TYPES;
|
||||
decimalPrecision?: PrecisionOption;
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for configuring a scale
|
||||
*/
|
||||
|
||||
export enum DistributionType {
|
||||
Linear = 'linear',
|
||||
Logarithmic = 'logarithmic',
|
||||
}
|
||||
|
||||
export interface ScaleProps {
|
||||
scaleKey: string;
|
||||
time?: boolean;
|
||||
min?: number;
|
||||
max?: number;
|
||||
softMin?: number;
|
||||
softMax?: number;
|
||||
thresholds?: ThresholdsDrawHookOptions;
|
||||
padMinBy?: number;
|
||||
padMaxBy?: number;
|
||||
range?: uPlot.Scale.Range;
|
||||
auto?: boolean;
|
||||
logBase?: uPlot.Scale.LogBase;
|
||||
distribution?: DistributionType;
|
||||
}
|
||||
|
||||
/**
|
||||
* Props for configuring a series
|
||||
*/
|
||||
|
||||
export enum FillStyle {
|
||||
Solid = 'solid',
|
||||
Dash = 'dash',
|
||||
Dot = 'dot',
|
||||
Square = 'square',
|
||||
}
|
||||
|
||||
export interface LineStyle {
|
||||
dash?: Array<number>;
|
||||
fill?: FillStyle;
|
||||
}
|
||||
|
||||
export enum DrawStyle {
|
||||
Line = 'line',
|
||||
Points = 'points',
|
||||
}
|
||||
|
||||
export enum LineInterpolation {
|
||||
Linear = 'linear',
|
||||
Spline = 'spline',
|
||||
StepAfter = 'stepAfter',
|
||||
StepBefore = 'stepBefore',
|
||||
}
|
||||
|
||||
export enum VisibilityMode {
|
||||
Always = 'always',
|
||||
Auto = 'auto',
|
||||
Never = 'never',
|
||||
}
|
||||
|
||||
export interface SeriesProps {
|
||||
scaleKey: string;
|
||||
label?: string;
|
||||
|
||||
colorMapping: Record<string, string>;
|
||||
drawStyle: DrawStyle;
|
||||
pathBuilder?: Series.PathBuilder;
|
||||
pointsFilter?: Series.Points.Filter;
|
||||
pointsBuilder?: Series.Points.Show;
|
||||
show?: boolean;
|
||||
spanGaps?: boolean;
|
||||
|
||||
isDarkMode?: boolean;
|
||||
|
||||
// Line config
|
||||
lineColor?: string;
|
||||
lineInterpolation?: LineInterpolation;
|
||||
lineStyle?: LineStyle;
|
||||
lineWidth?: number;
|
||||
|
||||
// Points config
|
||||
pointColor?: string;
|
||||
pointSize?: number;
|
||||
showPoints?: VisibilityMode;
|
||||
}
|
||||
|
||||
export interface LegendItem {
|
||||
seriesIndex: number;
|
||||
label: uPlot.Series['label'];
|
||||
color: uPlot.Series['stroke'];
|
||||
show: boolean;
|
||||
}
|
||||
|
||||
export const DEFAULT_PLOT_CONFIG: Partial<Options> = {
|
||||
focus: {
|
||||
alpha: 0.3,
|
||||
},
|
||||
cursor: {
|
||||
focus: {
|
||||
prox: 30,
|
||||
},
|
||||
},
|
||||
legend: {
|
||||
show: false,
|
||||
},
|
||||
padding: [16, 16, 8, 8],
|
||||
series: [],
|
||||
hooks: {},
|
||||
};
|
||||
|
||||
const POINTS_FILL_COLOR = '#FFFFFF';
|
||||
|
||||
export const DEFAULT_CURSOR_CONFIG: Cursor = {
|
||||
drag: { setScale: true },
|
||||
points: {
|
||||
one: true,
|
||||
size: (u, seriesIdx) => (u.series[seriesIdx]?.points?.size ?? 0) * 3,
|
||||
width: (_u, _seriesIdx, size) => size / 4,
|
||||
stroke: (u, seriesIdx): string => {
|
||||
const points = u.series[seriesIdx]?.points;
|
||||
const strokeFn =
|
||||
typeof points?.stroke === 'function' ? points.stroke : undefined;
|
||||
const strokeValue =
|
||||
strokeFn !== undefined
|
||||
? strokeFn(u, seriesIdx)
|
||||
: typeof points?.stroke === 'string'
|
||||
? points.stroke
|
||||
: '';
|
||||
return `${strokeValue}90`;
|
||||
},
|
||||
fill: (): string => POINTS_FILL_COLOR,
|
||||
},
|
||||
focus: {
|
||||
prox: 30,
|
||||
},
|
||||
};
frontend/src/lib/uPlotV2/context/PlotContext.tsx (new file, 136 lines)
@@ -0,0 +1,136 @@
import {
|
||||
createContext,
|
||||
PropsWithChildren,
|
||||
useCallback,
|
||||
useContext,
|
||||
useMemo,
|
||||
useRef,
|
||||
} from 'react';
|
||||
import type { SeriesVisibilityItem } from 'container/DashboardContainer/visualization/panels/types';
|
||||
import { updateSeriesVisibilityToLocalStorage } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
|
||||
import type uPlot from 'uplot';
|
||||
|
||||
export interface PlotContextInitialState {
|
||||
uPlotInstance: uPlot | null;
|
||||
widgetId?: string;
|
||||
}
|
||||
export interface IPlotContext {
|
||||
setPlotContextInitialState: (state: PlotContextInitialState) => void;
|
||||
onToggleSeriesVisibility: (seriesIndex: number) => void;
|
||||
onToggleSeriesOnOff: (seriesIndex: number) => void;
|
||||
onFocusSeries: (seriesIndex: number | null) => void;
|
||||
}
|
||||
|
||||
export const PlotContext = createContext<IPlotContext | null>(null);
|
||||
|
||||
export const PlotContextProvider = ({
|
||||
children,
|
||||
}: PropsWithChildren): JSX.Element => {
|
||||
const uPlotInstanceRef = useRef<uPlot | null>(null);
|
||||
const activeSeriesIndex = useRef<number | undefined>(undefined);
|
||||
const widgetIdRef = useRef<string | undefined>(undefined);
|
||||
|
||||
const setPlotContextInitialState = useCallback(
|
||||
({ uPlotInstance, widgetId }: PlotContextInitialState): void => {
|
||||
uPlotInstanceRef.current = uPlotInstance;
|
||||
widgetIdRef.current = widgetId;
|
||||
activeSeriesIndex.current = undefined;
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const onToggleSeriesVisibility = useCallback((seriesIndex: number): void => {
|
||||
const plot = uPlotInstanceRef.current;
|
||||
if (!plot) {
|
||||
return;
|
||||
}
|
||||
|
||||
const isReset = activeSeriesIndex.current === seriesIndex;
|
||||
activeSeriesIndex.current = isReset ? undefined : seriesIndex;
|
||||
|
||||
plot.batch(() => {
|
||||
plot.series.forEach((_, index) => {
|
||||
if (index === 0) {
|
||||
return;
|
||||
}
|
||||
const currentSeriesIndex = index;
|
||||
plot.setSeries(currentSeriesIndex, {
|
||||
show: isReset || currentSeriesIndex === seriesIndex,
|
||||
});
|
||||
});
|
||||
if (widgetIdRef.current) {
|
||||
const seriesVisibility: SeriesVisibilityItem[] = plot.series.map(
|
||||
(series) => ({
|
||||
label: series.label ?? '',
|
||||
show: series.show ?? true,
|
||||
}),
|
||||
);
|
||||
updateSeriesVisibilityToLocalStorage(widgetIdRef.current, seriesVisibility);
|
||||
}
|
||||
});
|
||||
}, []);
|
||||
|
||||
const onToggleSeriesOnOff = useCallback((seriesIndex: number): void => {
|
||||
const plot = uPlotInstanceRef.current;
|
||||
if (!plot) {
|
||||
return;
|
||||
}
|
||||
|
||||
const series = plot.series[seriesIndex];
|
||||
if (!series) {
|
||||
return;
|
||||
}
|
||||
plot.setSeries(seriesIndex, { show: !series.show });
|
||||
if (widgetIdRef.current) {
|
||||
const seriesVisibility: SeriesVisibilityItem[] = plot.series.map(
|
||||
(series) => ({
|
||||
label: series.label ?? '',
|
||||
show: series.show ?? true,
|
||||
}),
|
||||
);
|
||||
updateSeriesVisibilityToLocalStorage(widgetIdRef.current, seriesVisibility);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const onFocusSeries = useCallback((seriesIndex: number | null): void => {
|
||||
const plot = uPlotInstanceRef.current;
|
||||
if (!plot) {
|
||||
return;
|
||||
}
|
||||
|
||||
plot.setSeries(
|
||||
seriesIndex,
|
||||
{
|
||||
focus: true,
|
||||
},
|
||||
false,
|
||||
);
|
||||
}, []);
|
||||
|
||||
const value = useMemo(
|
||||
() => ({
|
||||
onToggleSeriesVisibility,
|
||||
setPlotContextInitialState,
|
||||
onToggleSeriesOnOff,
|
||||
onFocusSeries,
|
||||
}),
|
||||
[
|
||||
onToggleSeriesVisibility,
|
||||
setPlotContextInitialState,
|
||||
onToggleSeriesOnOff,
|
||||
onFocusSeries,
|
||||
],
|
||||
);
|
||||
|
||||
return <PlotContext.Provider value={value}>{children}</PlotContext.Provider>;
|
||||
};
|
||||
|
||||
export const usePlotContext = (): IPlotContext => {
|
||||
const context = useContext(PlotContext);
|
||||
|
||||
if (!context) {
|
||||
throw new Error('usePlotContext must be used within a PlotContextProvider');
|
||||
}
|
||||
|
||||
return context;
|
||||
};
|
||||
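For context, a minimal sketch of how a legend entry could drive the chart through this provider. Only usePlotContext and its callbacks come from the file above; the component, its props, and the import path are illustrative assumptions.

import { MouseEvent } from 'react';
import { usePlotContext } from 'lib/uPlotV2/context/PlotContext'; // path is an assumption

interface LegendRowProps {
	seriesIndex: number;
	label: string;
}

function LegendRow({ seriesIndex, label }: LegendRowProps): JSX.Element {
	const {
		onToggleSeriesVisibility,
		onToggleSeriesOnOff,
		onFocusSeries,
	} = usePlotContext();

	// Plain click isolates this series; Ctrl/Cmd+click toggles only its visibility.
	const handleClick = (event: MouseEvent): void => {
		if (event.ctrlKey || event.metaKey) {
			onToggleSeriesOnOff(seriesIndex);
		} else {
			onToggleSeriesVisibility(seriesIndex);
		}
	};

	return (
		<button
			type="button"
			onClick={handleClick}
			onMouseEnter={(): void => onFocusSeries(seriesIndex)}
			onMouseLeave={(): void => onFocusSeries(null)}
		>
			{label}
		</button>
	);
}

export default LegendRow;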
12
frontend/src/lib/uPlotV2/hooks/types.ts
Normal file
@@ -0,0 +1,12 @@
export interface Threshold {
	thresholdValue: number;
	thresholdColor?: string;
	thresholdUnit?: string;
	thresholdLabel?: string;
}

export interface ThresholdsDrawHookOptions {
	scaleKey: string;
	thresholds: Threshold[];
	yAxisUnit?: string;
}
142
frontend/src/lib/uPlotV2/hooks/useLegendsSync.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
import {
|
||||
Dispatch,
|
||||
SetStateAction,
|
||||
useCallback,
|
||||
useEffect,
|
||||
useLayoutEffect,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react';
|
||||
import { LegendItem } from 'lib/uPlotV2/config/types';
|
||||
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
|
||||
import { get } from 'lodash-es';
|
||||
|
||||
/**
|
||||
* Syncs legend UI state with the uPlot chart: which series is focused and each series' visibility.
|
||||
* Subscribes to the config's setSeries hook so legend items stay in sync when series are toggled
|
||||
* from the chart or from the Legend component.
|
||||
*
|
||||
* @param config - UPlot config builder; used to read legend items and to register the setSeries hook
|
||||
* @param subscribeToFocusChange - When true, updates focusedSeriesIndex when a series gains focus via setSeries
|
||||
* @returns focusedSeriesIndex, setFocusedSeriesIndex, and legendItemsMap for the Legend component
|
||||
*/
|
||||
export default function useLegendsSync({
|
||||
config,
|
||||
subscribeToFocusChange = true,
|
||||
}: {
|
||||
config: UPlotConfigBuilder;
|
||||
subscribeToFocusChange?: boolean;
|
||||
}): {
|
||||
focusedSeriesIndex: number | null;
|
||||
setFocusedSeriesIndex: Dispatch<SetStateAction<number | null>>;
|
||||
legendItemsMap: Record<number, LegendItem>;
|
||||
} {
|
||||
const [legendItemsMap, setLegendItemsMap] = useState<
|
||||
Record<number, LegendItem>
|
||||
>({});
|
||||
const [focusedSeriesIndex, setFocusedSeriesIndex] = useState<number | null>(
|
||||
null,
|
||||
);
|
||||
|
||||
/** Pending visibility updates (series index -> show) to apply in the next RAF. */
|
||||
const visibilityUpdatesRef = useRef<Record<number, boolean>>({});
|
||||
/** RAF id for the batched visibility update; null when no update is scheduled. */
|
||||
const visibilityRafIdRef = useRef<number | null>(null);
|
||||
|
||||
/**
|
||||
* Applies a batch of visibility updates to legendItemsMap.
|
||||
* Only updates entries that exist and whose show value changed; returns prev state if nothing changed.
|
||||
*/
|
||||
const applyVisibilityUpdates = useCallback(
|
||||
(updates: Record<number, boolean>): void => {
|
||||
setLegendItemsMap(
|
||||
(prev): Record<number, LegendItem> => {
|
||||
let hasChanges = false;
|
||||
const next = { ...prev };
|
||||
|
||||
for (const [idxStr, show] of Object.entries(updates)) {
|
||||
const idx = Number(idxStr);
|
||||
const current = next[idx];
|
||||
if (!current || current.show === show) {
|
||||
continue;
|
||||
}
|
||||
next[idx] = { ...current, show };
|
||||
hasChanges = true;
|
||||
}
|
||||
|
||||
return hasChanges ? next : prev;
|
||||
},
|
||||
);
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
/**
|
||||
* Queues a single series visibility update and schedules at most one state update per frame.
|
||||
* Batches multiple visibility changes (e.g. from setSeries) into one setLegendItemsMap call.
|
||||
*/
|
||||
const queueVisibilityUpdate = useCallback(
|
||||
(seriesIndex: number, show: boolean): void => {
|
||||
visibilityUpdatesRef.current[seriesIndex] = show;
|
||||
|
||||
if (visibilityRafIdRef.current !== null) {
|
||||
return;
|
||||
}
|
||||
|
||||
visibilityRafIdRef.current = requestAnimationFrame(() => {
|
||||
const updates = visibilityUpdatesRef.current;
|
||||
visibilityUpdatesRef.current = {};
|
||||
visibilityRafIdRef.current = null;
|
||||
|
||||
applyVisibilityUpdates(updates);
|
||||
});
|
||||
},
|
||||
[applyVisibilityUpdates],
|
||||
);
|
||||
|
||||
/**
|
||||
* Handler for uPlot's setSeries hook. Updates focused series when opts.focus is set,
|
||||
* and queues legend visibility updates when opts.show changes so the legend stays in sync.
|
||||
*/
|
||||
const handleSetSeries = useCallback(
|
||||
(_u: uPlot, seriesIndex: number | null, opts: uPlot.Series): void => {
|
||||
if (subscribeToFocusChange && get(opts, 'focus', false)) {
|
||||
setFocusedSeriesIndex(seriesIndex);
|
||||
}
|
||||
|
||||
if (!seriesIndex || typeof opts.show !== 'boolean') {
|
||||
return;
|
||||
}
|
||||
|
||||
queueVisibilityUpdate(seriesIndex, opts.show);
|
||||
},
|
||||
[queueVisibilityUpdate, subscribeToFocusChange],
|
||||
);
|
||||
|
||||
// Initialize legend items from config and subscribe to setSeries; cleanup on unmount or config change.
|
||||
useLayoutEffect(() => {
|
||||
setLegendItemsMap(config.getLegendItems());
|
||||
|
||||
const removeHook = config.addHook('setSeries', handleSetSeries);
|
||||
|
||||
return (): void => {
|
||||
removeHook();
|
||||
};
|
||||
}, [config, handleSetSeries]);
|
||||
|
||||
// Cancel any pending RAF on unmount to avoid state updates after unmount.
|
||||
useEffect(
|
||||
() => (): void => {
|
||||
if (visibilityRafIdRef.current != null) {
|
||||
cancelAnimationFrame(visibilityRafIdRef.current);
|
||||
}
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
return {
|
||||
focusedSeriesIndex,
|
||||
setFocusedSeriesIndex,
|
||||
legendItemsMap,
|
||||
};
|
||||
}
|
||||
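A sketch of how a chart wrapper might feed this hook into a custom legend. The UPlotConfigBuilder instance is assumed to be built elsewhere in uPlotV2, and the LegendItem fields used here (label, show) are assumptions; the markup is illustrative.

import { useMemo } from 'react';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import useLegendsSync from 'lib/uPlotV2/hooks/useLegendsSync';

function ChartLegend({ config }: { config: UPlotConfigBuilder }): JSX.Element {
	const { focusedSeriesIndex, legendItemsMap } = useLegendsSync({ config });

	// Legend items are keyed by series index; render them in index order.
	const entries = useMemo(() => Object.entries(legendItemsMap), [legendItemsMap]);

	return (
		<ul>
			{entries.map(([index, item]) => (
				<li
					key={index}
					style={{
						opacity: item.show ? 1 : 0.4,
						fontWeight: Number(index) === focusedSeriesIndex ? 600 : 400,
					}}
				>
					{item.label}
				</li>
			))}
		</ul>
	);
}

export default ChartLegend;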
65
frontend/src/lib/uPlotV2/hooks/useThresholdsDrawHook.ts
Normal file
@@ -0,0 +1,65 @@
import { convertValue } from 'lib/getConvertedValue';
import uPlot, { Hooks } from 'uplot';

import { Threshold, ThresholdsDrawHookOptions } from './types';

export function thresholdsDrawHook(
	options: ThresholdsDrawHookOptions,
): Hooks.Defs['draw'] {
	const dashSegments = [10, 5];

	function addLines(u: uPlot, scaleKey: string, thresholds: Threshold[]): void {
		const ctx = u.ctx;

		ctx.save();
		ctx.lineWidth = 2;
		ctx.setLineDash(dashSegments);

		const threshold90Percent = ctx.canvas.height * 0.9;

		for (let idx = 0; idx < thresholds.length; idx++) {
			const threshold = thresholds[idx];
			const color = threshold.thresholdColor || 'red';

			const yValue = convertValue(
				threshold.thresholdValue,
				threshold.thresholdUnit,
				options.yAxisUnit,
			);

			const scaleVal = u.valToPos(Number(yValue), scaleKey, true);

			const x0 = Math.round(u.bbox.left);
			const y0 = Math.round(scaleVal);
			const x1 = Math.round(u.bbox.left + u.bbox.width);
			const y1 = Math.round(scaleVal);

			ctx.strokeStyle = color;

			ctx.beginPath();
			ctx.moveTo(x0, y0);
			ctx.lineTo(x1, y1);

			ctx.stroke();

			// Draw threshold label if present
			if (threshold.thresholdLabel) {
				const textWidth = ctx.measureText(threshold.thresholdLabel).width;
				const textX = x1 - textWidth - 20;
				const yposHeight = ctx.canvas.height - y1;
				const textY = yposHeight > threshold90Percent ? y0 + 15 : y0 - 15;

				ctx.fillStyle = color;
				ctx.fillText(threshold.thresholdLabel, textX, textY);
			}
		}
	}

	const { scaleKey, thresholds } = options;

	return (u: uPlot): void => {
		const ctx = u.ctx;
		addLines(u, scaleKey, thresholds);
		ctx.restore();
	};
}
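A sketch of registering this hook on a uPlot instance. Only thresholdsDrawHook and its option shape come from the file above; the chart options, units, and colors are illustrative.

import uPlot from 'uplot';
import { thresholdsDrawHook } from 'lib/uPlotV2/hooks/useThresholdsDrawHook';

const drawThresholds = thresholdsDrawHook({
	scaleKey: 'y',
	yAxisUnit: 'ms',
	thresholds: [
		{ thresholdValue: 500, thresholdUnit: 'ms', thresholdColor: 'red', thresholdLabel: 'SLO' },
	],
});

const opts: uPlot.Options = {
	width: 800,
	height: 300,
	series: [{}, { label: 'p99 latency', stroke: 'steelblue' }],
	hooks: {
		// uPlot runs "draw" hooks after the series are painted, so the dashed
		// threshold line and its label render on top of the chart.
		draw: [drawThresholds],
	},
};

// const chart = new uPlot(opts, data, containerElement);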
53
frontend/src/lib/uPlotV2/utils/dataUtils.ts
Normal file
@@ -0,0 +1,53 @@
/**
 * Checks if a value is invalid for plotting
 *
 * @param value - The value to check
 * @returns true if the value is invalid (should be replaced with null), false otherwise
 */
export function isInvalidPlotValue(value: unknown): boolean {
	// Check for null or undefined
	if (value === null || value === undefined) {
		return true;
	}

	// Handle number checks
	if (typeof value === 'number') {
		// Check for NaN, Infinity, -Infinity
		return !Number.isFinite(value);
	}

	// Handle string values
	if (typeof value === 'string') {
		// Check for string representations of infinity
		if (['+Inf', '-Inf', 'Infinity', '-Infinity', 'NaN'].includes(value)) {
			return true;
		}

		// Try to parse the string as a number
		const numValue = parseFloat(value);

		// If parsing failed or resulted in a non-finite number, it's invalid
		if (Number.isNaN(numValue) || !Number.isFinite(numValue)) {
			return true;
		}
	}

	// Value is valid for plotting
	return false;
}

export function normalizePlotValue(
	value: number | string | null | undefined,
): number | null {
	if (isInvalidPlotValue(value)) {
		return null;
	}

	// Convert string numbers to actual numbers
	if (typeof value === 'string') {
		return parseFloat(value);
	}

	// Already a valid number
	return value as number;
}
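A small usage sketch: cleaning a raw series that mixes numbers, numeric strings, and sentinel values before handing it to uPlot. The input values are made up.

import { normalizePlotValue } from 'lib/uPlotV2/utils/dataUtils';

const raw: (number | string | null | undefined)[] = [12, '15.5', 'NaN', '+Inf', undefined, 18];

// uPlot expects gaps to be encoded as null rather than NaN or Infinity.
const plotValues = raw.map(normalizePlotValue);
// -> [12, 15.5, null, null, null, 18]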
415
frontend/src/lib/uPlotV2/utils/scale.ts
Normal file
@@ -0,0 +1,415 @@
|
||||
/**
|
||||
* Scale utilities for uPlot Y-axis configuration.
|
||||
* Handles linear/log distribution, range computation (with padding and soft/hard limits),
|
||||
* log-scale snapping, and threshold-aware soft limits.
|
||||
*/
|
||||
|
||||
import uPlot, { Range, Scale } from 'uplot';
|
||||
|
||||
import { DistributionType, ScaleProps } from '../config/types';
|
||||
import { Threshold } from '../hooks/types';
|
||||
import { findMinMaxThresholdValues } from './threshold';
|
||||
import { LogScaleLimits, RangeFunctionParams } from './types';
|
||||
|
||||
/**
|
||||
* Rounds a number down to the nearest multiple of incr.
|
||||
* Used for linear scale min so the axis starts on a clean tick.
|
||||
*/
|
||||
export function incrRoundDn(num: number, incr: number): number {
|
||||
return Math.floor(num / incr) * incr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rounds a number up to the nearest multiple of incr.
|
||||
* Used for linear scale max so the axis ends on a clean tick.
|
||||
*/
|
||||
export function incrRoundUp(num: number, incr: number): number {
|
||||
return Math.ceil(num / incr) * incr;
|
||||
}
|
||||
|
||||
/**
|
||||
* Snaps min/max/softMin/softMax to valid log-scale values (powers of logBase).
|
||||
* Only applies when distribution is logarithmic; otherwise returns limits unchanged.
|
||||
* Ensures axis bounds align to log "magnitude" for readable tick labels.
|
||||
*/
|
||||
export function normalizeLogScaleLimits({
|
||||
distr,
|
||||
logBase,
|
||||
limits,
|
||||
}: {
|
||||
distr?: DistributionType;
|
||||
logBase: number;
|
||||
limits: LogScaleLimits;
|
||||
}): LogScaleLimits {
|
||||
if (distr !== DistributionType.Logarithmic) {
|
||||
return limits;
|
||||
}
|
||||
|
||||
const logFn = logBase === 2 ? Math.log2 : Math.log10;
|
||||
|
||||
return {
|
||||
min: normalizeLogLimit(limits.min, logBase, logFn, Math.floor),
|
||||
max: normalizeLogLimit(limits.max, logBase, logFn, Math.ceil),
|
||||
softMin: normalizeLogLimit(limits.softMin, logBase, logFn, Math.floor),
|
||||
softMax: normalizeLogLimit(limits.softMax, logBase, logFn, Math.ceil),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a single limit value to the nearest valid log-scale value.
|
||||
* Rounds the log(value) with roundFn, then returns logBase^exp.
|
||||
* Values <= 0 or null are returned as-is (log scale requires positive values).
|
||||
*/
|
||||
function normalizeLogLimit(
|
||||
value: number | null,
|
||||
logBase: number,
|
||||
logFn: (v: number) => number,
|
||||
roundFn: (v: number) => number,
|
||||
): number | null {
|
||||
if (value == null || value <= 0) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const exp = roundFn(logFn(value));
|
||||
return logBase ** exp;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns uPlot scale distribution options for the Y axis.
|
||||
* Time (X) scale gets no distr/log; Y scale gets distr 1 (linear) or 3 (log) and log base 2 or 10.
|
||||
*/
|
||||
export function getDistributionConfig({
|
||||
time,
|
||||
distr,
|
||||
logBase,
|
||||
}: {
|
||||
time: ScaleProps['time'];
|
||||
distr?: DistributionType;
|
||||
logBase?: number;
|
||||
}): Partial<Scale> {
|
||||
if (time) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const resolvedLogBase = (logBase ?? 10) === 2 ? 2 : 10;
|
||||
|
||||
return {
|
||||
distr: distr === DistributionType.Logarithmic ? 3 : 1,
|
||||
log: resolvedLogBase,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Builds uPlot range config and flags for the range function.
|
||||
* - rangeConfig: pad, hard, soft, mode for min and max (used by uPlot.rangeNum / rangeLog).
|
||||
* - hardMinOnly / hardMaxOnly: true when only a hard limit is set (no soft), so range uses that bound.
|
||||
* - hasFixedRange: true when both min and max are hard-only (fully fixed axis).
|
||||
*/
|
||||
export function getRangeConfig(
|
||||
min: number | null,
|
||||
max: number | null,
|
||||
softMin: number | null,
|
||||
softMax: number | null,
|
||||
padMinBy: number,
|
||||
padMaxBy: number,
|
||||
): {
|
||||
rangeConfig: Range.Config;
|
||||
hardMinOnly: boolean;
|
||||
hardMaxOnly: boolean;
|
||||
hasFixedRange: boolean;
|
||||
} {
|
||||
// uPlot: mode 3 = auto pad from data; mode 1 = respect soft limit
|
||||
const softMinMode: Range.SoftMode = softMin == null ? 3 : 1;
|
||||
const softMaxMode: Range.SoftMode = softMax == null ? 3 : 1;
|
||||
|
||||
const rangeConfig: Range.Config = {
|
||||
min: {
|
||||
pad: padMinBy,
|
||||
hard: min ?? -Infinity,
|
||||
soft: softMin !== null ? softMin : undefined,
|
||||
mode: softMinMode,
|
||||
},
|
||||
max: {
|
||||
pad: padMaxBy,
|
||||
hard: max ?? Infinity,
|
||||
soft: softMax !== null ? softMax : undefined,
|
||||
mode: softMaxMode,
|
||||
},
|
||||
};
|
||||
|
||||
const hardMinOnly = softMin == null && min != null;
|
||||
const hardMaxOnly = softMax == null && max != null;
|
||||
const hasFixedRange = hardMinOnly && hardMaxOnly;
|
||||
|
||||
return {
|
||||
rangeConfig,
|
||||
hardMinOnly,
|
||||
hardMaxOnly,
|
||||
hasFixedRange,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Initial [min, max] for the range pipeline. Returns null when we have no data and no fixed range
|
||||
* (so the caller can bail and return [dataMin, dataMax] unchanged).
|
||||
*/
|
||||
function getInitialMinMax(
|
||||
dataMin: number | null,
|
||||
dataMax: number | null,
|
||||
hasFixedRange: boolean,
|
||||
): Range.MinMax | null {
|
||||
if (!hasFixedRange && dataMin == null && dataMax == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return [dataMin, dataMax];
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the linear-scale range using uPlot.rangeNum.
|
||||
* Uses hard min/max when hardMinOnly/hardMaxOnly; otherwise uses data min/max. Applies padding via rangeConfig.
|
||||
*/
|
||||
function getLinearScaleRange(
|
||||
minMax: Range.MinMax,
|
||||
params: RangeFunctionParams,
|
||||
dataMin: number | null,
|
||||
dataMax: number | null,
|
||||
): Range.MinMax {
|
||||
const { rangeConfig, hardMinOnly, hardMaxOnly, min, max } = params;
|
||||
const resolvedMin = hardMinOnly ? min : dataMin;
|
||||
const resolvedMax = hardMaxOnly ? max : dataMax;
|
||||
|
||||
if (resolvedMin == null || resolvedMax == null) {
|
||||
return minMax;
|
||||
}
|
||||
|
||||
return uPlot.rangeNum(resolvedMin, resolvedMax, rangeConfig);
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes the log-scale range using uPlot.rangeLog.
|
||||
* Resolves min/max from params or data, then delegates to uPlot's log range helper.
|
||||
*/
|
||||
function getLogScaleRange(
|
||||
minMax: Range.MinMax,
|
||||
params: RangeFunctionParams,
|
||||
dataMin: number | null,
|
||||
dataMax: number | null,
|
||||
logBase?: uPlot.Scale['log'],
|
||||
): Range.MinMax {
|
||||
const { min, max } = params;
|
||||
const resolvedMin = min ?? dataMin;
|
||||
const resolvedMax = max ?? dataMax;
|
||||
|
||||
if (resolvedMin == null || resolvedMax == null) {
|
||||
return minMax;
|
||||
}
|
||||
|
||||
return uPlot.rangeLog(
|
||||
resolvedMin,
|
||||
resolvedMax,
|
||||
(logBase ?? 10) as 2 | 10,
|
||||
true,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Snaps linear scale min down and max up to whole numbers so axis bounds are clean.
|
||||
*/
|
||||
function roundLinearRange(minMax: Range.MinMax): Range.MinMax {
|
||||
const [currentMin, currentMax] = minMax;
|
||||
let roundedMin = currentMin;
|
||||
let roundedMax = currentMax;
|
||||
|
||||
if (roundedMin != null) {
|
||||
roundedMin = incrRoundDn(roundedMin, 1);
|
||||
}
|
||||
|
||||
if (roundedMax != null) {
|
||||
roundedMax = incrRoundUp(roundedMax, 1);
|
||||
}
|
||||
|
||||
return [roundedMin, roundedMax];
|
||||
}
|
||||
|
||||
/**
|
||||
* Snaps log-scale [min, max] to exact powers of logBase (nearest magnitude below/above).
|
||||
* If min and max would be equal after snapping, max is increased by one magnitude so the range is valid.
|
||||
*/
|
||||
function adjustLogRange(
|
||||
minMax: Range.MinMax,
|
||||
logBase: number,
|
||||
logFn: (v: number) => number,
|
||||
): Range.MinMax {
|
||||
let [currentMin, currentMax] = minMax;
|
||||
|
||||
if (currentMin != null) {
|
||||
const minExp = Math.floor(logFn(currentMin));
|
||||
currentMin = logBase ** minExp;
|
||||
}
|
||||
|
||||
if (currentMax != null) {
|
||||
const maxExp = Math.ceil(logFn(currentMax));
|
||||
currentMax = logBase ** maxExp;
|
||||
|
||||
if (currentMin === currentMax) {
|
||||
currentMax *= logBase;
|
||||
}
|
||||
}
|
||||
|
||||
return [currentMin, currentMax];
|
||||
}
|
||||
|
||||
/**
|
||||
* For linear scales (distr === 1), clamps the computed range to the configured hard min/max when
|
||||
* hardMinOnly/hardMaxOnly are set. No-op for log scales.
|
||||
*/
|
||||
function applyHardLimits(
|
||||
minMax: Range.MinMax,
|
||||
params: RangeFunctionParams,
|
||||
distr: number,
|
||||
): Range.MinMax {
|
||||
let [currentMin, currentMax] = minMax;
|
||||
|
||||
if (distr !== 1) {
|
||||
return [currentMin, currentMax];
|
||||
}
|
||||
|
||||
const { hardMinOnly, hardMaxOnly, min, max } = params;
|
||||
|
||||
if (hardMinOnly && min != null) {
|
||||
currentMin = min;
|
||||
}
|
||||
if (hardMaxOnly && max != null) {
|
||||
currentMax = max;
|
||||
}
|
||||
|
||||
return [currentMin, currentMax];
|
||||
}
|
||||
|
||||
/**
|
||||
* If the range is invalid (min >= max), returns a safe default: [1, 100] for log (distr 3), [0, 100] for linear.
|
||||
*/
|
||||
function enforceValidRange(minMax: Range.MinMax, distr: number): Range.MinMax {
|
||||
const [currentMin, currentMax] = minMax;
|
||||
|
||||
if (currentMin != null && currentMax != null && currentMin >= currentMax) {
|
||||
return [distr === 3 ? 1 : 0, 100];
|
||||
}
|
||||
|
||||
return minMax;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the uPlot range function for a scale. Called by uPlot with (u, dataMin, dataMax, scaleKey).
|
||||
* Pipeline: initial min/max -> linear or log range (with padding) -> rounding/snapping -> hard limits -> valid range.
|
||||
*/
|
||||
export function createRangeFunction(
|
||||
params: RangeFunctionParams,
|
||||
): Range.Function {
|
||||
return (
|
||||
u: uPlot,
|
||||
dataMin: number | null,
|
||||
dataMax: number | null,
|
||||
scaleKey: string,
|
||||
): Range.MinMax => {
|
||||
const scale = u.scales[scaleKey];
|
||||
|
||||
const initialMinMax = getInitialMinMax(
|
||||
dataMin,
|
||||
dataMax,
|
||||
params.hasFixedRange,
|
||||
);
|
||||
if (!initialMinMax) {
|
||||
return [dataMin, dataMax];
|
||||
}
|
||||
|
||||
let minMax: Range.MinMax = initialMinMax;
|
||||
|
||||
const logBase = scale.log;
|
||||
|
||||
if (scale.distr === 1) {
|
||||
minMax = getLinearScaleRange(minMax, params, dataMin, dataMax);
|
||||
minMax = roundLinearRange(minMax);
|
||||
} else if (scale.distr === 3) {
|
||||
minMax = getLogScaleRange(minMax, params, dataMin, dataMax, logBase);
|
||||
const logFn = scale.log === 2 ? Math.log2 : Math.log10;
|
||||
minMax = adjustLogRange(minMax, (logBase ?? 10) as number, logFn);
|
||||
}
|
||||
|
||||
minMax = applyHardLimits(minMax, params, scale.distr ?? 1);
|
||||
|
||||
return enforceValidRange(minMax, scale.distr ?? 1);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Expands softMin/softMax so that all threshold lines fall within the soft range and stay visible.
|
||||
* Converts threshold values to yAxisUnit, then takes the min/max; softMin is lowered (or set) to
|
||||
* include the smallest threshold, softMax is raised (or set) to include the largest.
|
||||
*/
|
||||
export function adjustSoftLimitsWithThresholds(
|
||||
softMin: number | null,
|
||||
softMax: number | null,
|
||||
thresholds?: Threshold[],
|
||||
yAxisUnit?: string,
|
||||
): {
|
||||
softMin: number | null;
|
||||
softMax: number | null;
|
||||
} {
|
||||
if (!thresholds || thresholds.length === 0) {
|
||||
return { softMin, softMax };
|
||||
}
|
||||
|
||||
const [minThresholdValue, maxThresholdValue] = findMinMaxThresholdValues(
|
||||
thresholds,
|
||||
yAxisUnit,
|
||||
);
|
||||
|
||||
if (minThresholdValue === null && maxThresholdValue === null) {
|
||||
return { softMin, softMax };
|
||||
}
|
||||
|
||||
const adjustedSoftMin =
|
||||
minThresholdValue !== null
|
||||
? softMin !== null
|
||||
? Math.min(softMin, minThresholdValue)
|
||||
: minThresholdValue
|
||||
: softMin;
|
||||
|
||||
const adjustedSoftMax =
|
||||
maxThresholdValue !== null
|
||||
? softMax !== null
|
||||
? Math.max(softMax, maxThresholdValue)
|
||||
: maxThresholdValue
|
||||
: softMax;
|
||||
|
||||
return {
|
||||
softMin: adjustedSoftMin,
|
||||
softMax: adjustedSoftMax,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns fallback time bounds (min/max) as Unix timestamps in seconds when no
|
||||
* data range is available. Uses the last 24 hours: from one day ago to now.
|
||||
*/
|
||||
export function getFallbackMinMaxTimeStamp(): {
|
||||
fallbackMin: number;
|
||||
fallbackMax: number;
|
||||
} {
|
||||
const currentDate = new Date();
|
||||
// Get the Unix timestamp (milliseconds since January 1, 1970)
|
||||
const currentTime = currentDate.getTime();
|
||||
const currentUnixTimestamp = Math.floor(currentTime / 1000);
|
||||
|
||||
// Calculate the date and time one day ago
|
||||
const oneDayAgoUnixTimestamp = Math.floor(
|
||||
(currentDate.getTime() - 86400000) / 1000,
|
||||
); // 86400000 milliseconds in a day
|
||||
|
||||
return {
|
||||
fallbackMin: oneDayAgoUnixTimestamp,
|
||||
fallbackMax: currentUnixTimestamp,
|
||||
};
|
||||
}
|
||||
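Putting the helpers above together, a sketch of building a Y-scale whose range keeps threshold lines visible. The function signatures are taken from the file above; the concrete numbers, units, and scale options are illustrative.

import uPlot from 'uplot';
import {
	adjustSoftLimitsWithThresholds,
	createRangeFunction,
	getRangeConfig,
} from 'lib/uPlotV2/utils/scale';

const thresholds = [{ thresholdValue: 500, thresholdUnit: 'ms' }];

// Widen the soft limits so a 500ms threshold stays on screen even when the data peaks lower.
const { softMin, softMax } = adjustSoftLimitsWithThresholds(0, 100, thresholds, 'ms');

const { rangeConfig, hardMinOnly, hardMaxOnly, hasFixedRange } = getRangeConfig(
	null, // hard min
	null, // hard max
	softMin,
	softMax,
	0.1, // padMinBy
	0.1, // padMaxBy
);

const yScale: uPlot.Scale = {
	auto: true,
	range: createRangeFunction({
		rangeConfig,
		hardMinOnly,
		hardMaxOnly,
		hasFixedRange,
		min: null,
		max: null,
	}),
};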
39
frontend/src/lib/uPlotV2/utils/threshold.ts
Normal file
@@ -0,0 +1,39 @@
import { convertValue } from 'lib/getConvertedValue';

import { Threshold } from '../hooks/types';

/**
 * Find min and max threshold values after converting to the target unit
 */
export function findMinMaxThresholdValues(
	thresholds: Threshold[],
	yAxisUnit?: string,
): [number | null, number | null] {
	if (!thresholds || thresholds.length === 0) {
		return [null, null];
	}

	let minThresholdValue: number | null = null;
	let maxThresholdValue: number | null = null;

	thresholds.forEach((threshold) => {
		const { thresholdValue, thresholdUnit } = threshold;
		if (thresholdValue === undefined) {
			return;
		}

		const compareValue = convertValue(thresholdValue, thresholdUnit, yAxisUnit);
		if (compareValue === null) {
			return;
		}

		if (minThresholdValue === null || compareValue < minThresholdValue) {
			minThresholdValue = compareValue;
		}
		if (maxThresholdValue === null || compareValue > maxThresholdValue) {
			maxThresholdValue = compareValue;
		}
	});

	return [minThresholdValue, maxThresholdValue];
}
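A usage sketch, assuming the converter in lib/getConvertedValue recognizes the 's' and 'ms' unit ids used here.

import { findMinMaxThresholdValues } from 'lib/uPlotV2/utils/threshold';

const [minThreshold, maxThreshold] = findMinMaxThresholdValues(
	[
		{ thresholdValue: 0.2, thresholdUnit: 's' },
		{ thresholdValue: 0.5, thresholdUnit: 's' },
	],
	'ms', // target unit of the Y axis
);
// With seconds-to-milliseconds conversion in place this yields roughly [200, 500].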
17
frontend/src/lib/uPlotV2/utils/types.ts
Normal file
@@ -0,0 +1,17 @@
import { Range } from 'uplot';

export type LogScaleLimits = {
	min: number | null;
	max: number | null;
	softMin: number | null;
	softMax: number | null;
};

export type RangeFunctionParams = {
	rangeConfig: Range.Config;
	hardMinOnly: boolean;
	hardMaxOnly: boolean;
	hasFixedRange: boolean;
	min: number | null;
	max: number | null;
};
@@ -1,7 +1,7 @@
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { MemoryRouter, useLocation } from 'react-router-dom';
|
||||
import ROUTES from 'constants/routes';
|
||||
import * as dashboardUtils from 'container/DashboardContainer/DashboardDescription';
|
||||
import { sanitizeDashboardData } from 'container/DashboardContainer/DashboardDescription/utils';
|
||||
import DashboardsList from 'container/ListOfDashboard';
|
||||
import {
|
||||
dashboardEmptyState,
|
||||
@@ -12,8 +12,9 @@ import { rest } from 'msw';
|
||||
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
|
||||
import { fireEvent, render, waitFor } from 'tests/test-utils';
|
||||
|
||||
jest.mock('container/DashboardContainer/DashboardDescription', () => ({
|
||||
sanitizeDashboardData: jest.fn(),
|
||||
jest.mock('container/DashboardContainer/DashboardDescription/utils', () => ({
|
||||
sanitizeDashboardData: jest.fn((data) => data),
|
||||
downloadObjectAsJson: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
@@ -232,7 +233,7 @@ describe('dashboard list page', () => {
|
||||
expect(exportJsonBtn).toBeInTheDocument();
|
||||
fireEvent.click(exportJsonBtn);
|
||||
const firstDashboardData = dashboardSuccessResponse.data[0];
|
||||
expect(dashboardUtils.sanitizeDashboardData).toHaveBeenCalledWith(
|
||||
expect(sanitizeDashboardData).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
title: firstDashboardData.data.title,
|
||||
createdAt: firstDashboardData.createdAt,
|
||||
|
||||
@@ -45,6 +45,8 @@ import APIError from 'types/api/error';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { v4 as generateUUID } from 'uuid';
|
||||
|
||||
import { useDashboardVariables } from '../../hooks/dashboard/useDashboardVariables';
|
||||
import { updateDashboardVariablesStore } from './store/dashboardVariablesStore';
|
||||
import {
|
||||
DashboardSortOrder,
|
||||
IDashboardContext,
|
||||
@@ -196,6 +198,16 @@ export function DashboardProvider({
|
||||
: isDashboardWidgetPage?.params.dashboardId) || '';
|
||||
|
||||
const [selectedDashboard, setSelectedDashboard] = useState<Dashboard>();
|
||||
const dashboardVariables = useDashboardVariables();
|
||||
|
||||
useEffect(() => {
|
||||
const existingVariables = dashboardVariables;
|
||||
const updatedVariables = selectedDashboard?.data.variables || {};
|
||||
|
||||
if (!isEqual(existingVariables, updatedVariables)) {
|
||||
updateDashboardVariablesStore(updatedVariables);
|
||||
}
|
||||
}, [selectedDashboard]);
|
||||
|
||||
const {
|
||||
currentDashboard,
|
||||
|
||||
@@ -8,6 +8,7 @@ import ROUTES from 'constants/routes';
|
||||
import { DashboardProvider, useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import { useDashboardVariables } from '../../../hooks/dashboard/useDashboardVariables';
|
||||
import { initializeDefaultVariables } from '../initializeDefaultVariables';
|
||||
import { normalizeUrlValueForVariable } from '../normalizeUrlValue';
|
||||
|
||||
@@ -55,6 +56,7 @@ jest.mock('uuid', () => ({ v4: jest.fn(() => 'mock-uuid') }));
|
||||
|
||||
function TestComponent(): JSX.Element {
|
||||
const { dashboardResponse, dashboardId, selectedDashboard } = useDashboard();
|
||||
const { dashboardVariables } = useDashboardVariables();
|
||||
|
||||
return (
|
||||
<div>
|
||||
@@ -65,9 +67,7 @@ function TestComponent(): JSX.Element {
|
||||
{dashboardResponse.isFetching.toString()}
|
||||
</div>
|
||||
<div data-testid="dashboard-variables">
|
||||
{selectedDashboard?.data?.variables
|
||||
? JSON.stringify(selectedDashboard.data.variables)
|
||||
: 'null'}
|
||||
{dashboardVariables ? JSON.stringify(dashboardVariables) : 'null'}
|
||||
</div>
|
||||
<div data-testid="dashboard-data">
|
||||
{selectedDashboard?.data?.title || 'No Title'}
|
||||
|
||||
@@ -0,0 +1,17 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import createStore from './store';

// export type IDashboardVariables = DashboardData['variables'];
export type IDashboardVariables = Record<string, IDashboardVariable>;

export const dashboardVariablesStore = createStore<IDashboardVariables>({});

export function updateDashboardVariablesStore(
	variables: Partial<IDashboardVariables>,
): void {
	dashboardVariablesStore.update((currentVariables) => ({
		...currentVariables,
		...variables,
	}));
}
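A sketch of observing the store outside React; the import path is inferred from the relative import in the provider diff above, and the logging is illustrative.

import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariablesStore';

const unsubscribe = dashboardVariablesStore.subscribe(() => {
	// getSnapshot returns the latest merged variables map after every update.
	console.log('dashboard variables changed', dashboardVariablesStore.getSnapshot());
});

// Later, when the observer is no longer needed:
unsubscribe();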
44
frontend/src/providers/Dashboard/store/store.ts
Normal file
@@ -0,0 +1,44 @@
import { produce } from 'immer';
type ListenerFn = () => void;

export default function createStore<T>(
	init: T,
): {
	set: (setter: any) => void;
	update: (updater: (draft: T) => void) => void;
	subscribe: (listener: ListenerFn) => () => void;
	getSnapshot: () => T;
} {
	let listeners: ListenerFn[] = [];
	let state = init;

	function emitChange(): void {
		for (const listener of listeners) {
			listener();
		}
	}

	function set(setter: any): void {
		state = produce(state, setter);
		emitChange();
	}

	function update(updater: (draft: T) => void): void {
		state = produce(state, updater);
		emitChange();
	}

	return {
		set,
		update,
		subscribe(listener: ListenerFn): () => void {
			listeners = [...listeners, listener];
			return (): void => {
				listeners = listeners.filter((l) => l !== listener);
			};
		},
		getSnapshot(): T {
			return state;
		},
	};
}
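Since the store exposes the subscribe/getSnapshot pair React expects, it can be consumed through useSyncExternalStore. The counter store below is hypothetical; only createStore's contract comes from the file above.

import { useSyncExternalStore } from 'react';
import createStore from 'providers/Dashboard/store/store';

const counterStore = createStore<{ count: number }>({ count: 0 });

export function useCounter(): number {
	// subscribe/getSnapshot satisfy React's external-store contract directly.
	return useSyncExternalStore(counterStore.subscribe, counterStore.getSnapshot).count;
}

export function increment(): void {
	// update() runs the recipe through immer's produce and notifies all subscribers.
	counterStore.update((draft) => {
		draft.count += 1;
	});
}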
@@ -12030,6 +12030,11 @@ immediate@~3.0.5:
|
||||
resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b"
|
||||
integrity sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==
|
||||
|
||||
immer@11.1.3:
|
||||
version "11.1.3"
|
||||
resolved "https://registry.yarnpkg.com/immer/-/immer-11.1.3.tgz#78681e1deb6cec39753acf04eb16d7576c04f4d6"
|
||||
integrity sha512-6jQTc5z0KJFtr1UgFpIL3N9XSC3saRaI9PwWtzM2pSqkNGtiNkYY2OSwkOGDK2XcTRcLb1pi/aNkKZz0nxVH4Q==
|
||||
|
||||
immer@^9.0.6:
|
||||
version "9.0.21"
|
||||
resolved "https://registry.yarnpkg.com/immer/-/immer-9.0.21.tgz#1e025ea31a40f24fb064f1fef23e931496330176"
|
||||
|
||||
@@ -87,7 +87,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
|
||||
}
|
||||
|
||||
func (m *module) listPromotedPaths(ctx context.Context) ([]string, error) {
|
||||
paths, err := m.metadataStore.GetPromotedPaths(ctx)
|
||||
paths, err := m.metadataStore.ListPromotedPaths(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -142,7 +142,7 @@ func (m *module) PromoteAndIndexPaths(
|
||||
pathsStr = append(pathsStr, path.Path)
|
||||
}
|
||||
|
||||
existingPromotedPaths, err := m.metadataStore.GetPromotedPaths(ctx, pathsStr...)
|
||||
existingPromotedPaths, err := m.metadataStore.ListPromotedPaths(ctx, pathsStr...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -8,9 +8,11 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/model"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
qslabels "github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
"github.com/SigNoz/signoz/pkg/queryparser"
|
||||
"github.com/SigNoz/signoz/pkg/sqlstore"
|
||||
@@ -741,3 +743,26 @@ func (r *BaseRule) FilterNewSeries(ctx context.Context, ts time.Time, series []*
|
||||
|
||||
return filteredSeries, nil
|
||||
}
|
||||
|
||||
// HandleMissingDataAlert handles missing data alert logic by tracking the last timestamp
|
||||
// with data points and checking if a missing data alert should be sent based on the
|
||||
// [ruletypes.RuleCondition.AlertOnAbsent] and [ruletypes.RuleCondition.AbsentFor] conditions.
|
||||
//
|
||||
// Returns a pointer to the missing data alert if conditions are met, nil otherwise.
|
||||
func (r *BaseRule) HandleMissingDataAlert(ctx context.Context, ts time.Time, hasData bool) *ruletypes.Sample {
|
||||
// Track the last timestamp with data points for missing data alerts
|
||||
if hasData {
|
||||
r.lastTimestampWithDatapoints = ts
|
||||
}
|
||||
|
||||
if !r.ruleCondition.AlertOnAbsent || ts.Before(r.lastTimestampWithDatapoints.Add(time.Duration(r.ruleCondition.AbsentFor)*time.Minute)) {
|
||||
return nil
|
||||
}
|
||||
|
||||
r.logger.InfoContext(ctx, "no data found for rule condition", "rule_id", r.ID())
|
||||
lbls := labels.NewBuilder(labels.Labels{})
|
||||
if !r.lastTimestampWithDatapoints.IsZero() {
|
||||
lbls.Set(ruletypes.LabelLastSeen, r.lastTimestampWithDatapoints.Format(constants.AlertTimeFormat))
|
||||
}
|
||||
return &ruletypes.Sample{Metric: lbls.Labels(), IsMissing: true}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/times"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/timestamp"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
ruletypes "github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
)
|
||||
@@ -142,6 +142,12 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
|
||||
}
|
||||
|
||||
matrixToProcess := r.matrixToV3Series(res)
|
||||
|
||||
hasData := len(matrixToProcess) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
// Filter out new series if newGroupEvalDelay is configured
|
||||
if r.ShouldSkipNewGroups() {
|
||||
filteredSeries, filterErr := r.BaseRule.FilterNewSeries(ctx, ts, matrixToProcess)
|
||||
@@ -154,6 +160,7 @@ func (r *PromRule) buildAndRunQuery(ctx context.Context, ts time.Time) (ruletype
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
for _, series := range matrixToProcess {
|
||||
if !r.Condition().ShouldEval(series) {
|
||||
r.logger.InfoContext(
|
||||
@@ -243,6 +250,10 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
||||
for name, value := range r.annotations.Map() {
|
||||
annotations = append(annotations, qslabels.Label{Name: name, Value: expand(value)})
|
||||
}
|
||||
if result.IsMissing {
|
||||
lb.Set(qslabels.AlertNameLabel, "[No data] "+r.Name())
|
||||
lb.Set(qslabels.NoDataLabel, "true")
|
||||
}
|
||||
|
||||
lbs := lb.Labels()
|
||||
h := lbs.Hash()
|
||||
@@ -265,6 +276,7 @@ func (r *PromRule) Eval(ctx context.Context, ts time.Time) (int, error) {
|
||||
Value: result.V,
|
||||
GeneratorURL: r.GeneratorURL(),
|
||||
Receivers: ruleReceiverMap[lbs.Map()[ruletypes.LabelThresholdName]],
|
||||
Missing: result.IsMissing,
|
||||
IsRecovering: result.IsRecovering,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1345,6 +1345,275 @@ func TestMultipleThresholdPromRule(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestPromRule_NoData(t *testing.T) {
|
||||
evalTime := time.Now()
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test no data",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeProm,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(5 * time.Minute),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"A": {Query: "test_metric"},
|
||||
},
|
||||
},
|
||||
Thresholds: &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{{Name: "Test no data"}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// time_series_v4 cols of interest
|
||||
fingerprintCols := []cmock.ColumnType{
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "any(labels)", Type: "String"},
|
||||
}
|
||||
|
||||
// samples_v4 columns
|
||||
samplesCols := []cmock.ColumnType{
|
||||
{Name: "metric_name", Type: "String"},
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "unix_milli", Type: "Int64"},
|
||||
{Name: "value", Type: "Float64"},
|
||||
{Name: "flags", Type: "UInt32"},
|
||||
}
|
||||
|
||||
// see Timestamps on base_rule
|
||||
evalWindowMs := int64(5 * 60 * 1000) // 5 minutes in ms
|
||||
evalTimeMs := evalTime.UnixMilli()
|
||||
queryStart := ((evalTimeMs-2*evalWindowMs)/60000)*60000 + 1 // truncate to minute + 1ms
|
||||
queryEnd := (evalTimeMs / 60000) * 60000 // truncate to minute
|
||||
|
||||
cases := []struct {
|
||||
description string
|
||||
alertOnAbsent bool
|
||||
values []any
|
||||
target float64
|
||||
expectAlerts int
|
||||
}{
|
||||
{
|
||||
description: "AlertOnAbsent=false",
|
||||
alertOnAbsent: false,
|
||||
values: []any{},
|
||||
target: 200,
|
||||
expectAlerts: 0,
|
||||
},
|
||||
{
|
||||
description: "AlertOnAbsent=true",
|
||||
alertOnAbsent: true,
|
||||
values: []any{},
|
||||
target: 200,
|
||||
expectAlerts: 1,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.description, func(t *testing.T) {
|
||||
postableRule.RuleCondition.AlertOnAbsent = c.alertOnAbsent
|
||||
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
|
||||
|
||||
// single fingerprint with labels JSON
|
||||
fingerprint := uint64(12345)
|
||||
labelsJSON := `{"__name__":"test_metric"}`
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(cmock.NewRows(fingerprintCols, [][]any{{fingerprint, labelsJSON}}))
|
||||
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
|
||||
WithArgs("test_metric", "test_metric", "__name__", "test_metric", queryStart, queryEnd).
|
||||
WillReturnRows(cmock.NewRows(samplesCols, [][]any{}))
|
||||
|
||||
promProvider := prometheustest.New(
|
||||
context.Background(),
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
prometheus.Config{},
|
||||
telemetryStore,
|
||||
)
|
||||
defer func() {
|
||||
_ = promProvider.Close()
|
||||
}()
|
||||
|
||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, promProvider, "", time.Second, nil, nil, options)
|
||||
rule, err := NewPromRule("some-id", valuer.GenerateUUID(), &postableRule, logger, reader, promProvider)
|
||||
require.NoError(t, err)
|
||||
|
||||
alertsFound, err := rule.Eval(context.Background(), evalTime)
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Equal(t, c.expectAlerts, alertsFound)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPromRule_NoData_AbsentFor(t *testing.T) {
|
||||
// 1. Call Eval with data at time t1, to populate lastTimestampWithDatapoints
|
||||
// 2. Call Eval without data at time t2
|
||||
// 3. Alert fires only if t2 - t1 > AbsentFor
|
||||
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
evalWindow := 5 * time.Minute
|
||||
|
||||
// Set target higher than test data (100.0) so regular threshold alerts don't fire
|
||||
target := 500.0
|
||||
|
||||
postableRule := ruletypes.PostableRule{
|
||||
AlertName: "Test no data with AbsentFor",
|
||||
AlertType: ruletypes.AlertTypeMetric,
|
||||
RuleType: ruletypes.RuleTypeProm,
|
||||
Evaluation: &ruletypes.EvaluationEnvelope{Kind: ruletypes.RollingEvaluation, Spec: ruletypes.RollingWindow{
|
||||
EvalWindow: ruletypes.Duration(evalWindow),
|
||||
Frequency: ruletypes.Duration(1 * time.Minute),
|
||||
}},
|
||||
RuleCondition: &ruletypes.RuleCondition{
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
AlertOnAbsent: true,
|
||||
Target: &target,
|
||||
CompositeQuery: &v3.CompositeQuery{
|
||||
QueryType: v3.QueryTypePromQL,
|
||||
PromQueries: map[string]*v3.PromQuery{
|
||||
"A": {Query: "test_metric"},
|
||||
},
|
||||
},
|
||||
Thresholds: &ruletypes.RuleThresholdData{
|
||||
Kind: ruletypes.BasicThresholdKind,
|
||||
Spec: ruletypes.BasicRuleThresholds{{
|
||||
Name: "Test no data with AbsentFor",
|
||||
TargetValue: &target,
|
||||
MatchType: ruletypes.AtleastOnce,
|
||||
CompareOp: ruletypes.ValueIsAbove,
|
||||
}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
fingerprintCols := []cmock.ColumnType{
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "any(labels)", Type: "String"},
|
||||
}
|
||||
|
||||
samplesCols := []cmock.ColumnType{
|
||||
{Name: "metric_name", Type: "String"},
|
||||
{Name: "fingerprint", Type: "UInt64"},
|
||||
{Name: "unix_milli", Type: "Int64"},
|
||||
{Name: "value", Type: "Float64"},
|
||||
{Name: "flags", Type: "UInt32"},
|
||||
}
|
||||
|
||||
cases := []struct {
|
||||
description string
|
||||
absentFor uint64 // grace period in minutes
|
||||
timeBetweenEvals time.Duration // time between first eval (with data) and second eval (no data)
|
||||
expectAlertOnEval2 int
|
||||
}{
|
||||
{
|
||||
description: "WithinGracePeriod",
|
||||
absentFor: 5,
|
||||
timeBetweenEvals: 4 * time.Minute,
|
||||
expectAlertOnEval2: 0,
|
||||
},
|
||||
{
|
||||
description: "AfterGracePeriod",
|
||||
absentFor: 5,
|
||||
timeBetweenEvals: 6 * time.Minute,
|
||||
expectAlertOnEval2: 1,
|
||||
},
|
||||
}
|
||||
|
||||
logger := instrumentationtest.New().Logger()
|
||||
|
||||
for _, c := range cases {
|
||||
t.Run(c.description, func(t *testing.T) {
|
||||
postableRule.RuleCondition.AbsentFor = c.absentFor
|
||||
|
||||
// Timestamps for two evaluations
|
||||
// t1 is the eval time for first eval, data points are in the past
|
||||
t1 := baseTime.Add(5 * time.Minute) // first eval with data
|
||||
t2 := t1.Add(c.timeBetweenEvals) // second eval without data
|
||||
|
||||
telemetryStore := telemetrystoretest.New(telemetrystore.Config{}, &queryMatcherAny{})
|
||||
|
||||
fingerprint := uint64(12345)
|
||||
labelsJSON := `{"__name__":"test_metric"}`
|
||||
|
||||
// Helper to calculate query time range for an eval time
|
||||
calcQueryRange := func(evalTime time.Time) (int64, int64) {
|
||||
evalTimeMs := evalTime.UnixMilli()
|
||||
queryStart := ((evalTimeMs-2*evalWindow.Milliseconds())/60000)*60000 + 1
|
||||
queryEnd := (evalTimeMs / 60000) * 60000
|
||||
return queryStart, queryEnd
|
||||
}
|
||||
|
||||
// First eval (t1) - with data
|
||||
queryStart1, queryEnd1 := calcQueryRange(t1)
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(cmock.NewRows(fingerprintCols, [][]any{{fingerprint, labelsJSON}}))
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
|
||||
WithArgs("test_metric", "test_metric", "__name__", "test_metric", queryStart1, queryEnd1).
|
||||
WillReturnRows(cmock.NewRows(samplesCols, [][]any{
|
||||
// Data points in the past relative to t1
|
||||
{"test_metric", fingerprint, baseTime.UnixMilli(), 100.0, uint32(0)},
|
||||
{"test_metric", fingerprint, baseTime.Add(1 * time.Minute).UnixMilli(), 100.0, uint32(0)},
|
||||
{"test_metric", fingerprint, baseTime.Add(2 * time.Minute).UnixMilli(), 100.0, uint32(0)},
|
||||
}))
|
||||
|
||||
// Second eval (t2) - no data
|
||||
queryStart2, queryEnd2 := calcQueryRange(t2)
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT fingerprint, any").
|
||||
WithArgs("test_metric", "__name__", "test_metric").
|
||||
WillReturnRows(cmock.NewRows(fingerprintCols, [][]any{{fingerprint, labelsJSON}}))
|
||||
telemetryStore.Mock().
|
||||
ExpectQuery("SELECT metric_name, fingerprint, unix_milli").
|
||||
WithArgs("test_metric", "test_metric", "__name__", "test_metric", queryStart2, queryEnd2).
|
||||
WillReturnRows(cmock.NewRows(samplesCols, [][]any{})) // empty - no data
|
||||
|
||||
promProvider := prometheustest.New(
|
||||
context.Background(),
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
prometheus.Config{},
|
||||
telemetryStore,
|
||||
)
|
||||
defer func() {
|
||||
_ = promProvider.Close()
|
||||
}()
|
||||
|
||||
options := clickhouseReader.NewOptions("primaryNamespace")
|
||||
reader := clickhouseReader.NewReader(nil, telemetryStore, promProvider, "", time.Second, nil, nil, options)
|
||||
rule, err := NewPromRule("some-id", valuer.GenerateUUID(), &postableRule, logger, reader, promProvider)
|
||||
require.NoError(t, err)
|
||||
|
||||
// First eval with data - should NOT alert, but populates lastTimestampWithDatapoints
|
||||
alertsFound1, err := rule.Eval(context.Background(), t1)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 0, alertsFound1, "First eval with data should not alert")
|
||||
|
||||
// Second eval without data - should alert based on AbsentFor
|
||||
alertsFound2, err := rule.Eval(context.Background(), t2)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, c.expectAlertOnEval2, alertsFound2)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestPromRuleEval_RequireMinPoints(t *testing.T) {
|
||||
// fixed base time for deterministic tests
|
||||
baseTime := time.Unix(1700000000, 0)
|
||||
|
||||
@@ -24,7 +24,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/querier"
|
||||
querierV2 "github.com/SigNoz/signoz/pkg/query-service/app/querier/v2"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/app/queryBuilder"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/constants"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/interfaces"
|
||||
v3 "github.com/SigNoz/signoz/pkg/query-service/model/v3"
|
||||
"github.com/SigNoz/signoz/pkg/query-service/utils/labels"
|
||||
@@ -462,26 +461,13 @@ func (r *ThresholdRule) buildAndRunQuery(ctx context.Context, orgID valuer.UUID,
|
||||
}
|
||||
}
|
||||
|
||||
if queryResult != nil && len(queryResult.Series) > 0 {
|
||||
r.lastTimestampWithDatapoints = time.Now()
|
||||
hasData := queryResult != nil && len(queryResult.Series) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
// if the data is missing for `For` duration then we should send alert
|
||||
if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(time.Duration(r.Condition().AbsentFor)*time.Minute).Before(time.Now()) {
|
||||
r.logger.InfoContext(ctx, "no data found for rule condition", "rule_id", r.ID())
|
||||
lbls := labels.NewBuilder(labels.Labels{})
|
||||
if !r.lastTimestampWithDatapoints.IsZero() {
|
||||
lbls.Set(ruletypes.LabelLastSeen, r.lastTimestampWithDatapoints.Format(constants.AlertTimeFormat))
|
||||
}
|
||||
resultVector = append(resultVector, ruletypes.Sample{
|
||||
Metric: lbls.Labels(),
|
||||
IsMissing: true,
|
||||
})
|
||||
return resultVector, nil
|
||||
}
|
||||
|
||||
if queryResult == nil {
|
||||
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
|
||||
return resultVector, nil
|
||||
@@ -538,26 +524,13 @@ func (r *ThresholdRule) buildAndRunQueryV5(ctx context.Context, orgID valuer.UUI
|
||||
}
|
||||
}
|
||||
|
||||
if queryResult != nil && len(queryResult.Series) > 0 {
|
||||
r.lastTimestampWithDatapoints = time.Now()
|
||||
hasData := queryResult != nil && len(queryResult.Series) > 0
|
||||
if missingDataAlert := r.HandleMissingDataAlert(ctx, ts, hasData); missingDataAlert != nil {
|
||||
return ruletypes.Vector{*missingDataAlert}, nil
|
||||
}
|
||||
|
||||
var resultVector ruletypes.Vector
|
||||
|
||||
// if the data is missing for `For` duration then we should send alert
|
||||
if r.ruleCondition.AlertOnAbsent && r.lastTimestampWithDatapoints.Add(time.Duration(r.Condition().AbsentFor)*time.Minute).Before(time.Now()) {
|
||||
r.logger.InfoContext(ctx, "no data found for rule condition", "rule_id", r.ID())
|
||||
lbls := labels.NewBuilder(labels.Labels{})
|
||||
if !r.lastTimestampWithDatapoints.IsZero() {
|
||||
lbls.Set(ruletypes.LabelLastSeen, r.lastTimestampWithDatapoints.Format(constants.AlertTimeFormat))
|
||||
}
|
||||
resultVector = append(resultVector, ruletypes.Sample{
|
||||
Metric: lbls.Labels(),
|
||||
IsMissing: true,
|
||||
})
|
||||
return resultVector, nil
|
||||
}
|
||||
|
||||
if queryResult == nil {
|
||||
r.logger.WarnContext(ctx, "query result is nil", "rule_name", r.Name(), "query_name", selectedQuery)
|
||||
return resultVector, nil
|
||||
|
||||
@@ -61,13 +61,11 @@ var (
|
||||
}
|
||||
)
|
||||
|
||||
type fieldMapper struct {
|
||||
}
|
||||
type fieldMapper struct {}
|
||||
|
||||
func NewFieldMapper() qbtypes.FieldMapper {
|
||||
return &fieldMapper{}
|
||||
}
|
||||
|
||||
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
|
||||
switch key.FieldContext {
|
||||
case telemetrytypes.FieldContextResource:
|
||||
@@ -254,12 +252,27 @@ func (m *fieldMapper) buildFieldForJSON(key *telemetrytypes.TelemetryFieldKey) (
|
||||
"plan length is less than 2 for promoted path: %s", key.Name)
|
||||
}
|
||||
|
||||
// promoted column first then body_json column
|
||||
// TODO(Piyush): Change this in future for better performance
|
||||
expr = fmt.Sprintf("coalesce(%s, %s)",
|
||||
fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
|
||||
expr,
|
||||
node := plan[1]
|
||||
promotedExpr := fmt.Sprintf(
|
||||
"dynamicElement(%s, '%s')",
|
||||
node.FieldPath(),
|
||||
node.TerminalConfig.ElemType.StringValue(),
|
||||
)
|
||||
|
||||
// dynamicElement returns NULL for scalar types or an empty array for array types.
|
||||
if node.TerminalConfig.ElemType.IsArray {
|
||||
expr = fmt.Sprintf(
|
||||
"if(length(%s) > 0, %s, %s)",
|
||||
promotedExpr,
|
||||
promotedExpr,
|
||||
expr,
|
||||
)
|
||||
} else {
|
||||
// promoted column first then body_json column
|
||||
// TODO(Piyush): Change this in future for better performance
|
||||
expr = fmt.Sprintf("coalesce(%s, %s)", promotedExpr, expr)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return expr, nil
|
||||
@@ -281,8 +294,7 @@ func (m *fieldMapper) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (stri
|
||||
}
|
||||
|
||||
// Build arrayMap expressions for ALL available branches at the root level.
|
||||
// Iterate branches in deterministic order (JSON then Dynamic) so generated SQL
|
||||
// is stable across environments; map iteration order is random in Go.
|
||||
// Iterate branches in deterministic order (JSON then Dynamic)
|
||||
var arrayMapExpressions []string
|
||||
for _, node := range plan {
|
||||
for _, branchType := range node.BranchesInOrder() {
|
||||
|
||||
@@ -73,10 +73,10 @@ func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONA
|
||||
|
||||
// switch operator for array membership checks
|
||||
switch operator {
|
||||
case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
|
||||
operator = qbtypes.FilterOperatorEqual
|
||||
case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
|
||||
operator = qbtypes.FilterOperatorNotEqual
|
||||
case qbtypes.FilterOperatorContains:
|
||||
operator = qbtypes.FilterOperatorIn
|
||||
case qbtypes.FilterOperatorNotContains:
|
||||
operator = qbtypes.FilterOperatorNotIn
|
||||
}
|
||||
}
|
||||
|
||||
@@ -191,13 +191,14 @@ func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytype
|
||||
arrayExpr = typedArrayExpr()
|
||||
}
|
||||
|
||||
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, "x", operator)
|
||||
key := "x"
|
||||
fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
|
||||
op, err := c.applyOperator(sb, fieldExpr, operator, value)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
|
||||
return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
|
||||
}
|
||||
|
||||
// recurseArrayHops recursively builds array traversal conditions
|
||||
|
||||
@@ -316,7 +316,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
|
||||
Limit: 10,
|
||||
},
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (toFloat64(x) = ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (toFloat64(x) = ?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -345,7 +345,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (x = ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (x = ?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -360,7 +360,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (toString(x) = ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (x = ?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,jsondatatype=Array(Dynamic)]."},
},
@@ -389,7 +389,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(toFloat64OrNull(x) -> toFloat64OrNull(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? 
AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> (toFloat64(x) = ?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> (toFloat64OrNull(x) = ?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%4%", float64(4), "%4%", float64(4), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -404,7 +404,7 @@ func TestStatementBuilderListQueryBody(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? 
AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))')) OR arrayExists(x -> (toString(x) = ?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(Int64))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))'))) OR (arrayExists(`body_json.interests`-> arrayExists(`body_json.interests[].entities`-> arrayExists(`body_json.interests[].entities[].reviews`-> arrayExists(`body_json.interests[].entities[].reviews[].entries`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata`-> arrayExists(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))')) OR arrayExists(x -> (x = ?), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata[].positions`.`ratings`, 'Array(Nullable(String))'))), dynamicElement(`body_json.interests[].entities[].reviews[].entries[].metadata`.`positions`, 'Array(JSON(max_dynamic_types=0, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews[].entries`.`metadata`, 'Array(JSON(max_dynamic_types=1, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities[].reviews`.`entries`, 'Array(JSON(max_dynamic_types=2, max_dynamic_paths=0))')), dynamicElement(`body_json.interests[].entities`.`reviews`, 'Array(JSON(max_dynamic_types=4, max_dynamic_paths=0))')), dynamicElement(`body_json.interests`.`entities`, 'Array(JSON(max_dynamic_types=8, max_dynamic_paths=0))')), dynamicElement(body_json.`interests`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? 
LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%Good%", "Good", "%Good%", "Good", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `interests[].entities[].reviews[].entries[].metadata[].positions[].ratings` is ambiguous, found 2 different combinations of field context / data type: [name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]int64,jsondatatype=Array(Nullable(Int64)) name=interests[].entities[].reviews[].entries[].metadata[].positions[].ratings,context=body,datatype=[]string,jsondatatype=Array(Nullable(String))]."},
},
@@ -492,7 +492,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
disableBodyJSONQuery(t)
}()

statementBuilder := buildJSONTestStatementBuilder(t, "education")
statementBuilder := buildJSONTestStatementBuilder(t, "education", "tags")
cases := []struct {
name string
requestType qbtypes.RequestType
@@ -500,6 +500,20 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
expected qbtypes.Statement
expectedErr error
}{
{
name: "Has Array promoted uses body fallback",
requestType: qbtypes.RequestTypeRaw,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
Filter: &qbtypes.Filter{Expression: "has(body.tags, 'production')"},
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(if(length(dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))')) > 0, dynamicElement(body_json_promoted.`tags`, 'Array(Nullable(String))'), dynamicElement(body_json.`tags`, 'Array(Nullable(String))')), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "production", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
},
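The new "Has Array promoted uses body fallback" case above expects the builder to wrap the promoted column in if(length(...) > 0, ..., ...): read body_json_promoted when it actually holds values for the path, otherwise fall back to body_json. A minimal, hypothetical Go sketch of that expression pattern (promotedWithFallback is an illustrative name, not a helper from this change):

package main

import "fmt"

// promotedWithFallback builds the fallback expression seen in the expected query:
// prefer the promoted column when it has values for the path, else use body_json.
func promotedWithFallback(path, chType string) string {
	promoted := fmt.Sprintf("dynamicElement(body_json_promoted.`%s`, '%s')", path, chType)
	fallback := fmt.Sprintf("dynamicElement(body_json.`%s`, '%s')", path, chType)
	return fmt.Sprintf("if(length(%s) > 0, %s, %s)", promoted, promoted, fallback)
}

func main() {
	fmt.Println(promotedWithFallback("tags", "Array(Nullable(String))"))
}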
{
name: "Key inside Array(JSON) exists",
requestType: qbtypes.RequestTypeRaw,
@@ -551,7 +565,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(toFloat64(x) -> toFloat64(x) = ?, arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (toFloat64(x) = ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (toFloat64(x) = ?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (toFloat64(x) = ?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (toFloat64(x) = ?), arrayMap(x->dynamicElement(x, 'Float64'), arrayFilter(x->(dynamicType(x) = 'Float64'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "%1.65%", 1.65, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -580,7 +594,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> x = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (x = ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (x = ?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (x = ?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (x = ?), arrayMap(x->dynamicElement(x, 'Bool'), arrayFilter(x->(dynamicType(x) = 'Bool'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%true%", true, "%true%", true, "%true%", true, "%true%", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -595,7 +609,7 @@ func TestStatementBuilderListQueryBodyPromoted(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(toString(x) -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(toString(x) -> toString(x) = ?, dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> x = ?, arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, body_json, body_json_promoted, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (toString(x) = ?), dynamicElement(`body_json.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(toString(x)) LIKE LOWER(?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))')) OR arrayExists(x -> (toString(x) = ?), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Nullable(Float64))'))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))'))) OR (arrayExists(`body_json.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (x = ?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=0))')) OR arrayExists(`body_json_promoted.education`-> (arrayExists(x -> LOWER(x) LIKE LOWER(?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)')))) OR arrayExists(x -> (x = ?), arrayMap(x->dynamicElement(x, 'String'), arrayFilter(x->(dynamicType(x) = 'String'), dynamicElement(`body_json_promoted.education`.`parameters`, 'Array(Dynamic)'))))), dynamicElement(body_json_promoted.`education`, 'Array(JSON(max_dynamic_types=16, max_dynamic_paths=256))')))) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "%passed%", "passed", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{"Key `education[].parameters` is ambiguous, found 2 different combinations of field context / data type: [name=education[].parameters,context=body,datatype=[]float64,materialized=true,jsondatatype=Array(Nullable(Float64)) name=education[].parameters,context=body,datatype=[]dynamic,materialized=true,jsondatatype=Array(Dynamic)]."},
},
@@ -8,7 +8,6 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
@@ -49,7 +48,15 @@ func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
if len(v) > 0 {
valueType, _ = InferDataType(v[0], operator, key)
}
return valueType, v
// Between/In/NotIn compare a scalar field to multiple values - use scalar type for the field expression.
// For array paths (key has [*] or []), In means array membership so keep array type.
if operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorIn || operator == qbtypes.FilterOperatorNotIn {
if !strings.HasSuffix(key.Name, telemetrytypes.ArrayAnyIndex) && !strings.HasSuffix(key.Name, telemetrytypes.ArraySep) {
return valueType, v
}
}
// convert the scaler type to the array type
return telemetrytypes.ScalerFieldTypeToArrayFieldType[valueType], v
case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
valueType = telemetrytypes.FieldDataTypeInt64
case float32, float64:
@@ -60,11 +67,6 @@ func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytyp
valueType = telemetrytypes.FieldDataTypeBool
}

// check if it is array
if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
}

return valueType, value
}
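For readers skimming the hunk above: the new branch keeps the scalar element type for Between/In/NotIn only when the key is not an array path; keys ending in [] or [*] still widen to the array type. A small, self-contained Go sketch of that decision, using simplified stand-ins for the real telemetrytypes/qbtypes symbols (not the actual API):

package main

import (
	"fmt"
	"strings"
)

// Simplified stand-ins for the constants and map used in the diff above.
const (
	arrayAnyIndex = "[*]"
	arraySep      = "[]"
)

var scalarToArray = map[string]string{
	"string":  "[]string",
	"float64": "[]float64",
	"bool":    "[]bool",
}

// inferListType mirrors the new flow: for Between/In/NotIn on a plain key the
// scalar element type is kept; keys ending in [] or [*] (array membership)
// are widened to the matching array type.
func inferListType(keyName, elemType string, betweenOrIn bool) string {
	if betweenOrIn && !strings.HasSuffix(keyName, arrayAnyIndex) && !strings.HasSuffix(keyName, arraySep) {
		return elemType
	}
	return scalarToArray[elemType]
}

func main() {
	fmt.Println(inferListType("severity_text", "string", true))           // string
	fmt.Println(inferListType("education[].parameters", "float64", true)) // []float64
}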
@@ -10,6 +10,7 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/chcol"
schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz-otel-collector/utils"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
@@ -111,7 +112,8 @@ func (t *telemetryMetaStore) buildBodyJSONPaths(ctx context.Context,
}

for _, fieldKey := range fieldKeys {
fieldKey.Materialized = promoted[fieldKey.Name]
promotedKey := strings.Split(fieldKey.Name, telemetrytypes.ArraySep)[0]
fieldKey.Materialized = promoted.Contains(promotedKey)
fieldKey.Indexes = indexes[fieldKey.Name]
}
@@ -293,6 +295,33 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
return indexes, nil
}

func (t *telemetryMetaStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
pathConditions := []string{}
for _, path := range paths {
pathConditions = append(pathConditions, sb.Equal("path", path))
}
sb.Where(sb.Or(pathConditions...))
query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)

rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
if err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to load promoted paths")
}
defer rows.Close()

next := make(map[string]struct{})
for rows.Next() {
var path string
if err := rows.Scan(&path); err != nil {
return nil, errors.WrapInternalf(err, CodeFailLoadPromotedPaths, "failed to scan promoted path")
}
next[path] = struct{}{}
}

return next, nil
}
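A hedged usage sketch of the set returned by ListPromotedPaths (hypothetical values; assumes ArraySep is the "[]" separator, mirroring how buildBodyJSONPaths derives promotedKey in the hunk further up):

package main

import (
	"fmt"
	"strings"
)

// Hypothetical result of ListPromotedPaths: only root paths are stored.
var promoted = map[string]struct{}{
	"education": {},
	"tags":      {},
}

// isMaterialized checks the root path before the first "[]" segment,
// the same way buildBodyJSONPaths marks fields as materialized above.
func isMaterialized(fieldName string) bool {
	root := strings.Split(fieldName, "[]")[0]
	_, ok := promoted[root]
	return ok
}

func main() {
	fmt.Println(isMaterialized("education[].parameters")) // true
	fmt.Println(isMaterialized("message"))                // false
}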
// TODO(Piyush): Remove this if not used in future
func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, limit int) (*telemetrytypes.TelemetryFieldValues, bool, error) {
path = CleanPathPrefixes(path)
@@ -455,12 +484,11 @@ func derefValue(v any) any {
return val.Interface()
}

// IsPathPromoted checks if a specific path is promoted (Column Evolution table: field_name for logs body).
// IsPathPromoted checks if a specific path is promoted
func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (bool, error) {
split := strings.Split(path, telemetrytypes.ArraySep)
pathSegment := split[0]
query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE signal = ? AND column_name = ? AND field_context = ? AND field_name = ? LIMIT 1", DBName, PromotedPathsTableName)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, telemetrytypes.SignalLogs, telemetrylogs.LogsV2BodyPromotedColumn, telemetrytypes.FieldContextBody, pathSegment)
query := fmt.Sprintf("SELECT 1 FROM %s.%s WHERE path = ? LIMIT 1", DBName, PromotedPathsTableName)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, split[0])
if err != nil {
return false, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to check if path %s is promoted", path)
}
@@ -469,23 +497,15 @@ func (t *telemetryMetaStore) IsPathPromoted(ctx context.Context, path string) (b
return rows.Next(), nil
}

// GetPromotedPaths returns promoted paths from the Column Evolution table (field_name for logs body).
func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...string) (map[string]bool, error) {
sb := sqlbuilder.Select("field_name").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
conditions := []string{
sb.Equal("signal", telemetrytypes.SignalLogs),
sb.Equal("column_name", telemetrylogs.LogsV2BodyPromotedColumn),
sb.Equal("field_context", telemetrytypes.FieldContextBody),
sb.NotEqual("field_name", "__all__"),
// GetPromotedPaths checks if a specific path is promoted
func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...string) (*utils.ConcurrentSet[string], error) {
sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
pathConditions := []string{}
for _, path := range paths {
split := strings.Split(path, telemetrytypes.ArraySep)
pathConditions = append(pathConditions, sb.Equal("path", split[0]))
}
if len(paths) > 0 {
pathArgs := make([]interface{}, len(paths))
for i, p := range paths {
pathArgs[i] = p
}
conditions = append(conditions, sb.In("field_name", pathArgs))
}
sb.Where(sb.And(conditions...))
sb.Where(sb.Or(pathConditions...))

query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
@@ -494,13 +514,13 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
}
defer rows.Close()

promotedPaths := make(map[string]bool)
promotedPaths := utils.NewConcurrentSet[string]()
for rows.Next() {
var fieldName string
if err := rows.Scan(&fieldName); err != nil {
var path string
if err := rows.Scan(&path); err != nil {
return nil, errors.WrapInternalf(err, CodeFailCheckPathPromoted, "failed to scan promoted path")
}
promotedPaths[fieldName] = true
promotedPaths.Insert(path)
}

return promotedPaths, nil
@@ -514,22 +534,21 @@ func CleanPathPrefixes(path string) string {
return path
}

// PromotePaths inserts promoted paths into the Column Evolution table (same schema as signoz-otel-collector metadata_migrations).
func (t *telemetryMetaStore) PromotePaths(ctx context.Context, paths ...string) error {
batch, err := t.telemetrystore.ClickhouseDB().PrepareBatch(ctx,
fmt.Sprintf("INSERT INTO %s.%s (signal, column_name, column_type, field_context, field_name, version, release_time) VALUES", DBName,
fmt.Sprintf("INSERT INTO %s.%s (path, created_at) VALUES", DBName,
PromotedPathsTableName))
if err != nil {
return errors.WrapInternalf(err, CodeFailedToPrepareBatch, "failed to prepare batch")
}

releaseTime := time.Now().UnixNano()
nowMs := uint64(time.Now().UnixMilli())
for _, p := range paths {
trimmed := strings.TrimSpace(p)
if trimmed == "" {
continue
}
if err := batch.Append(telemetrytypes.SignalLogs, telemetrylogs.LogsV2BodyPromotedColumn, "JSON()", telemetrytypes.FieldContextBody, trimmed, 0, releaseTime); err != nil {
if err := batch.Append(trimmed, nowMs); err != nil {
_ = batch.Abort()
return errors.WrapInternalf(err, CodeFailedToAppendPath, "failed to append path")
}
@@ -7,7 +7,6 @@ const (
AttributesMetadataTableName = "distributed_attributes_metadata"
AttributesMetadataLocalTableName = "attributes_metadata"
PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
// Column Evolution table stores promoted paths as (signal, column_name, field_context, field_name); see signoz-otel-collector metadata_migrations.
PromotedPathsTableName = "distributed_column_evolution_metadata"
PromotedPathsTableName = otelcollectorconst.DistributedPromotedPathsTable
SkipIndexTableName = "system.data_skipping_indices"
)
@@ -93,6 +93,14 @@ var (
FieldDataTypeArrayFloat64: "Array(Float64)",
FieldDataTypeArrayBool: "Array(Bool)",
}

ScalerFieldTypeToArrayFieldType = map[FieldDataType]FieldDataType{
FieldDataTypeString: FieldDataTypeArrayString,
FieldDataTypeBool: FieldDataTypeArrayBool,
FieldDataTypeNumber: FieldDataTypeArrayNumber,
FieldDataTypeInt64: FieldDataTypeArrayInt64,
FieldDataTypeFloat64: FieldDataTypeArrayFloat64,
}
)

func (f FieldDataType) CHDataType() string {
@@ -36,7 +36,7 @@ type MetadataStore interface {
ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error)

// ListPromotedPaths lists the promoted paths.
GetPromotedPaths(ctx context.Context, paths ...string) (map[string]bool, error)
ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error)

// PromotePaths promotes the paths.
PromotePaths(ctx context.Context, paths ...string) error
@@ -16,7 +16,7 @@ type MockMetadataStore struct {
RelatedValuesMap map[string][]string
AllValuesMap map[string]*telemetrytypes.TelemetryFieldValues
TemporalityMap map[string]metrictypes.Temporality
PromotedPathsMap map[string]bool
PromotedPathsMap map[string]struct{}
LogsJSONIndexesMap map[string][]schemamigrator.Index
LookupKeysMap map[telemetrytypes.MetricMetadataLookupKey]int64
}
@@ -28,7 +28,7 @@ func NewMockMetadataStore() *MockMetadataStore {
RelatedValuesMap: make(map[string][]string),
AllValuesMap: make(map[string]*telemetrytypes.TelemetryFieldValues),
TemporalityMap: make(map[string]metrictypes.Temporality),
PromotedPathsMap: make(map[string]bool),
PromotedPathsMap: make(map[string]struct{}),
LogsJSONIndexesMap: make(map[string][]schemamigrator.Index),
LookupKeysMap: make(map[telemetrytypes.MetricMetadataLookupKey]int64),
}
@@ -295,13 +295,13 @@ func (m *MockMetadataStore) SetTemporality(metricName string, temporality metric
// PromotePaths promotes the paths.
func (m *MockMetadataStore) PromotePaths(ctx context.Context, paths ...string) error {
for _, path := range paths {
m.PromotedPathsMap[path] = true
m.PromotedPathsMap[path] = struct{}{}
}
return nil
}

// GetPromotedPaths returns the promoted paths.
func (m *MockMetadataStore) GetPromotedPaths(ctx context.Context, paths ...string) (map[string]bool, error) {
// ListPromotedPaths lists the promoted paths.
func (m *MockMetadataStore) ListPromotedPaths(ctx context.Context, paths ...string) (map[string]struct{}, error) {
return m.PromotedPathsMap, nil
}
@@ -65,6 +65,7 @@ func TestJSONTypeSet() (map[string][]JSONDataType, MetadataStore) {
"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {String},
"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {ArrayInt64, ArrayString},
"message": {String},
"tags": {ArrayString},
}

return types, nil
@@ -19,6 +19,7 @@ pytest_plugins = [
"fixtures.idp",
"fixtures.idputils",
"fixtures.notification_channel",
+"fixtures.alerts",
]

tests/integration/fixtures/alerts.py (new file, 110 lines)
@@ -0,0 +1,110 @@
from datetime import datetime, timezone
from http import HTTPStatus
from typing import Callable, List

import pytest
import requests

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
from fixtures.logs import Logs
from fixtures.metrics import Metrics
from fixtures.traces import Traces
from fixtures.utils import get_testdata_file_path

logger = setup_logger(__name__)


@pytest.fixture(name="create_alert_rule", scope="function")
def create_alert_rule(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> Callable[[dict], str]:
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    rule_ids = []

    def _create_alert_rule(rule_data: dict) -> str:
        response = requests.post(
            signoz.self.host_configs["8080"].get("/api/v1/rules"),
            json=rule_data,
            headers={"Authorization": f"Bearer {admin_token}"},
            timeout=5,
        )
        assert (
            response.status_code == HTTPStatus.OK
        ), f"Failed to create rule, api returned {response.status_code} with response: {response.text}"
        rule_id = response.json()["data"]["id"]
        rule_ids.append(rule_id)
        return rule_id

    def _delete_alert_rule(rule_id: str):
        logger.info("Deleting rule: %s", {"rule_id": rule_id})
        response = requests.delete(
            signoz.self.host_configs["8080"].get(f"/api/v1/rules/{rule_id}"),
            headers={"Authorization": f"Bearer {admin_token}"},
            timeout=5,
        )
        if response.status_code != HTTPStatus.OK:
            raise Exception( # pylint: disable=broad-exception-raised
                f"Failed to delete rule, api returned {response.status_code} with response: {response.text}"
            )

    yield _create_alert_rule
    # delete the rule on cleanup
    for rule_id in rule_ids:
        try:
            _delete_alert_rule(rule_id)
        except Exception as e: # pylint: disable=broad-exception-caught
            logger.error("Error deleting rule: %s", {"rule_id": rule_id, "error": e})


@pytest.fixture(name="insert_alert_data", scope="function")
def insert_alert_data(
    insert_metrics: Callable[[List[Metrics]], None],
    insert_traces: Callable[[List[Traces]], None],
    insert_logs: Callable[[List[Logs]], None],
) -> Callable[[List[types.AlertData]], None]:

    def _insert_alert_data(
        alert_data_items: List[types.AlertData],
        base_time: datetime = None,
    ) -> None:

        metrics: List[Metrics] = []
        traces: List[Traces] = []
        logs: List[Logs] = []

        now = base_time or datetime.now(tz=timezone.utc).replace(
            second=0, microsecond=0
        )

        for data_item in alert_data_items:
            if data_item.type == "metrics":
                _metrics = Metrics.load_from_file(
                    get_testdata_file_path(data_item.data_path),
                    base_time=now,
                )
                metrics.extend(_metrics)
            elif data_item.type == "traces":
                _traces = Traces.load_from_file(
                    get_testdata_file_path(data_item.data_path),
                    base_time=now,
                )
                traces.extend(_traces)
            elif data_item.type == "logs":
                _logs = Logs.load_from_file(
                    get_testdata_file_path(data_item.data_path),
                    base_time=now,
                )
                logs.extend(_logs)

        # Add data to ClickHouse if any data is present
        if len(metrics) > 0:
            insert_metrics(metrics)
        if len(traces) > 0:
            insert_traces(traces)
        if len(logs) > 0:
            insert_logs(logs)

    yield _insert_alert_data
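Taken together, the two fixtures above let an integration test seed telemetry and register an alert rule in one place, with cleanup handled on teardown. The snippet below is a minimal usage sketch, not code from this diff: the rule payload and the testdata path are hypothetical placeholders, and it assumes types.AlertData accepts the type and data_path fields that the fixture reads.

# Hypothetical usage sketch (not part of this diff); assumes the test module
# imports "from fixtures import types" like the fixture above, and that
# types.AlertData takes type/data_path keyword arguments.
from fixtures import types


def test_alert_rule_is_created(create_alert_rule, insert_alert_data):
    # Seed ClickHouse with logs for the rule to evaluate
    # ("alerts/error_logs.jsonl" is a placeholder path, not shipped testdata).
    insert_alert_data(
        [types.AlertData(type="logs", data_path="alerts/error_logs.jsonl")]
    )

    # Register a rule; the fixture records its id and deletes it on teardown.
    # The payload here is a placeholder, not a complete SigNoz rule body.
    rule_id = create_alert_rule({"alert": "High error rate", "ruleType": "threshold_rule"})
    assert rule_id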
@@ -9,6 +9,7 @@ from ksuid import KsuidMs

from fixtures import types
from fixtures.fingerprint import LogsOrTracesFingerprint
+from fixtures.utils import parse_timestamp


class LogsResource(ABC):
@@ -329,6 +330,59 @@ class Logs(ABC):
            ]
        )

    @classmethod
    def from_dict(
        cls,
        data: dict,
    ) -> "Logs":
        """Create a Logs instance from a dict."""
        # parse timestamp from iso format
        timestamp = parse_timestamp(data["timestamp"])
        return cls(
            timestamp=timestamp,
            resources=data.get("resources", {}),
            attributes=data.get("attributes", {}),
            body=data["body"],
            severity_text=data.get("severity_text", "INFO"),
        )

    @classmethod
    def load_from_file(
        cls,
        file_path: str,
        base_time: Optional[datetime.datetime] = None,
    ) -> List["Logs"]:
        """Load logs from a JSONL file."""

        data_list = []
        with open(file_path, "r", encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                data_list.append(json.loads(line))

        # If base_time provided, calculate time offset
        time_offset = datetime.timedelta(0)
        if base_time is not None:
            # Find earliest timestamp
            earliest = None
            for data in data_list:
                ts = parse_timestamp(data["timestamp"])
                if earliest is None or ts < earliest:
                    earliest = ts
            if earliest is not None:
                time_offset = base_time - earliest

        logs = []
        for data in data_list:
            original_ts = parse_timestamp(data["timestamp"])
            adjusted_ts = original_ts + time_offset
            data["timestamp"] = adjusted_ts.isoformat()
            logs.append(cls.from_dict(data))

        return logs


@pytest.fixture(name="insert_logs", scope="function")
def insert_logs(
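The base_time handling in Logs.load_from_file above rebases an entire JSONL file: the earliest record is shifted onto base_time and every other record keeps its original distance from it. A small self-contained illustration of that arithmetic (made-up timestamps, not repository testdata):

import datetime

earliest = datetime.datetime(2024, 1, 1, 10, 0, tzinfo=datetime.timezone.utc)
later = earliest + datetime.timedelta(minutes=5)
base_time = datetime.datetime(2025, 6, 1, 12, 0, tzinfo=datetime.timezone.utc)

offset = base_time - earliest              # same offset applied to every record
assert earliest + offset == base_time      # earliest record lands on base_time
assert (later + offset) - (earliest + offset) == datetime.timedelta(minutes=5)  # spacing preserved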
@@ -8,6 +8,7 @@ import numpy as np
import pytest

from fixtures import types
+from fixtures.utils import parse_timestamp


class MetricsTimeSeries(ABC):
@@ -341,10 +342,7 @@ class Metrics(ABC):
            metric_name_override: If provided, overrides the metric_name from data
        """
        # parse timestamp from iso format
-        ts_str = data["timestamp"]
-        if ts_str.endswith("Z"):
-            ts_str = ts_str[:-1] + "+00:00"
-        timestamp = datetime.datetime.fromisoformat(ts_str)
+        timestamp = parse_timestamp(data["timestamp"])

        return cls(
            metric_name=metric_name_override or data["metric_name"],
@@ -397,10 +395,7 @@ class Metrics(ABC):
            # Find earliest timestamp
            earliest = None
            for data in data_list:
-                ts_str = data["timestamp"]
-                if ts_str.endswith("Z"):
-                    ts_str = ts_str[:-1] + "+00:00"
-                ts = datetime.datetime.fromisoformat(ts_str)
+                ts = parse_timestamp(data["timestamp"])
                if earliest is None or ts < earliest:
                    earliest = ts
            if earliest is not None:
@@ -408,10 +403,7 @@ class Metrics(ABC):

        metrics = []
        for data in data_list:
-            ts_str = data["timestamp"]
-            if ts_str.endswith("Z"):
-                ts_str = ts_str[:-1] + "+00:00"
-            original_ts = datetime.datetime.fromisoformat(ts_str)
+            original_ts = parse_timestamp(data["timestamp"])
            adjusted_ts = original_ts + time_offset
            data["timestamp"] = adjusted_ts.isoformat()
            metrics.append(
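The three hunks above replace the repeated trailing-"Z" normalization with the shared fixtures.utils.parse_timestamp helper, whose source is not part of this diff. Based on the removed lines, an equivalent of the consolidated logic would look roughly like this (an assumption about the helper, not its actual implementation):

import datetime

def parse_timestamp(ts_str: str) -> datetime.datetime:
    # Rough equivalent of the inline parsing removed above; the real helper in
    # fixtures/utils.py may differ.
    if ts_str.endswith("Z"):
        ts_str = ts_str[:-1] + "+00:00"
    return datetime.datetime.fromisoformat(ts_str)

# parse_timestamp("2024-01-01T10:00:00Z") == datetime.datetime(2024, 1, 1, 10, 0, tzinfo=datetime.timezone.utc)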
Some files were not shown because too many files have changed in this diff.