Compare commits


17 Commits

Author SHA1 Message Date
Ashwin Bhatkal 07096fb491 Merge branch 'chore/issue-3780-use-variable-state-updates-3' into chore/issue-3780-use-variable-state-updates-4 2026-02-13 14:14:35 +05:30
Ashwin Bhatkal 5ca0c601cf chore: replace prop drilling with fetch store 2026-02-13 14:11:05 +05:30
Ashwin Bhatkal 9e4ad75f05 Merge branch 'main' into chore/issue-3780-use-variable-state-updates-2 2026-02-13 14:07:39 +05:30
Ashwin Bhatkal 3ee79fbe72 chore: fix tests 2026-02-13 13:33:13 +05:30
Ashwin Bhatkal a4a08b9781 chore: move tests 2026-02-13 13:17:02 +05:30
Ashwin Bhatkal 8b0f6380ac chore: add tests 2026-02-13 13:13:13 +05:30
Ashwin Bhatkal b1857d6fda Merge branch 'main' into chore/issue-3780-use-variable-state-updates-2 2026-02-13 12:17:58 +05:30
Ashwin Bhatkal 55026caa6a Merge branch 'chore/issue-3780-use-variable-state-updates' into chore/issue-3780-use-variable-state-updates-2 2026-02-13 01:38:17 +05:30
Ashwin Bhatkal e5f7fa93d0 chore: add tests 2026-02-13 01:36:42 +05:30
Ashwin Bhatkal a030938436 Merge branch 'chore/issue-3780-use-variable-state-updates-3' into chore/issue-3780-use-variable-state-updates-4 2026-02-13 01:06:26 +05:30
Ashwin Bhatkal b558ec9e97 Merge branch 'chore/issue-3780-use-variable-state-updates-2' into chore/issue-3780-use-variable-state-updates-3 2026-02-13 01:06:21 +05:30
Ashwin Bhatkal 915f4cc2c5 Merge branch 'chore/issue-3780-use-variable-state-updates' into chore/issue-3780-use-variable-state-updates-2 2026-02-13 01:06:14 +05:30
Ashwin Bhatkal 5aba097c5b Merge branch 'main' into chore/issue-3780-use-variable-state-updates 2026-02-13 01:05:01 +05:30
Ashwin Bhatkal 942a9745f9 chore: variables based panel fetching 2026-02-13 00:58:06 +05:30
Ashwin Bhatkal 38985d8ae1 chore: replace prop drilling with fetch store 2026-02-13 00:56:26 +05:30
Ashwin Bhatkal f0b80402d5 chore: variable fetch state machine 2026-02-13 00:47:38 +05:30
Ashwin Bhatkal 8a39a4b622 chore: shared utils update + API plumbing 2026-02-13 00:42:52 +05:30
45 changed files with 3038 additions and 1021 deletions

View File

@@ -285,6 +285,7 @@ flagger:
  config:
    boolean:
      use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:

View File

@@ -78,12 +78,10 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
let user: ReturnType<typeof userEvent.setup>;
let mockOnValueUpdate: jest.Mock;
let mockSetVariablesToGetUpdated: jest.Mock;
beforeEach(() => {
user = userEvent.setup();
mockOnValueUpdate = jest.fn();
mockSetVariablesToGetUpdated = jest.fn();
jest.clearAllMocks();
});
@@ -102,9 +100,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -150,9 +145,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -195,9 +187,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -247,9 +236,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -272,9 +258,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -308,9 +291,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -344,9 +324,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -369,9 +346,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -405,9 +379,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -461,9 +432,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -508,9 +476,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -548,9 +513,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -582,9 +544,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);

View File

@@ -9,11 +9,15 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
import './DashboardVariableSelection.styles.scss';
@@ -22,8 +26,6 @@ function DashboardVariableSelection(): JSX.Element | null {
const {
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
} = useDashboard();
const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -55,11 +57,14 @@ function DashboardVariableSelection(): JSX.Element | null {
[dependencyData?.order],
);
// Trigger refetch when dependency order changes or global time changes
// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
useEffect(() => {
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
@@ -121,29 +126,14 @@ function DashboardVariableSelection(): JSX.Element | null {
return prev;
});
if (dependencyData) {
const updatedVariables: string[] = [];
onUpdateVariableNode(
name,
dependencyData.graph,
dependencyData.order,
(node) => updatedVariables.push(node),
);
setVariablesToGetUpdated((prev) => [
...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
]);
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
// Cascade: enqueue query-type descendants for refetching
enqueueDescendantsOfVariable(name);
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
@@ -158,9 +148,6 @@ function DashboardVariableSelection(): JSX.Element | null {
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
);
})}
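
For orientation, a minimal sketch of the fetch cycle these calls drive. The state names and the { states, lastUpdated, cycleIds } store shape are taken from the variableFetchStore tests further down in this diff; the variable names and the dependency between them are hypothetical.

```ts
import {
	enqueueDescendantsOfVariable,
	enqueueFetchOfAllVariables,
	initializeVariableFetchStore,
	variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';

// Hypothetical dashboard: 'service' is a query variable that depends on 'env'
// (the dependency graph itself lives in the dashboard variables store).

// 1. Seed the store: unknown variables start as 'idle', stale entries are dropped.
initializeVariableFetchStore(['env', 'service']);

// 2. Start a cycle: roots go to 'loading' ('revalidating' if fetched before),
//    dependents go to 'waiting', and every enqueued variable's cycleId is bumped.
enqueueFetchOfAllVariables();
variableFetchStore.getSnapshot();
// -> { states: { env: 'loading', service: 'waiting' },
//      lastUpdated: {}, cycleIds: { env: 1, service: 1 } }

// 3. When the user picks a value for 'env', only its query-type descendants
//    are re-enqueued for fetching.
enqueueDescendantsOfVariable('env');
```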

View File

@@ -2,18 +2,25 @@ import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { SuccessResponseV2 } from 'types/api';
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { getOptionsForDynamicVariable } from './util';
import {
buildExistingDynamicVariableQuery,
extractErrorMessage,
getOptionsForDynamicVariable,
mergeUniqueStrings,
settleVariableFetch,
} from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
@@ -24,7 +31,6 @@ type DynamicVariableInputProps = Pick<
'variableData' | 'onValueUpdate' | 'existingVariables'
>;
// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
variableData,
onValueUpdate,
@@ -55,14 +61,8 @@ function DynamicVariableInput({
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
() => mergeUniqueStrings(optionsData, relatedValues),
[optionsData, relatedValues],
);
@@ -104,67 +104,24 @@ function DynamicVariableInput({
(state) => state.globalTime,
);
// existing query is the query built from the other dynamic variables around this one with their current values
// e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
// eslint-disable-next-line sonarjs/cognitive-complexity
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((val) => val !== null && val !== undefined && val !== '')
.map((val) => val.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((val) => {
// Check if value is a number
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${val.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
const existingQuery = useMemo(
() =>
buildExistingDynamicVariableQuery(
existingVariables,
variableData.id,
!!variableData.dynamicVariablesAttribute,
),
[existingVariables, variableData.id, variableData.dynamicVariablesAttribute],
);
// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
const handleSelectDropdownVisibilityChange = useCallback(
@@ -182,6 +139,73 @@ function DynamicVariableInput({
[onDropdownVisibleChange, optionsData, originalRelatedValues],
);
const handleQuerySuccess = useCallback(
(data: SuccessResponseV2<FieldValueResponse>): void => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Sync temp selection with latest API values when ALL is active and dropdown is open
if (variableData.allSelected && isDropdownOpen) {
const latestValues = mergeUniqueStrings(
newNormalizedValues,
newRelatedValues,
);
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
applyDefaultIfNeeded(
mergeUniqueStrings(newNormalizedValues, newRelatedValues),
);
}
settleVariableFetch(variableData.name, 'complete');
},
[
debouncedApiSearchText,
variableData.allSelected,
variableData.name,
isDropdownOpen,
tempSelection,
setTempSelection,
applyDefaultIfNeeded,
],
);
const handleQueryError = useCallback(
(error: { message?: string } | null): void => {
if (error) {
setErrorMessage(extractErrorMessage(error));
setIsRetryableError(checkIfRetryableError(error));
}
settleVariableFetch(variableData.name, 'failure');
},
[variableData.name],
);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
@@ -192,13 +216,14 @@ function DynamicVariableInput({
debouncedApiSearchText,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
variableFetchCycleId,
],
{
enabled:
variableData.type === 'DYNAMIC' &&
!!variableData.dynamicVariablesSource &&
!!variableData.dynamicVariablesAttribute,
queryFn: () =>
!!variableData.dynamicVariablesAttribute &&
(isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
queryFn: ({ signal }) =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
@@ -211,70 +236,10 @@ function DynamicVariableInput({
minTime,
maxTime,
existingQuery,
signal,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
const allNewOptions = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
applyDefaultIfNeeded(allNewOptions);
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
onSuccess: handleQuerySuccess,
onError: handleQueryError,
},
);
@@ -336,6 +301,8 @@ function DynamicVariableInput({
showRetryButton={isRetryableError}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
onSearch={handleSearch}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}
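
Both DynamicVariableInput above and QueryVariableInput below follow the same react-query wiring. A condensed sketch of that pattern, with a hypothetical fetchValues standing in for getFieldValues / dashboardVariablesQuery and a settle callback standing in for settleVariableFetch from the shared util:

```ts
import { useQuery } from 'react-query';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';

// Minimal sketch; `fetchValues` and `settle` are hypothetical stand-ins.
function useVariableOptions(
	name: string,
	fetchValues: (signal?: AbortSignal) => Promise<unknown>,
	settle: (outcome: 'complete' | 'failure') => void,
): void {
	const {
		variableFetchCycleId,
		isVariableSettled,
		isVariableFetching,
		hasVariableFetchedOnce,
	} = useVariableFetchState(name);

	useQuery(['variable-options', name, variableFetchCycleId], {
		// Fetch while the store marks this variable as loading/revalidating; once it
		// has settled after at least one cycle, keep the query enabled so local
		// refetches (e.g. search text changes) still work without a new cycle.
		enabled: isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
		// A new cycle bumps variableFetchCycleId and therefore the key; the
		// superseded request can then be aborted through the signal it was given.
		queryFn: ({ signal }) => fetchValues(signal),
		onSuccess: () => settle('complete'),
		onError: () => settle('failure'),
	});
}
```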

View File

@@ -3,8 +3,9 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { isArray, isEmpty, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -12,26 +13,18 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
import { areArraysEqual, settleVariableFetch } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';
type QueryVariableInputProps = Pick<
VariableItemProps,
| 'variableData'
| 'existingVariables'
| 'onValueUpdate'
| 'variablesToGetUpdated'
| 'setVariablesToGetUpdated'
| 'dependencyData'
'variableData' | 'existingVariables' | 'onValueUpdate'
>;
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
@@ -43,6 +36,15 @@ function QueryVariableInput({
(state) => state.globalTime,
);
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const {
tempSelection,
setTempSelection,
@@ -60,16 +62,6 @@ function QueryVariableInput({
strategy: queryVariableSelectStrategy,
});
const validVariableUpdate = useCallback((): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
}, [variableData.name, variablesToGetUpdated]);
const getOptions = useCallback(
// eslint-disable-next-line sonarjs/cognitive-complexity
(variablesRes: VariableResponseProps | null): void => {
@@ -103,18 +95,24 @@ function QueryVariableInput({
valueNotInList = true;
}
// variableData.allSelected handles the case where, on a change of options,
// we need to update local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (variableData.name && (valueNotInList || variableData.allSelected)) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(
variableData.name,
variableData.id,
newOptionsData,
true,
);
}
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
@@ -132,7 +130,11 @@ function QueryVariableInput({
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
@@ -141,10 +143,6 @@ function QueryVariableInput({
setOptionsData(newOptionsData);
// Apply default if no value is selected (e.g., new variable, first load)
applyDefaultIfNeeded(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
@@ -157,8 +155,6 @@ function QueryVariableInput({
onValueUpdate,
tempSelection,
setTempSelection,
validVariableUpdate,
setVariablesToGetUpdated,
applyDefaultIfNeeded,
],
);
@@ -169,27 +165,24 @@ function QueryVariableInput({
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
variableFetchCycleId,
],
{
enabled:
variableData &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
(isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
queryFn: ({ signal }) =>
dashboardVariablesQuery(
{
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
},
signal,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
settleVariableFetch(variableData.name, 'complete');
},
onError: (error: {
details: {
@@ -206,9 +199,7 @@ function QueryVariableInput({
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
settleVariableFetch(variableData.name, 'failure');
},
},
);
@@ -242,6 +233,8 @@ function QueryVariableInput({
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}

View File

@@ -28,6 +28,8 @@ interface SelectVariableInputProps {
showRetryButton?: boolean;
showIncompleteDataMessage?: boolean;
onSearch?: (searchTerm: string) => void;
waiting?: boolean;
waitingMessage?: string;
}
const MAX_TAG_DISPLAY_VALUES = 10;
@@ -65,6 +67,7 @@ function SelectVariableInput({
showRetryButton,
showIncompleteDataMessage,
onSearch,
waitingMessage,
}: SelectVariableInputProps): JSX.Element {
const commonProps = useMemo(
() => ({
@@ -78,7 +81,6 @@ function SelectVariableInput({
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
@@ -86,6 +88,8 @@ function SelectVariableInput({
'data-testid': 'variable-select',
onChange,
loading,
waitingMessage,
style: SelectItemStyle,
options,
errorMessage,
onRetry,
@@ -101,6 +105,7 @@ function SelectVariableInput({
defaultValue,
onChange,
loading,
waitingMessage,
options,
value,
errorMessage,

View File

@@ -47,14 +47,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -69,14 +61,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -92,14 +76,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -133,14 +109,6 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -163,14 +131,6 @@ describe('VariableItem', () => {
variableData={customVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -185,14 +145,6 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);

View File

@@ -1,7 +1,6 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import CustomVariableInput from './CustomVariableInput';
@@ -21,18 +20,12 @@ export interface VariableItemProps {
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function VariableItem({
variableData,
onValueUpdate,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const { name, description, type: variableType } = variableData;
@@ -65,9 +58,6 @@ function VariableItem({
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
)}
{variableType === 'DYNAMIC' && (

View File

@@ -8,14 +8,6 @@ import '@testing-library/jest-dom/extend-expect';
import VariableItem from '../VariableItem';
const mockOnValueUpdate = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
const baseDependencyData = {
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
};
const TEST_VARIABLE_ID = 'test_variable';
const VARIABLE_SELECT_TESTID = 'variable-select';
@@ -31,9 +23,6 @@ const renderVariableItem = (
variableData={variableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={baseDependencyData}
/>
</MockQueryClientProvider>,
);

View File

@@ -2,14 +2,12 @@ import {
buildDependencies,
buildDependencyGraph,
buildParentDependencyGraph,
checkAPIInvocation,
onUpdateVariableNode,
VariableGraph,
} from '../util';
import {
buildDependenciesMock,
buildGraphMock,
checkAPIInvocationMock,
onUpdateVariableNodeMock,
} from './mock';
@@ -72,97 +70,6 @@ describe('dashboardVariables - utilities and processors', () => {
});
});
describe('checkAPIInvocation', () => {
const {
variablesToGetUpdated,
variableData,
parentDependencyGraph,
} = checkAPIInvocationMock;
const mockRootElement = {
name: 'deployment_environment',
key: '036a47cd-9ffc-47de-9f27-0329198964a8',
id: '036a47cd-9ffc-47de-9f27-0329198964a8',
modificationUUID: '5f71b591-f583-497c-839d-6a1590c3f60f',
selectedValue: 'production',
type: 'QUERY',
// ... other properties omitted for brevity
} as any;
describe('edge cases', () => {
it('should return false when variableData is empty', () => {
expect(
checkAPIInvocation(
variablesToGetUpdated,
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return false when parentDependencyGraph is empty', () => {
expect(
checkAPIInvocation(variablesToGetUpdated, variableData, {}),
).toBeFalsy();
});
});
describe('variable sequences', () => {
it('should return true for valid sequence', () => {
expect(
checkAPIInvocation(
['k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return false for invalid sequence', () => {
expect(
checkAPIInvocation(
['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return false when variableData is not in sequence', () => {
expect(
checkAPIInvocation(
['deployment_environment', 'service_name', 'endpoint'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
});
describe('root element behavior', () => {
it('should return true for valid root element sequence', () => {
expect(
checkAPIInvocation(
[
'deployment_environment',
'service_name',
'endpoint',
'http_status_code',
],
mockRootElement,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return true for empty variablesToGetUpdated array', () => {
expect(
checkAPIInvocation([], mockRootElement, parentDependencyGraph),
).toBeTruthy();
});
});
});
describe('Graph Building Utilities', () => {
const { graph } = buildGraphMock;
const { variables } = buildDependenciesMock;
@@ -223,6 +130,16 @@ describe('dashboardVariables - utilities and processors', () => {
},
hasCycle: false,
cycleNodes: undefined,
transitiveDescendants: {
deployment_environment: ['service_name', 'endpoint', 'http_status_code'],
endpoint: ['http_status_code'],
environment: [],
http_status_code: [],
k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
k8s_namespace_name: [],
k8s_node_name: ['k8s_namespace_name'],
service_name: ['endpoint', 'http_status_code'],
},
};
expect(buildDependencyGraph(graph)).toEqual(expected);

View File

@@ -1,36 +1,3 @@
/* eslint-disable sonarjs/no-duplicate-string */
export const checkAPIInvocationMock = {
variablesToGetUpdated: [],
variableData: {
name: 'k8s_node_name',
key: '4d71d385-beaf-4434-8dbf-c62be68049fc',
allSelected: false,
customValue: '',
description: '',
id: '4d71d385-beaf-4434-8dbf-c62be68049fc',
modificationUUID: '77233d3c-96d7-4ccb-aa9d-11b04d563068',
multiSelect: false,
order: 6,
queryValue:
"SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}\nGROUP BY k8s_node_name",
selectedValue: 'gke-signoz-saas-si-consumer-bsc-e2sd4-a6d430fa-gvm2',
showALLOption: false,
sort: 'DISABLED',
textboxValue: '',
type: 'QUERY',
},
parentDependencyGraph: {
deployment_environment: [],
service_name: ['deployment_environment'],
endpoint: ['deployment_environment', 'service_name'],
http_status_code: ['endpoint'],
k8s_cluster_name: [],
environment: [],
k8s_node_name: ['k8s_cluster_name'],
k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
},
} as any;
export const onUpdateVariableNodeMock = {
nodeToUpdate: 'deployment_environment',
graph: {

View File

@@ -1,9 +1,16 @@
import { OptionData } from 'components/NewSelect/types';
import { isEmpty, isNull } from 'lodash-es';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isEmpty } from 'lodash-es';
import {
IDashboardVariables,
IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export function areArraysEqual(
@@ -45,30 +52,16 @@ const getDependentVariablesBasedOnVariableName = (
}
return variables
?.map((variable: any) => {
.map((variable) => {
if (variable.type === 'QUERY') {
// Combined pattern for all formats
// {{.variable_name}} - original format
// $variable_name - dollar prefix format
// [[variable_name]] - square bracket format
// {{variable_name}} - without dot format
const patterns = [
`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
`\\$${variableName}\\b`, // $var
`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
];
const combinedRegex = new RegExp(patterns.join('|'));
const queryValue = variable.queryValue || '';
const dependVarReMatch = queryValue.match(combinedRegex);
if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
if (textContainsVariableReference(queryValue, variableName)) {
return variable.name;
}
}
return null;
})
.filter((val: string | null) => !isNull(val));
.filter((val): val is string => val !== null);
};
export type VariableGraph = Record<string, string[]>;
@@ -246,10 +239,26 @@ export const buildDependencyGraph = (
const hasCycle = topologicalOrder.length !== Object.keys(dependencies)?.length;
// Pre-compute transitive descendants by walking topological order in reverse.
// Each node's transitive descendants = direct children + their transitive descendants.
const transitiveDescendants: VariableGraph = {};
for (let i = topologicalOrder.length - 1; i >= 0; i--) {
const node = topologicalOrder[i];
const desc = new Set<string>();
for (const child of adjList[node] || []) {
desc.add(child);
for (const d of transitiveDescendants[child] || []) {
desc.add(d);
}
}
transitiveDescendants[node] = Array.from(desc);
}
return {
order: topologicalOrder,
graph: adjList,
parentDependencyGraph: buildParentDependencyGraph(adjList),
transitiveDescendants,
hasCycle,
cycleNodes,
};
@@ -284,33 +293,6 @@ export const onUpdateVariableNode = (
});
};
export const checkAPIInvocation = (
variablesToGetUpdated: string[],
variableData: IDashboardVariable,
parentDependencyGraph?: VariableGraph,
): boolean => {
if (isEmpty(variableData.name)) {
return false;
}
if (isEmpty(parentDependencyGraph)) {
return false;
}
// if no dependency then true
const haveDependency =
parentDependencyGraph?.[variableData.name || '']?.length > 0;
if (!haveDependency) {
return true;
}
// if the variable has a dependency, return true only when it is the top element in the queue
return (
variablesToGetUpdated.length > 0 &&
variablesToGetUpdated[0] === variableData.name
);
};
export const getOptionsForDynamicVariable = (
normalizedValues: (string | number | boolean)[],
relatedValues: string[],
@@ -375,3 +357,130 @@ export const getSelectValue = (
}
return selectedValue?.toString();
};
/**
* Merges multiple arrays of values into a single deduplicated string array.
*/
export function mergeUniqueStrings(
...arrays: (string | number | boolean)[][]
): string[] {
return [...new Set(arrays.flatMap((arr) => arr.map((v) => v.toString())))];
}
function isEligibleFilterVariable(
variable: IDashboardVariable,
currentVariableId: string,
): boolean {
if (variable.id === currentVariableId) {
return false;
}
if (variable.type !== 'DYNAMIC') {
return false;
}
if (!variable.dynamicVariablesAttribute) {
return false;
}
if (!variable.selectedValue || isEmpty(variable.selectedValue)) {
return false;
}
return !(variable.showALLOption && variable.allSelected);
}
function formatQueryValue(val: string): string {
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val;
}
return `'${val.replace(/'/g, "\\'")}'`;
}
function buildQueryPart(attribute: string, values: string[]): string {
const formatted = values.map(formatQueryValue);
if (formatted.length === 1) {
return `${attribute} = ${formatted[0]}`;
}
return `${attribute} IN [${formatted.join(', ')}]`;
}
/**
* Builds a filter query string from sibling dynamic variables' selected values.
* e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
*/
export function buildExistingDynamicVariableQuery(
existingVariables: IDashboardVariables | null,
currentVariableId: string,
hasDynamicAttribute: boolean,
): string {
if (!existingVariables || !hasDynamicAttribute) {
return '';
}
const queryParts: string[] = [];
for (const variable of Object.values(existingVariables)) {
// Skip the current variable being processed
if (!isEligibleFilterVariable(variable, currentVariableId)) {
continue;
}
const rawValues = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = rawValues
.filter(
(val): val is string | number | boolean =>
val !== null && val !== undefined && val !== '',
)
.map((val) => val.toString());
if (validValues.length > 0 && variable.dynamicVariablesAttribute) {
queryParts.push(
buildQueryPart(variable.dynamicVariablesAttribute, validValues),
);
}
}
return queryParts.join(' AND ');
}
function isVariableInActiveFetchState(state: string | undefined): boolean {
return state === 'loading' || state === 'revalidating';
}
/**
* Completes or fails a variable's fetch state machine transition.
* No-ops if the variable is not currently in an active fetch state.
*/
export function settleVariableFetch(
name: string | undefined,
outcome: 'complete' | 'failure',
): void {
if (!name) {
return;
}
const currentState = variableFetchStore.getSnapshot().states[name];
if (!isVariableInActiveFetchState(currentState)) {
return;
}
if (outcome === 'complete') {
onVariableFetchComplete(name);
} else {
onVariableFetchFailure(name);
}
}
export function extractErrorMessage(
error: { message?: string } | null,
): string {
if (!error) {
return SOMETHING_WENT_WRONG;
}
return (
error.message ||
'Please make sure configuration is valid and you have required setup and permissions'
);
}
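
A quick worked example of the helpers added above; these are the exports from this util file, and the variable objects are hypothetical, trimmed to the fields the helpers actually read:

```ts
// Values are illustrative; `as any` skips the unrelated IDashboardVariable fields.
mergeUniqueStrings(['a', 1, true], ['a', 'b']);
// -> ['a', '1', 'true', 'b']

const existingVariables = {
	ns: {
		id: 'ns',
		type: 'DYNAMIC',
		dynamicVariablesAttribute: 'k8s.namespace.name',
		selectedValue: ['zeus', 'gene'],
	},
	op: {
		id: 'op',
		type: 'DYNAMIC',
		dynamicVariablesAttribute: 'doc_op_type',
		selectedValue: 'test',
	},
} as any;

buildExistingDynamicVariableQuery(existingVariables, 'current-variable-id', true);
// -> "k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'"
```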

View File

@@ -1,17 +1,11 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableItemProps } from '../VariableItem';
export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: IDashboardVariable;
variableData: VariableItemProps['variableData'];
onValueUpdate: VariableItemProps['onValueUpdate'];
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}

View File

@@ -49,15 +49,11 @@ const mockDashboard = {
// Mock the dashboard provider with stable functions to prevent infinite loops
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): any => ({
selectedDashboard: mockDashboard,
setSelectedDashboard: mockSetSelectedDashboard,
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
variablesToGetUpdated: ['env'], // Stable initial value
setVariablesToGetUpdated: mockSetVariablesToGetUpdated,
}),
}));

View File

@@ -6,10 +6,12 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
@@ -53,7 +55,6 @@ function GridCardGraph({
customOnRowClick,
customTimeRangeWindowForCoRelation,
enableDrillDown,
widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
@@ -64,8 +65,8 @@ function GridCardGraph({
toScrollWidgetId,
setToScrollWidgetId,
setDashboardQueryRangeCalled,
variablesToGetUpdated,
} = useDashboard();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -117,10 +118,25 @@ function GridCardGraph({
const updatedQuery = widget?.query;
const referencedVariableNames = useMemo(() => {
if (!variables || !updatedQuery) {
return [];
}
const allNames = Object.values(variables)
.map((v) => v.name)
.filter((name): name is string => !!name);
return getVariableReferencesInQuery(updatedQuery, allNames);
}, [updatedQuery, variables]);
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;
const isPanelWaitingOnAnyVariable = useIsPanelWaitingOnVariable(
referencedVariableNames,
);
const queryEnabledCondition =
isVisible && !isEmptyWidget && isQueryEnabled && !isPanelWaitingOnAnyVariable;
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (widget.panelTypes !== PANEL_TYPES.LIST) {
@@ -177,27 +193,6 @@ function GridCardGraph({
[requestData.query],
);
// Bring back dependency on variable chaining for panels to refetch,
// but only for non-dynamic variables. We derive a stable token from
// the head of the variablesToGetUpdated queue when it's non-dynamic.
const nonDynamicVariableChainToken = useMemo(() => {
if (!variablesToGetUpdated || variablesToGetUpdated.length === 0) {
return undefined;
}
if (!variables) {
return undefined;
}
const headName = variablesToGetUpdated[0];
const variableObj = Object.values(variables).find(
(variable) => variable?.name === headName,
);
if (variableObj && variableObj.type !== 'DYNAMIC') {
return headName;
}
return undefined;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variablesToGetUpdated, variables]);
const queryResponse = useGetQueryRange(
{
...requestData,
@@ -224,11 +219,7 @@ function GridCardGraph({
requestData,
variables
? Object.entries(variables).reduce((acc, [id, variable]) => {
if (
variable.type !== 'DYNAMIC' ||
(widgetsByDynamicVariableId?.[variable.id] &&
widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
) {
if (variable.name && referencedVariableNames.includes(variable.name)) {
return { ...acc, [id]: variable.selectedValue };
}
return acc;
@@ -237,9 +228,6 @@ function GridCardGraph({
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
? [customTimeRange.startTime, customTimeRange.endTime]
: []),
// Include non-dynamic variable chaining token to drive refetches
// only when a non-dynamic variable is at the head of the queue
...(nonDynamicVariableChainToken ? [nonDynamicVariableChainToken] : []),
],
retry(failureCount, error): boolean {
if (
@@ -252,7 +240,8 @@ function GridCardGraph({
return failureCount < 2;
},
keepPreviousData: true,
enabled: queryEnabledCondition && !nonDynamicVariableChainToken,
// eslint-disable-next-line sonarjs/no-redundant-boolean
enabled: queryEnabledCondition,
refetchOnMount: false,
onError: (error) => {
const errorMessage =
@@ -319,7 +308,7 @@ function GridCardGraph({
threshold={threshold}
headerMenuList={menuList}
isFetchingResponse={
queryResponse.isFetching || variablesToGetUpdated.length > 0
queryResponse.isFetching || isPanelWaitingOnAnyVariable
}
setRequestData={setRequestData}
onClickHandler={onClickHandler}
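
The net effect of the panel changes above, in miniature: only the variables a widget's query actually references feed its query key, and the fetch is held while any of them is unsettled. A small sketch with hypothetical values:

```ts
// Suppose the widget's query references only $env, so (per the code above)
// getVariableReferencesInQuery returns ['env']. Values are illustrative.
const referencedVariableNames = ['env'];

const variables = {
	'id-env': { name: 'env', selectedValue: 'prod' },
	'id-other': { name: 'other', selectedValue: 'x' },
} as any;

// Only referenced variables contribute to the panel's query key, so a change
// to 'other' no longer refetches this panel.
const variableKeyPart = Object.entries(variables).reduce(
	(acc, [id, variable]) =>
		variable.name && referencedVariableNames.includes(variable.name)
			? { ...acc, [id]: variable.selectedValue }
			: acc,
	{} as Record<string, unknown>,
);
// -> { 'id-env': 'prod' }
```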

View File

@@ -72,7 +72,6 @@ export interface GridCardGraphProps {
customOnRowClick?: (record: RowData) => void;
customTimeRangeWindowForCoRelation?: string | undefined;
enableDrillDown?: boolean;
widgetsByDynamicVariableId?: Record<string, string[]>;
}
export interface GetGraphVisibilityStateOnLegendClickProps {

View File

@@ -16,7 +16,6 @@ import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -102,8 +101,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();
useEffect(() => {
setCurrentPanelMap(panelMap);
}, [panelMap]);
@@ -617,7 +614,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
onDragSelect={onDragSelect}
dataAvailable={checkIfDataExists}
enableDrillDown={enableDrillDown}
widgetsByDynamicVariableId={widgetsByDynamicVariableId}
/>
</Card>
</CardContainer>

View File

@@ -0,0 +1,153 @@
import { useCallback, useMemo, useRef, useSyncExternalStore } from 'react';
import isEmpty from 'lodash-es/isEmpty';
import {
IVariableFetchStoreState,
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { useDashboardVariablesSelector } from './useDashboardVariables';
/**
* Generic selector hook for the variable fetch store.
* Same pattern as useDashboardVariablesSelector.
*/
const useVariableFetchSelector = <T>(
selector: (state: IVariableFetchStoreState) => T,
): T => {
const selectorRef = useRef(selector);
selectorRef.current = selector;
const getSnapshot = useCallback(
() => selectorRef.current(variableFetchStore.getSnapshot()),
[],
);
return useSyncExternalStore(variableFetchStore.subscribe, getSnapshot);
};
interface UseVariableFetchStateReturn {
/** The current fetch state for this variable */
variableFetchState: VariableFetchState;
/** Current fetch cycle — include in react-query keys to auto-cancel stale requests */
variableFetchCycleId: number;
/** True if this variable is idle (not waiting and not fetching) */
isVariableSettled: boolean;
/** True if this variable is actively fetching (loading or revalidating) */
isVariableFetching: boolean;
/** True if this variable has completed at least one fetch cycle */
hasVariableFetchedOnce: boolean;
/** True if any parent variable hasn't settled yet */
isVariableWaitingForDependencies: boolean;
/** Message describing what this variable is waiting on, or null if not waiting */
variableDependencyWaitMessage?: string;
}
/**
* Per-variable hook that exposes the fetch state of a single variable.
* Reusable by both variable input components and panel components.
*
* Subscribes to both variableFetchStore (for states) and
* dashboardVariablesStore (for parent graph) to compute derived values.
*/
export function useVariableFetchState(
variableName: string,
): UseVariableFetchStateReturn {
// This variable's fetch state (loading, waiting, idle, etc.)
const variableFetchState = useVariableFetchSelector(
(s) => s.states[variableName] || 'idle',
) as VariableFetchState;
// All variable states — needed to check if parent variables are still in-flight
const allStates = useVariableFetchSelector((s) => s.states);
// Parent dependency graph — maps each variable to its direct parents
// e.g. { "childVariable": ["parentVariable"] } means "childVariable" depends on "parentVariable"
const parentGraph = useDashboardVariablesSelector(
(s) => s.dependencyData?.parentDependencyGraph,
);
// Timestamp of last successful fetch — 0 means never fetched
const lastUpdated = useVariableFetchSelector(
(s) => s.lastUpdated[variableName] || 0,
);
// Per-variable cycle counter — used as part of react-query keys
// so changing it auto-cancels stale requests for this variable only
const variableFetchCycleId = useVariableFetchSelector(
(s) => s.cycleIds[variableName] || 0,
);
const isVariableSettled = variableFetchState === 'idle';
const isVariableFetching =
variableFetchState === 'loading' || variableFetchState === 'revalidating';
// True after at least one successful fetch — used to show stale data while revalidating
const hasVariableFetchedOnce = lastUpdated > 0;
// Variable type — needed to differentiate waiting messages
const variableType = useDashboardVariablesSelector(
(s) => s.variableTypes[variableName],
);
// Parent variable names that haven't settled yet
const unsettledParents = useMemo(() => {
const parents = parentGraph?.[variableName] || [];
return parents.filter((p) => (allStates[p] || 'idle') !== 'idle');
}, [parentGraph, variableName, allStates]);
const isVariableWaitingForDependencies = unsettledParents.length > 0;
const variableDependencyWaitMessage = useMemo(() => {
if (variableFetchState !== 'waiting') {
return;
}
if (variableType === 'DYNAMIC') {
return 'Waiting for all query variable options to load.';
}
if (unsettledParents.length === 0) {
return;
}
const quoted = unsettledParents.map((p) => `"${p}"`);
const names =
quoted.length > 1
? `${quoted.slice(0, -1).join(', ')} and ${quoted[quoted.length - 1]}`
: quoted[0];
return `Waiting for options of ${names} to load.`;
}, [variableFetchState, variableType, unsettledParents]);
return {
variableFetchState,
isVariableSettled,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
isVariableFetching,
hasVariableFetchedOnce,
variableFetchCycleId,
};
}
export function useIsPanelWaitingOnVariable(variableNames: string[]): boolean {
const states = useVariableFetchSelector((s) => s.states);
const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
const variableTypesMap = useDashboardVariablesSelector((s) => s.variableTypes);
return variableNames.some((name) => {
const variableFetchState = states[name];
const { selectedValue, allSelected } = dashboardVariables?.[name] || {};
const isVariableInFetchingOrWaitingState =
variableFetchState === 'loading' ||
variableFetchState === 'revalidating' ||
variableFetchState === 'waiting';
if (variableTypesMap[name] === 'DYNAMIC' && allSelected) {
return isVariableInFetchingOrWaitingState;
}
return isEmpty(selectedValue) ? isVariableInFetchingOrWaitingState : false;
});
}
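
To make the waiting rule concrete, a few illustrative calls to useIsPanelWaitingOnVariable, evaluated inside a component against hypothetical store contents:

```ts
// Hypothetical store contents:
//   states:        { env: 'loading', ns: 'loading', pod: 'waiting' }
//   variables:     env has selectedValue 'prod'; ns has none; pod is DYNAMIC with ALL selected
//   variableTypes: { env: 'QUERY', ns: 'QUERY', pod: 'DYNAMIC' }

useIsPanelWaitingOnVariable(['env']); // false: refetching, but a usable value is already selected
useIsPanelWaitingOnVariable(['ns']); // true: nothing selected yet and the fetch is in flight
useIsPanelWaitingOnVariable(['pod']); // true: DYNAMIC with ALL selected waits for the latest option list
```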

View File

@@ -3,6 +3,7 @@ import { TelemetryFieldKey } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { placeWidgetAtBottom } from 'container/NewWidget/utils';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isArray } from 'lodash-es';
import {
Dashboard,
@@ -116,10 +117,17 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
filter.key?.key === variable.dynamicVariablesAttribute &&
((isArray(filter.value) &&
filter.value.includes(`$${variable.name}`)) ||
filter.value === `$${variable.name}`) &&
(isArray(filter.value)
? filter.value.some(
(v) =>
typeof v === 'string' &&
variable.name &&
textContainsVariableReference(v, variable.name),
)
: typeof filter.value === 'string' &&
textContainsVariableReference(filter.value, variable.name)) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -132,7 +140,12 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
queryData.filter?.expression?.includes(`$${variable.name}`) &&
variable.name &&
queryData.filter?.expression &&
textContainsVariableReference(
queryData.filter.expression,
variable.name,
) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -149,7 +162,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
promqlQuery.query?.includes(`$${variable.name}`) &&
variable.name &&
promqlQuery.query &&
textContainsVariableReference(promqlQuery.query, variable.name) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -165,7 +180,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
clickhouseQuery.query?.includes(`$${variable.name}`) &&
variable.name &&
clickhouseQuery.query &&
textContainsVariableReference(clickhouseQuery.query, variable.name) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
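
textContainsVariableReference itself is not part of this diff. Judging from the combined regex it replaces in the dashboard variables util (the {{.var}}, {{var}}, $var and [[var]] patterns removed above), a plausible sketch could look like the following; treat it as an assumption rather than the actual helper:

```ts
// Assumed implementation, mirroring the removed combined-regex patterns; the real
// helper lives in lib/dashboardVariables/variableReference and may differ.
// As in the removed code, variable names are assumed to be regex-safe.
function textContainsVariableReference(text: string, variableName: string): boolean {
	const patterns = [
		`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
		`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
		`\\$${variableName}\\b`, // $var
		`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
	];
	return new RegExp(patterns.join('|')).test(text);
}
```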

View File

@@ -84,8 +84,6 @@ const DashboardContext = createContext<IDashboardContext>({
toScrollWidgetId: '',
setToScrollWidgetId: () => {},
updateLocalStorageDashboardVariables: () => {},
variablesToGetUpdated: [],
setVariablesToGetUpdated: () => {},
dashboardQueryRangeCalled: false,
setDashboardQueryRangeCalled: () => {},
selectedRowWidgetId: '',
@@ -183,10 +181,6 @@ export function DashboardProvider({
exact: true,
});
const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
[],
);
const [layouts, setLayouts] = useState<Layout[]>([]);
const [panelMap, setPanelMap] = useState<
@@ -517,8 +511,6 @@ export function DashboardProvider({
updatedTimeRef,
setToScrollWidgetId,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,
@@ -541,8 +533,6 @@ export function DashboardProvider({
toScrollWidgetId,
updateLocalStorageDashboardVariables,
currentDashboard,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,

View File

@@ -0,0 +1,527 @@
import * as dashboardVariablesStore from '../dashboardVariables/dashboardVariablesStore';
import { IDependencyData } from '../dashboardVariables/dashboardVariablesStoreTypes';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
onVariableFetchComplete,
onVariableFetchFailure,
VariableFetchContext,
variableFetchStore,
} from '../variableFetchStore';
const getVariableDependencyContextSpy = jest.spyOn(
dashboardVariablesStore,
'getVariableDependencyContext',
);
function resetStore(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
}
function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
getVariableDependencyContextSpy.mockReturnValue({
doAllVariablesHaveValuesSelected: false,
variableTypes: {},
dynamicVariableOrder: [],
dependencyData: null,
...overrides,
});
}
/**
* Helper to build a dependency data object for tests.
* Only the fields used by the store actions are required.
*/
function buildDependencyData(
overrides: Partial<IDependencyData> = {},
): IDependencyData {
return {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
...overrides,
};
}
describe('variableFetchStore', () => {
beforeEach(() => {
resetStore();
jest.clearAllMocks();
});
// ==================== initializeVariableFetchStore ====================
describe('initializeVariableFetchStore', () => {
it('should initialize new variables to idle', () => {
initializeVariableFetchStore(['a', 'b', 'c']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' });
});
it('should preserve existing states for known variables', () => {
// Pre-set a state
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
initializeVariableFetchStore(['a', 'b']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('idle');
});
it('should clean up stale variables that no longer exist', () => {
variableFetchStore.update((d) => {
d.states.old = 'idle';
d.lastUpdated.old = 100;
d.cycleIds.old = 3;
});
initializeVariableFetchStore(['a']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.old).toBeUndefined();
expect(storeSnapshot.lastUpdated.old).toBeUndefined();
expect(storeSnapshot.cycleIds.old).toBeUndefined();
expect(storeSnapshot.states.a).toBe('idle');
});
it('should handle empty variable names array', () => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
initializeVariableFetchStore([]);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({});
});
});
// ==================== enqueueFetchOfAllVariables ====================
describe('enqueueFetchOfAllVariables', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
expect(variableFetchStore.getSnapshot().states.a).toBe('idle');
});
it('should set root query variables to loading and dependent ones to waiting', () => {
// a is root (no parents), b depends on a
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('waiting');
});
it('should set root query variables to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a'],
parentDependencyGraph: { a: [] },
}),
variableTypes: { a: 'QUERY' },
});
// Pre-set lastUpdated so it appears previously fetched
variableFetchStore.update((d) => {
d.lastUpdated.a = 1000;
});
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('revalidating');
});
it('should bump cycle IDs for all enqueued variables', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.cycleIds.a).toBe(1);
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set dynamic variables to waiting when not all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: false,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should set dynamic variables to loading when all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: true,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should not treat non-QUERY parents as query parents', () => {
// b has a CUSTOM parent — shouldn't cause waiting
mockContext({
dependencyData: buildDependencyData({
order: ['b'],
parentDependencyGraph: { b: ['customVar'] },
}),
variableTypes: { b: 'QUERY', customVar: 'CUSTOM' },
});
initializeVariableFetchStore(['b', 'customVar']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
});
});
// ==================== onVariableFetchComplete ====================
describe('onVariableFetchComplete', () => {
it('should set the completed variable to idle with a lastUpdated timestamp', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
const before = Date.now();
onVariableFetchComplete('a');
const after = Date.now();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.lastUpdated.a).toBeGreaterThanOrEqual(before);
expect(storeSnapshot.lastUpdated.a).toBeLessThanOrEqual(after);
});
it('should unblock waiting query-type children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.states.b).toBe('loading');
});
it('should not unblock non-QUERY children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// dyn1 is DYNAMIC, not QUERY, so it should remain waiting
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables are settled', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: [] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should NOT unlock dynamic variables if a query variable is still in-flight', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
});
// ==================== onVariableFetchFailure ====================
describe('onVariableFetchFailure', () => {
it('should set the failed variable to error', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
});
it('should set query-type transitive descendants to idle', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.c = 'waiting';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
expect(storeSnapshot.states.b).toBe('idle');
expect(storeSnapshot.states.c).toBe('idle');
});
it('should not touch non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables settle via error', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('loading');
});
});
// ==================== enqueueDescendantsOfVariable ====================
describe('enqueueDescendantsOfVariable', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('idle');
});
it('should enqueue query-type descendants with all parents settled', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set descendants to waiting when some parents are not settled', () => {
// b depends on both a and c; c is still loading
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'loading';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('waiting');
});
it('should skip non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
parentDependencyGraph: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('a');
// dyn1 is DYNAMIC, so it should not be touched
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('idle');
});
it('should handle chain of descendants: a -> b -> c', () => {
// a -> b -> c, all QUERY
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
parentDependencyGraph: { b: ['a'], c: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// b's parent (a) is idle/settled → loading
expect(storeSnapshot.states.b).toBe('loading');
// c's parent (b) just moved to loading (not settled) → waiting
expect(storeSnapshot.states.c).toBe('waiting');
});
it('should set descendants to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.lastUpdated.b = 1000;
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
});
});
});

View File

@@ -0,0 +1,196 @@
import {
IVariableFetchStoreState,
VariableFetchState,
} from '../variableFetchStore';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';
describe('variableFetchStoreUtils', () => {
describe('isSettled', () => {
it('should return true for idle state', () => {
expect(isSettled('idle')).toBe(true);
});
it('should return true for error state', () => {
expect(isSettled('error')).toBe(true);
});
it('should return false for loading state', () => {
expect(isSettled('loading')).toBe(false);
});
it('should return false for revalidating state', () => {
expect(isSettled('revalidating')).toBe(false);
});
it('should return false for waiting state', () => {
expect(isSettled('waiting')).toBe(false);
});
it('should return false for undefined', () => {
expect(isSettled(undefined)).toBe(false);
});
});
describe('resolveFetchState', () => {
it('should return "loading" when variable has never been fetched', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: {},
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "loading" when lastUpdated is 0', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 0 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "revalidating" when variable has been fetched before', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 1000 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('revalidating');
});
});
describe('areAllQueryVariablesSettled', () => {
it('should return true when all query variables are idle', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'idle',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when all query variables are in error', () => {
const states: Record<string, VariableFetchState> = {
a: 'error',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true with a mix of idle and error query variables', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return false when any query variable is loading', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'loading',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should return false when any query variable is waiting', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'waiting',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should ignore non-QUERY variable types', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
dynVar: 'loading',
};
const variableTypes = {
a: 'QUERY' as const,
dynVar: 'DYNAMIC' as const,
};
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when there are no QUERY variables', () => {
const states: Record<string, VariableFetchState> = {
dynVar: 'loading',
};
const variableTypes = { dynVar: 'DYNAMIC' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
});
describe('unlockWaitingDynamicVariables', () => {
it('should transition waiting dynamic variables to loading when never fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting', dyn2: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('loading');
expect(draft.states.dyn2).toBe('loading');
});
it('should transition waiting dynamic variables to revalidating when previously fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: { dyn1: 1000 },
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1']);
expect(draft.states.dyn1).toBe('revalidating');
});
it('should not touch dynamic variables that are not in waiting state', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'idle', dyn2: 'loading' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('idle');
expect(draft.states.dyn2).toBe('loading');
});
it('should handle empty dynamic variable order', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, []);
expect(draft.states.dyn1).toBe('waiting');
});
});
});

View File

@@ -0,0 +1,225 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
dashboardVariablesStore,
getVariableDependencyContext,
setDashboardVariablesStore,
updateDashboardVariablesStore,
} from '../dashboardVariablesStore';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
function createVariable(
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
return {
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
};
}
function resetStore(): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
describe('dashboardVariablesStore', () => {
beforeEach(() => {
resetStore();
});
describe('setDashboardVariablesStore', () => {
it('should set the dashboard variables and compute derived values', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-1');
expect(storeSnapshot.variables).toEqual(variables);
expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' });
expect(storeSnapshot.sortedVariablesArray).toHaveLength(1);
});
});
describe('updateDashboardVariablesStore', () => {
it('should update variables and recompute derived values', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
},
});
const updatedVariables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
};
updateDashboardVariablesStore({
dashboardId: 'dash-1',
variables: updatedVariables,
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']);
});
it('should replace dashboardId when it does not match', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
'not-there': createVariable({ name: 'not-there', order: 0 }),
},
});
updateDashboardVariablesStore({
dashboardId: 'dash-2',
variables: {
a: createVariable({ name: 'a', order: 0 }),
},
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-2');
expect(storeSnapshot.variableTypes).toEqual({
a: 'QUERY',
});
expect(storeSnapshot.variableTypes).not.toEqual({
'not-there': 'QUERY',
});
});
});
describe('getVariableDependencyContext', () => {
it('should return context with all fields', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
},
});
const {
variableTypes,
dynamicVariableOrder,
dependencyData,
} = getVariableDependencyContext();
expect(variableTypes).toEqual({ env: 'QUERY' });
expect(dynamicVariableOrder).toEqual([]);
expect(dependencyData).not.toBeNull();
});
it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: 'us-east',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: undefined,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: null as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: false,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
});
});

View File

@@ -0,0 +1,369 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
import {
buildDynamicVariableOrder,
buildSortedVariablesArray,
buildVariableTypesMap,
computeDerivedValues,
} from '../dashboardVariablesStoreUtils';
const createVariable = (
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
});
describe('dashboardVariablesStoreUtils', () => {
describe('buildSortedVariablesArray', () => {
it('should sort variables by order property', () => {
const variables: IDashboardVariables = {
c: createVariable({ name: 'c', order: 3 }),
a: createVariable({ name: 'a', order: 1 }),
b: createVariable({ name: 'b', order: 2 }),
};
const result = buildSortedVariablesArray(variables);
expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']);
});
it('should return empty array for empty variables', () => {
const result = buildSortedVariablesArray({});
expect(result).toEqual([]);
});
it('should create copies of variables (not references)', () => {
const original = createVariable({ name: 'a', order: 0 });
const variables: IDashboardVariables = { a: original };
const result = buildSortedVariablesArray(variables);
expect(result[0]).not.toBe(original);
expect(result[0]).toEqual(original);
});
});
describe('buildVariableTypesMap', () => {
it('should create a name-to-type mapping', () => {
const sorted = [
createVariable({ name: 'env', type: 'QUERY' }),
createVariable({ name: 'region', type: 'CUSTOM' }),
createVariable({ name: 'dynVar', type: 'DYNAMIC' }),
createVariable({ name: 'text', type: 'TEXTBOX' }),
];
const result = buildVariableTypesMap(sorted);
expect(result).toEqual({
env: 'QUERY',
region: 'CUSTOM',
dynVar: 'DYNAMIC',
text: 'TEXTBOX',
});
});
it('should return empty object for empty array', () => {
expect(buildVariableTypesMap([])).toEqual({});
});
});
describe('buildDynamicVariableOrder', () => {
it('should return only DYNAMIC variable names in order', () => {
const sorted = [
createVariable({ name: 'queryVar', type: 'QUERY', order: 0 }),
createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
createVariable({ name: 'customVar', type: 'CUSTOM', order: 2 }),
createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
];
const result = buildDynamicVariableOrder(sorted);
expect(result).toEqual(['dyn1', 'dyn2']);
});
it('should return empty array when no DYNAMIC variables exist', () => {
const sorted = [
createVariable({ name: 'a', type: 'QUERY' }),
createVariable({ name: 'b', type: 'CUSTOM' }),
];
expect(buildDynamicVariableOrder(sorted)).toEqual([]);
});
it('should return empty array for empty input', () => {
expect(buildDynamicVariableOrder([])).toEqual([]);
});
});
describe('computeDerivedValues', () => {
it('should compute all derived values from variables', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 1,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(2);
expect(result.sortedVariablesArray[0].name).toBe('env');
expect(result.sortedVariablesArray[1].name).toBe('dyn1');
expect(result.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
// dependencyData should exist since there are variables
expect(result.dependencyData).not.toBeNull();
});
it('should return null dependencyData for empty variables', () => {
const result = computeDerivedValues({});
expect(result.sortedVariablesArray).toEqual([]);
expect(result.dependencyData).toBeNull();
expect(result.variableTypes).toEqual({});
expect(result.dynamicVariableOrder).toEqual([]);
});
it('should handle all four variable types together', () => {
const variables: IDashboardVariables = {
queryVar: createVariable({
name: 'queryVar',
type: 'QUERY',
order: 0,
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(4);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'queryVar',
'customVar',
'dynVar',
'textVar',
]);
expect(result.variableTypes).toEqual({
queryVar: 'QUERY',
customVar: 'CUSTOM',
dynVar: 'DYNAMIC',
textVar: 'TEXTBOX',
});
expect(result.dynamicVariableOrder).toEqual(['dynVar']);
expect(result.dependencyData).not.toBeNull();
});
it('should sort variables by order regardless of insertion order', () => {
const variables: IDashboardVariables = {
z: createVariable({ name: 'z', type: 'QUERY', order: 4 }),
a: createVariable({ name: 'a', type: 'CUSTOM', order: 0 }),
m: createVariable({ name: 'm', type: 'DYNAMIC', order: 2 }),
b: createVariable({ name: 'b', type: 'TEXTBOX', order: 1 }),
x: createVariable({ name: 'x', type: 'QUERY', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'a',
'b',
'm',
'x',
'z',
]);
});
it('should include multiple dynamic variables in order', () => {
const variables: IDashboardVariables = {
dyn3: createVariable({ name: 'dyn3', type: 'DYNAMIC', order: 5 }),
query1: createVariable({ name: 'query1', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
custom1: createVariable({ name: 'custom1', type: 'CUSTOM', order: 2 }),
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']);
});
it('should build dependency data with query variable order for dependent queries', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
service: createVariable({
name: 'service',
type: 'QUERY',
order: 1,
queryValue: 'SELECT DISTINCT service FROM table WHERE env={{.env}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData } = result;
expect(dependencyData).not.toBeNull();
// env should appear in the dependency order (it's a root QUERY variable)
expect(dependencyData?.order).toContain('env');
// service depends on env, so it should also be in the order
expect(dependencyData?.order).toContain('service');
// env comes before service in topological order
const envIdx = dependencyData?.order.indexOf('env') ?? -1;
const svcIdx = dependencyData?.order.indexOf('service') ?? -1;
expect(envIdx).toBeLessThan(svcIdx);
});
it('should not include non-QUERY variables in dependency order', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.dependencyData).not.toBeNull();
// Only QUERY variables should be in the dependency order
result.dependencyData?.order.forEach((name) => {
expect(result.variableTypes[name]).toBe('QUERY');
});
});
it('should produce transitive descendants in dependency data', () => {
const variables: IDashboardVariables = {
region: createVariable({
name: 'region',
type: 'QUERY',
order: 0,
queryValue: 'SELECT region FROM table',
}),
cluster: createVariable({
name: 'cluster',
type: 'QUERY',
order: 1,
queryValue: 'SELECT cluster FROM table WHERE region={{.region}}',
}),
host: createVariable({
name: 'host',
type: 'QUERY',
order: 2,
queryValue: 'SELECT host FROM table WHERE cluster={{.cluster}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData: depData } = result;
expect(depData).not.toBeNull();
expect(depData?.transitiveDescendants).toBeDefined();
// region's transitive descendants should include cluster and host
expect(depData?.transitiveDescendants['region']).toEqual(
expect.arrayContaining(['cluster', 'host']),
);
});
it('should handle a single variable', () => {
const variables: IDashboardVariables = {
solo: createVariable({
name: 'solo',
type: 'QUERY',
order: 0,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(1);
expect(result.variableTypes).toEqual({ solo: 'QUERY' });
expect(result.dynamicVariableOrder).toEqual([]);
expect(result.dependencyData).not.toBeNull();
expect(result.dependencyData?.order).toEqual(['solo']);
});
it('should handle only non-QUERY variables', () => {
const variables: IDashboardVariables = {
custom1: createVariable({
name: 'custom1',
type: 'CUSTOM',
order: 0,
}),
text1: createVariable({
name: 'text1',
type: 'TEXTBOX',
order: 1,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 2,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(3);
// No QUERY variables, so dependency order should be empty
expect(result.dependencyData?.order).toEqual([]);
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
});
});
});

View File

@@ -1,4 +1,7 @@
import { isEmpty, isUndefined } from 'lodash-es';
import createStore from '../store';
import { VariableFetchContext } from '../variableFetchStore';
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
import {
computeDerivedValues,
@@ -10,6 +13,8 @@ const initialState: IDashboardVariablesStoreState = {
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
};
export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
@@ -55,3 +60,38 @@ export function updateDashboardVariablesStore({
updateDerivedValues(draft);
});
}
/**
* Read current store snapshot as VariableFetchContext.
 * Used by the variableFetchStore actions to read the latest context at call
 * time without creating a circular import.
*/
export function getVariableDependencyContext(): VariableFetchContext {
const state = dashboardVariablesStore.getSnapshot();
// If every variable already has a selectedValue (e.g. persisted from
// localStorage/URL), dynamic variables can start in parallel.
// Otherwise they wait for query vars to settle first.
const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
(variable) => {
if (
variable.type === 'DYNAMIC' &&
variable.selectedValue === null &&
variable.allSelected === true
) {
return true;
}
return (
!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
);
},
);
return {
doAllVariablesHaveValuesSelected,
variableTypes: state.variableTypes,
dynamicVariableOrder: state.dynamicVariableOrder,
dependencyData: state.dependencyData,
};
}

View File

@@ -1,11 +1,18 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
export type VariableGraph = Record<string, string[]>;
export interface IDependencyData {
order: string[];
// Direct children for each variable
graph: VariableGraph;
// Direct parents for each variable
parentDependencyGraph: VariableGraph;
// Pre-computed transitive descendants for each node (all reachable nodes, not just direct children)
transitiveDescendants: VariableGraph;
hasCycle: boolean;
cycleNodes?: string[];
}
@@ -24,6 +31,12 @@ export interface IDashboardVariablesStoreState {
// Derived: dependency data for QUERY variables
dependencyData: IDependencyData | null;
// Derived: variable name → type mapping
variableTypes: Record<string, TVariableQueryType>;
// Derived: display-ordered list of dynamic variable names
dynamicVariableOrder: string[];
}
export interface IUseDashboardVariablesReturn {

View File

@@ -2,9 +2,11 @@ import {
buildDependencies,
buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -44,6 +46,7 @@ export function buildDependencyData(
order,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
} = buildDependencyGraph(dependencies);
@@ -58,49 +61,62 @@ export function buildDependencyData(
order: queryVariableOrder,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
};
}
/**
* Initialize the variable fetch store with the computed dependency data
* Build a variable name → type mapping from sorted variables array
*/
function initializeFetchStore(
export function buildVariableTypesMap(
sortedVariablesArray: IDashboardVariable[],
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
): Record<string, TVariableQueryType> {
const types: Record<string, TVariableQueryType> = {};
sortedVariablesArray.forEach((v) => {
if (v.name) {
types[v.name] = v.type;
}
});
return types;
}
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
/**
* Build display-ordered list of dynamic variable names
*/
export function buildDynamicVariableOrder(
sortedVariablesArray: IDashboardVariable[],
): string[] {
return sortedVariablesArray
.filter((v) => v.type === 'DYNAMIC' && v.name)
.map((v) => v.name as string);
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
): Pick<
IDashboardVariablesStoreState,
'sortedVariablesArray' | 'dependencyData'
| 'sortedVariablesArray'
| 'dependencyData'
| 'variableTypes'
| 'dynamicVariableOrder'
> {
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
const variableTypes = buildVariableTypesMap(sortedVariablesArray);
const dynamicVariableOrder = buildDynamicVariableOrder(sortedVariablesArray);
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
return {
sortedVariablesArray,
dependencyData,
variableTypes,
dynamicVariableOrder,
};
}
/**
@@ -112,7 +128,8 @@ export function updateDerivedValues(
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
draft.variableTypes = buildVariableTypesMap(draft.sortedVariablesArray);
draft.dynamicVariableOrder = buildDynamicVariableOrder(
draft.sortedVariablesArray,
);
}

View File

@@ -1,6 +1,12 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from './dashboardVariables/dashboardVariablesStoreTypes';
import createStore from './store';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from './variableFetchStoreUtils';
// Fetch state for each variable
export type VariableFetchState =
@@ -14,19 +20,29 @@ export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
// Track last update timestamp per variable
lastUpdated: Record<string, number>;
// Per-variable cycle counter — bumped when a variable needs to refetch.
// Used in react-query keys to auto-cancel stale requests for that variable only.
cycleIds: Record<string, number>;
}
/**
* Context from dashboardVariablesStore needed by fetch actions.
 * Read via getVariableDependencyContext() at call time to avoid circular imports.
*/
export type VariableFetchContext = Pick<
IDashboardVariablesStoreState,
'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
doAllVariablesHaveValuesSelected: boolean;
};
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
cycleIds: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
@@ -36,22 +52,183 @@ export const variableFetchStore = createStore<IVariableFetchStoreState>(
// ============== Actions ==============
/**
* Initialize the store with dependency graphs and set initial states
* Initialize the store with variable names.
* Called when dashboard variables change — sets up state entries.
*/
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
export function initializeVariableFetchStore(variableNames: string[]): void {
variableFetchStore.update((draft) => {
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving existing ready states
// Initialize all variables to idle, preserving existing states
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
// Clean up stale entries for variables that no longer exist
const nameSet = new Set(variableNames);
Object.keys(draft.states).forEach((name) => {
if (!nameSet.has(name)) {
delete draft.states[name];
delete draft.lastUpdated[name];
delete draft.cycleIds[name];
}
});
});
}
/**
* Start a full fetch cycle for all fetchable variables.
* Called on: initial load, time range change, or dependency graph change.
*
* Query variables with no query-type parents start immediately.
* Query variables with query-type parents get 'waiting'.
* Dynamic variables start immediately if all variables already have
* selectedValues (e.g. persisted from localStorage/URL). Otherwise they
* wait for all query variables to settle first.
*/
export function enqueueFetchOfAllVariables(): void {
const {
doAllVariablesHaveValuesSelected,
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { order: queryVariableOrder, parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
// Query variables: root ones start immediately, dependent ones wait
queryVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
const parents = parentDependencyGraph[name] || [];
const hasQueryParents = parents.some((p) => variableTypes[p] === 'QUERY');
if (hasQueryParents) {
draft.states[name] = 'waiting';
} else {
draft.states[name] = resolveFetchState(draft, name);
}
});
// Dynamic variables: start immediately if query variables have values,
// otherwise wait for query variables to settle first
dynamicVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
draft.states[name] = doAllVariablesHaveValuesSelected
? resolveFetchState(draft, name)
: 'waiting';
});
});
}
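// Illustrative walkthrough (not part of this file): assume a dashboard with a
// root QUERY variable `env`, a QUERY variable `service` that depends on `env`,
// and a DYNAMIC variable `pod`, with no values persisted yet.
//
//   initializeVariableFetchStore(['env', 'service', 'pod']);
//   enqueueFetchOfAllVariables();
//   // states: { env: 'loading', service: 'waiting', pod: 'waiting' }
//
//   onVariableFetchComplete('env');
//   // states: { env: 'idle', service: 'loading', pod: 'waiting' }
//
//   onVariableFetchComplete('service');
//   // all QUERY variables are settled, so the waiting DYNAMIC variable unlocks:
//   // states: { env: 'idle', service: 'idle', pod: 'loading' }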
/**
* Mark a variable as completed. Unblocks waiting query-type children.
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchComplete(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'idle';
draft.lastUpdated[name] = Date.now();
if (!dependencyData) {
return;
}
const { graph } = dependencyData;
// Unblock waiting query-type children
const children = graph[name] || [];
children.forEach((child) => {
if (variableTypes[child] === 'QUERY' && draft.states[child] === 'waiting') {
draft.states[child] = resolveFetchState(draft, child);
}
});
// If all query variables are settled, unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
/**
* Mark a variable as errored. Sets query-type descendants to idle
* (they can't proceed without this parent).
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchFailure(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'error';
if (!dependencyData) {
return;
}
// Set query-type descendants to idle (can't fetch without parent)
const descendants = dependencyData.transitiveDescendants[name] || [];
descendants.forEach((desc) => {
if (variableTypes[desc] === 'QUERY') {
draft.states[desc] = 'idle';
}
});
// If all query variables are settled (an error counts as settled), unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
/**
* Cascade a value change to query-type descendants.
* Called when a user changes a variable's value (not from a fetch cycle).
*
* Direct children whose parents are all settled start immediately.
* Deeper descendants wait until their parents complete (BFS order
* ensures parents are set before children within a single update).
*/
export function enqueueDescendantsOfVariable(name: string): void {
const { dependencyData, variableTypes } = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
const descendants = dependencyData.transitiveDescendants[name] || [];
const queryDescendants = descendants.filter(
(desc) => variableTypes[desc] === 'QUERY',
);
queryDescendants.forEach((desc) => {
draft.cycleIds[desc] = (draft.cycleIds[desc] || 0) + 1;
const parents = parentDependencyGraph[desc] || [];
const allParentsSettled = parents.every((p) => isSettled(draft.states[p]));
draft.states[desc] = allParentsSettled
? resolveFetchState(draft, desc)
: 'waiting';
});
});
}
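Per the `cycleIds` comment above, the counter is meant to be folded into a react-query key so that bumping a variable's cycle retires the stale request for that variable only. A minimal consumer-side sketch follows, assuming react-query v3's `useQuery(key, fn, options)` signature; the hook name, key shape, `fetchOptions` fetcher and import path are illustrative, not code from this PR, and a real component would also subscribe to the store rather than read a one-off snapshot:
import { useQuery } from 'react-query';

import {
  onVariableFetchComplete,
  onVariableFetchFailure,
  variableFetchStore,
} from './variableFetchStore'; // path is illustrative

function useVariableOptions(
  name: string,
  fetchOptions: () => Promise<string[]>, // hypothetical fetcher for this variable
) {
  const { states, cycleIds } = variableFetchStore.getSnapshot();
  const state = states[name];
  const shouldFetch = state === 'loading' || state === 'revalidating';

  return useQuery(
    // cycleIds[name] in the key: a bumped cycle yields a new key, so in-flight
    // results for the previous cycle can no longer land in the active cache entry
    ['DASHBOARD_VARIABLE_OPTIONS', name, cycleIds[name]],
    fetchOptions,
    {
      enabled: shouldFetch,
      onSuccess: () => onVariableFetchComplete(name),
      onError: () => onVariableFetchFailure(name),
    },
  );
}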

View File

@@ -0,0 +1,46 @@
import { TVariableQueryType } from 'types/api/dashboard/getAll';
import {
IVariableFetchStoreState,
VariableFetchState,
} from './variableFetchStore';
export function isSettled(state: VariableFetchState | undefined): boolean {
return state === 'idle' || state === 'error';
}
/**
* Resolve the next fetch state based on whether the variable has been fetched before.
*/
export function resolveFetchState(
draft: IVariableFetchStoreState,
name: string,
): VariableFetchState {
return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}
/**
* Check if all query variables are settled (idle or error).
*/
export function areAllQueryVariablesSettled(
states: Record<string, VariableFetchState>,
variableTypes: Record<string, TVariableQueryType>,
): boolean {
return Object.entries(variableTypes)
.filter(([, type]) => type === 'QUERY')
.every(([name]) => isSettled(states[name]));
}
/**
 * Transition dynamic variables that are still in the 'waiting' state to loading/revalidating.
*/
export function unlockWaitingDynamicVariables(
draft: IVariableFetchStoreState,
dynamicVariableOrder: string[],
): void {
dynamicVariableOrder.forEach((dynName) => {
if (draft.states[dynName] === 'waiting') {
draft.states[dynName] = resolveFetchState(draft, dynName);
}
});
}

View File

@@ -47,8 +47,6 @@ export interface IDashboardContext {
allSelected: boolean,
isDynamic?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dashboardQueryRangeCalled: boolean;
setDashboardQueryRangeCalled: (value: boolean) => void;
selectedRowWidgetId: string | null;

View File

@@ -3,8 +3,9 @@ package flagger
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
var (
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
)
func MustNewRegistry() featuretypes.Registry {
@@ -17,6 +18,14 @@ func MustNewRegistry() featuretypes.Registry {
DefaultVariant: featuretypes.MustNewName("disabled"),
Variants: featuretypes.NewBooleanVariants(),
},
&featuretypes.Feature{
Name: FeatureInterpolationEnabled,
Kind: featuretypes.KindBoolean,
Stage: featuretypes.StageExperimental,
Description: "Controls whether to enable interpolation",
DefaultVariant: featuretypes.MustNewName("disabled"),
Variants: featuretypes.NewBooleanVariants(),
},
&featuretypes.Feature{
Name: FeatureKafkaSpanEval,
Kind: featuretypes.KindBoolean,

View File

@@ -483,22 +483,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
value1 := v.Visit(values[0])
value2 := v.Visit(values[1])
switch value1.(type) {
case float64:
if _, ok := value2.(float64); !ok {
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
return ""
}
case string:
if _, ok := value2.(string); !ok {
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
return ""
}
default:
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
return ""
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
@@ -871,7 +855,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// 1. either user meant key ( this is already handled above in fieldKeysForName )
// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'
// Note:
// Note:
// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
// If user only wants to search `key`, then they have to use `key`
// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity

View File

@@ -375,6 +375,13 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
}
if os.Getenv("INTERPOLATION_ENABLED") != "" {
logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
if config.Flagger.Config.Boolean == nil {
config.Flagger.Config.Boolean = make(map[string]bool)
}
config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
}
}
func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {

View File

@@ -5,7 +5,6 @@ import (
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -92,9 +91,8 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -130,10 +128,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", []any{}, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -213,11 +208,8 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -286,10 +278,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -326,23 +315,25 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -357,15 +348,7 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", []any{}, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
)
}
) (string, []any) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
@@ -382,5 +365,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

View File

@@ -3,7 +3,6 @@ package telemetrymeter
import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)
@@ -64,7 +63,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) (string, error) {
) string {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -191,13 +190,5 @@ func AggregationColumnForSamplesTable(
}
}
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
return aggregationColumn
}

View File

@@ -29,7 +29,13 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
if operator.IsStringSearchOperator() {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
@@ -38,18 +44,6 @@ func (c *conditionBuilder) conditionFor(
return "", err
}
// todo(srikanthccv): use the same data type collision handling when metrics schemas are updated
switch v := value.(type) {
case float64:
tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
case []any:
if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
if _, ok := v[0].(float64); ok {
tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
}
}
}
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(tblFieldName, value), nil

View File

@@ -5,27 +5,67 @@ import (
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/slices"
)
const (
RateTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))`
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value, per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)`
RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s, ((%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1) OVER rate_window), (%s - lagInFrame(%s, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))) AS per_series_value`
RateWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Assume linear growth to next point
(leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected
per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
ELSE
-- Normal case: calculate rate
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
(ts - lagInFrame(ts, 1) OVER rate_window)
END`
IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
IncreaseWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Calculate the interpolated increase for this interval
((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)) *
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected: the increase is the current value
per_series_value
ELSE
-- Normal case: calculate increase
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
END`
)
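For readers skimming the new templates: a minimal, self-contained Go sketch (not part of this PR; names are illustrative only) of the per-row decision that RateWithInterpolation expresses in SQL. The first sample borrows the slope to the next sample, a drop against the previous sample is treated as a counter reset, and every other row is the plain delta-value over delta-ts rate.

package main

import "fmt"

type sample struct {
	ts    int64   // unix seconds
	value float64 // cumulative counter value
}

// interpolatedRates mirrors the CASE expression above: the first row
// interpolates from the next point, a decrease means a counter reset,
// and everything else is the ordinary rate.
func interpolatedRates(series []sample) []float64 {
	rates := make([]float64, len(series))
	for i, s := range series {
		switch {
		case i == 0 && len(series) > 1:
			next := series[i+1]
			rates[i] = (next.value - s.value) / float64(next.ts-s.ts)
		case i == 0:
			rates[i] = 0 // nothing to interpolate from
		case s.value < series[i-1].value:
			// counter reset: the whole current value accrued in this interval
			rates[i] = s.value / float64(s.ts-series[i-1].ts)
		default:
			rates[i] = (s.value - series[i-1].value) / float64(s.ts-series[i-1].ts)
		}
	}
	return rates
}

func main() {
	series := []sample{{ts: 0, value: 100}, {ts: 60, value: 110}, {ts: 120, value: 5}}
	fmt.Println(interpolatedRates(series)) // [0.1666… 0.1666… 0.0833…]
}

By contrast, the non-interpolated RateWithoutNegative template anchors the missing previous sample at the query start timestamp (the %d arguments), which appears to be why the integration tests further down now expect one extra leading data point.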
type MetricQueryStatementBuilder struct {
@@ -218,9 +258,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
if b.CanShortCircuitDelta(query) {
// spatial_aggregation_cte directly for certain delta queries
if frag, args, err := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); err != nil {
return nil, err
} else if frag != "" {
frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -234,9 +273,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -256,7 +294,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
timeSeriesCTEArgs []any,
) (string, []any, error) {
) (string, []any) {
stepSec := int64(query.StepInterval.Seconds())
sb := sqlbuilder.NewSelectBuilder()
@@ -269,13 +307,10 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol, err := AggregationColumnForSamplesTable(
aggCol := AggregationColumnForSamplesTable(
start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
)
if err != nil {
return "", []any{}, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -299,7 +334,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}
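The fast path above divides the pre-aggregated delta column by the step size instead of applying a window function. A tiny stand-alone sketch of that arithmetic (values are illustrative, assuming delta temporality so each sample is already an increment):

package main

import "fmt"

func main() {
	stepSec := int64(60)
	increments := []float64{2, 3, 5} // delta samples falling into one 60s bucket
	var sum float64
	for _, v := range increments {
		sum += v
	}
	// rate for the bucket = sum of increments / step length in seconds
	fmt.Println(sum / float64(stepSec)) // 10/60 ≈ 0.1667 per second
}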
func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
@@ -402,10 +437,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", []any{}, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -429,7 +461,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
}
func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
_ context.Context,
ctx context.Context,
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
@@ -447,10 +479,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
baseSb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", []any{}, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
@@ -467,25 +496,36 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
// ! TODO (balanikaran) Get OrgID via function parameter instead of valuer.GenerateUUID()
interpolationEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureInterpolationEnabled, featuretypes.NewFlaggerEvaluationContext(valuer.GenerateUUID()))
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
if interpolationEnabled {
rateExpr = RateWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", RateTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
if interpolationEnabled {
incExpr = IncreaseWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", IncreaseTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -494,6 +534,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
}
// RateWithInterpolation is not enabled anywhere yet because of gaps in the cache-handling logic, so it has not been considered for the multi-temporality path
func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
_ context.Context,
start, end uint64,
@@ -512,32 +553,18 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggForDeltaTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", []any{}, err
}
aggForCumulativeTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", []any{}, err
}
aggForDeltaTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggForCumulativeTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggForDeltaTemporality = fmt.Sprintf("%s/%d", aggForDeltaTemporality, stepSec)
}
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, aggForCumulativeTemporality, aggForCumulativeTemporality, start)
sb.SelectMore(rateExpr)
case metrictypes.TimeAggregationIncrease:
increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, start)
sb.SelectMore(increaseExpr)
default:
expr := fmt.Sprintf(OthersMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality)
@@ -565,14 +592,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", []any{}, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
) (string, []any) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
@@ -589,7 +609,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}
func (b *MetricQueryStatementBuilder) BuildFinalSelect(

View File

@@ -3,7 +3,6 @@ package telemetrymetrics
import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)
@@ -169,7 +168,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) (string, error) {
) string {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -299,12 +298,5 @@ func AggregationColumnForSamplesTable(
}
}
}
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
return aggregationColumn
}

View File

@@ -35,7 +35,13 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
if operator.IsStringSearchOperator() {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}

View File

@@ -152,9 +152,7 @@ func (f FilterOperator) IsStringSearchOperator() bool {
FilterOperatorILike,
FilterOperatorNotILike,
FilterOperatorLike,
FilterOperatorNotLike,
FilterOperatorRegexp,
FilterOperatorNotRegexp:
FilterOperatorNotLike:
return true
default:
return false
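The condition-builder hunks above route only the contains/LIKE operators through FormatValueForContains, and regexp operators are dropped from IsStringSearchOperator. A plausible reading (not stated in the diff) is that the formatting escapes LIKE wildcards, which would corrupt a regexp pattern if applied to it. A self-contained sketch with a hypothetical escaping helper; escapeLikeWildcards is a stand-in, not the real querybuilder.FormatValueForContains:

package main

import (
	"fmt"
	"strings"
)

// escapeLikeWildcards makes a user-supplied substring safe to embed inside
// LIKE '%...%' by escaping the LIKE metacharacters.
func escapeLikeWildcards(s string) string {
	r := strings.NewReplacer(`\`, `\\`, `%`, `\%`, `_`, `\_`)
	return r.Replace(s)
}

func main() {
	// For a contains/LIKE filter the escaping is what we want.
	fmt.Println("LIKE '%" + escapeLikeWildcards("100%_done") + "%'")
	// For a regexp filter the same escaping would mangle the pattern,
	// which is why regexp operators are kept out of this formatting.
	fmt.Println(escapeLikeWildcards(`err_[0-9]+%`)) // no longer the intended regex
}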

View File

@@ -12,7 +12,6 @@ import (
var (
ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
ErrBetweenValuesType = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values of the number type")
ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
)

View File

@@ -179,6 +179,14 @@ func (q *QueryBuilderQuery[T]) validateAggregations() error {
aggId,
)
}
// Validate metric-specific aggregations
if err := validateMetricAggregation(v); err != nil {
aggId := fmt.Sprintf("aggregation #%d", i+1)
if q.Name != "" {
aggId = fmt.Sprintf("aggregation #%d in query '%s'", i+1, q.Name)
}
return wrapValidationError(err, aggId, "invalid metric %s: %s")
}
case TraceAggregation:
if v.Expression == "" {
aggId := fmt.Sprintf("aggregation #%d", i+1)
@@ -795,3 +803,85 @@ func validateQueryEnvelope(envelope QueryEnvelope, requestType RequestType) erro
)
}
}
// validateMetricAggregation validates metric-specific aggregation parameters
func validateMetricAggregation(agg MetricAggregation) error {
// we can't decide anything here without known temporality
if agg.Temporality == metrictypes.Unknown {
return nil
}
// Validate that rate/increase are only used with appropriate temporalities
if agg.TimeAggregation == metrictypes.TimeAggregationRate || agg.TimeAggregation == metrictypes.TimeAggregationIncrease {
// For gauge metrics (Unspecified temporality), rate/increase doesn't make sense
if agg.Temporality == metrictypes.Unspecified {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"rate/increase aggregation cannot be used with gauge metrics (unspecified temporality)",
)
}
}
// Validate percentile aggregations are only used with histogram types
if agg.SpaceAggregation.IsPercentile() {
if agg.Type != metrictypes.HistogramType && agg.Type != metrictypes.ExpHistogramType && agg.Type != metrictypes.SummaryType {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"percentile aggregation can only be used with histogram or summary metric types",
)
}
}
// Validate time aggregation values
validTimeAggregations := []metrictypes.TimeAggregation{
metrictypes.TimeAggregationUnspecified,
metrictypes.TimeAggregationLatest,
metrictypes.TimeAggregationSum,
metrictypes.TimeAggregationAvg,
metrictypes.TimeAggregationMin,
metrictypes.TimeAggregationMax,
metrictypes.TimeAggregationCount,
metrictypes.TimeAggregationCountDistinct,
metrictypes.TimeAggregationRate,
metrictypes.TimeAggregationIncrease,
}
validTimeAgg := slices.Contains(validTimeAggregations, agg.TimeAggregation)
if !validTimeAgg {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid time aggregation: %s",
agg.TimeAggregation.StringValue(),
).WithAdditional(
"Valid time aggregations: latest, sum, avg, min, max, count, count_distinct, rate, increase",
)
}
// Validate space aggregation values
validSpaceAggregations := []metrictypes.SpaceAggregation{
metrictypes.SpaceAggregationUnspecified,
metrictypes.SpaceAggregationSum,
metrictypes.SpaceAggregationAvg,
metrictypes.SpaceAggregationMin,
metrictypes.SpaceAggregationMax,
metrictypes.SpaceAggregationCount,
metrictypes.SpaceAggregationPercentile50,
metrictypes.SpaceAggregationPercentile75,
metrictypes.SpaceAggregationPercentile90,
metrictypes.SpaceAggregationPercentile95,
metrictypes.SpaceAggregationPercentile99,
}
validSpaceAgg := slices.Contains(validSpaceAggregations, agg.SpaceAggregation)
if !validSpaceAgg {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid space aggregation: %s",
agg.SpaceAggregation.StringValue(),
).WithAdditional(
"Valid space aggregations: sum, avg, min, max, count, p50, p75, p90, p95, p99",
)
}
return nil
}
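As a quick illustration of the two semantic checks added above, a self-contained sketch with local stand-in types (the real validateMetricAggregation is unexported and operates on metrictypes values; the strings here are illustrative only):

package main

import (
	"errors"
	"fmt"
)

type aggregation struct {
	metricType       string // "gauge", "histogram", "exp_histogram", "summary", "sum", ...
	temporality      string // "delta", "cumulative", "unspecified", "unknown"
	timeAggregation  string // "rate", "increase", "sum", ...
	spaceAggregation string // "sum", "p50", ..., "p99"
}

func check(a aggregation) error {
	if a.temporality == "unknown" {
		return nil // nothing can be decided without a known temporality
	}
	if (a.timeAggregation == "rate" || a.timeAggregation == "increase") && a.temporality == "unspecified" {
		return errors.New("rate/increase cannot be used with gauge metrics (unspecified temporality)")
	}
	percentiles := map[string]bool{"p50": true, "p75": true, "p90": true, "p95": true, "p99": true}
	if percentiles[a.spaceAggregation] && a.metricType != "histogram" && a.metricType != "exp_histogram" && a.metricType != "summary" {
		return errors.New("percentile aggregation requires a histogram or summary metric type")
	}
	return nil
}

func main() {
	fmt.Println(check(aggregation{metricType: "gauge", temporality: "unspecified", timeAggregation: "rate"})) // rejected
	fmt.Println(check(aggregation{metricType: "sum", temporality: "cumulative", timeAggregation: "rate"}))    // nil
}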

File diff suppressed because it is too large

View File

@@ -54,17 +54,17 @@ def test_rate_with_steady_values_and_reset(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 58
assert len(result_values) >= 59
# the counter reset happened at 31st minute
assert (
result_values[29]["value"] == 0.0167
result_values[30]["value"] == 0.0167
) # i.e 2/120 i.e 29th to 31st minute changes
assert (
result_values[30]["value"] == 0.133
result_values[31]["value"] == 0.133
) # i.e 10/60 i.e 31st to 32nd minute changes
count_of_steady_rate = sum(1 for v in result_values if v["value"] == 0.0833)
assert (
count_of_steady_rate >= 55
count_of_steady_rate >= 56
) # 59 - (1 reset + 1 high rate + 1 at the beginning)
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:

View File

@@ -21,13 +21,8 @@ from fixtures.querier import (
from fixtures.utils import get_testdata_file_path
MULTI_TEMPORALITY_FILE = get_testdata_file_path("multi_temporality_counters_1h.jsonl")
MULTI_TEMPORALITY_FILE_10h = get_testdata_file_path(
"multi_temporality_counters_10h.jsonl"
)
MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path(
"multi_temporality_counters_24h.jsonl"
)
MULTI_TEMPORALITY_FILE_10h = get_testdata_file_path("multi_temporality_counters_10h.jsonl")
MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path("multi_temporality_counters_24h.jsonl")
@pytest.mark.parametrize(
"time_aggregation, expected_value_at_31st_minute, expected_value_at_32nd_minute, steady_value",
@@ -44,7 +39,7 @@ def test_with_steady_values_and_reset(
time_aggregation: str,
expected_value_at_31st_minute: float,
expected_value_at_32nd_minute: float,
steady_value: float,
steady_value: float
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
@@ -72,24 +67,24 @@ def test_with_steady_values_and_reset(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 58
assert len(result_values) >= 59
# the counter reset happened at 31st minute
# we skip the rate value for the first data point without previous value
assert result_values[29]["value"] == expected_value_at_31st_minute
assert result_values[30]["value"] == expected_value_at_32nd_minute
assert (
result_values[38]["value"] == steady_value
) # 38th minute is when cumulative shifts to delta
result_values[30]["value"] == expected_value_at_31st_minute
)
assert (
result_values[31]["value"] == expected_value_at_32nd_minute
)
assert (
result_values[39]["value"] == steady_value
) # 39th minute is when cumulative shifts to delta
count_of_steady_rate = sum(1 for v in result_values if v["value"] == steady_value)
assert (
count_of_steady_rate >= 55
count_of_steady_rate >= 56
) # 59 - (1 reset + 1 high rate + 1 at the beginning)
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
assert (
v["value"] >= 0
), f"{time_aggregation} should not be negative: {v['value']}"
assert v["value"] >= 0, f"{time_aggregation} should not be negative: {v['value']}"
@pytest.mark.parametrize(
"time_aggregation, stable_health_value, stable_products_value, stable_checkout_value, spike_checkout_value, stable_orders_value, spike_users_value",
@@ -166,26 +161,20 @@ def test_group_by_endpoint(
assert (
len(health_values) >= 58
), f"Expected >= 58 values for /health, got {len(health_values)}"
count_steady_health = sum(
1 for v in health_values if v["value"] == stable_health_value
)
count_steady_health = sum(1 for v in health_values if v["value"] == stable_health_value)
assert (
count_steady_health >= 57
), f"Expected >= 57 steady rate values ({stable_health_value}) for /health, got {count_steady_health}"
# all /health rates should be steady except possibly first/last due to boundaries
for v in health_values[1:-1]:
assert (
v["value"] == stable_health_value
), f"Expected /health rate {stable_health_value}, got {v['value']}"
assert v["value"] == stable_health_value, f"Expected /health rate {stable_health_value}, got {v['value']}"
# /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
products_values = endpoint_values["/products"]
assert (
len(products_values) >= 49
), f"Expected >= 49 values for /products, got {len(products_values)}"
count_steady_products = sum(
1 for v in products_values if v["value"] == stable_products_value
)
count_steady_products = sum(1 for v in products_values if v["value"] == stable_products_value)
# most values should be stable, some boundary values differ due to 10-min gap
assert (
@@ -193,9 +182,7 @@ def test_group_by_endpoint(
), f"Expected >= 46 steady rate values ({stable_products_value}) for /products, got {count_steady_products}"
# check that non-stable values are due to gap averaging (should be lower)
gap_boundary_values = [
v["value"] for v in products_values if v["value"] != stable_products_value
]
gap_boundary_values = [v["value"] for v in products_values if v["value"] != stable_products_value]
for val in gap_boundary_values:
assert (
0 < val < stable_products_value
@@ -206,16 +193,12 @@ def test_group_by_endpoint(
assert (
len(checkout_values) >= 59
), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
count_steady_checkout = sum(
1 for v in checkout_values if v["value"] == stable_checkout_value
)
count_steady_checkout = sum(1 for v in checkout_values if v["value"] == stable_checkout_value)
assert (
count_steady_checkout >= 53
), f"Expected >= 53 steady {time_aggregation} values ({stable_checkout_value}) for /checkout, got {count_steady_checkout}"
# check that spike values exist (traffic spike +50/min at t40-t44)
count_spike_checkout = sum(
1 for v in checkout_values if v["value"] == spike_checkout_value
)
count_spike_checkout = sum(1 for v in checkout_values if v["value"] == spike_checkout_value)
assert (
count_spike_checkout >= 4
), f"Expected >= 4 spike {time_aggregation} values ({spike_checkout_value}) for /checkout, got {count_spike_checkout}"
@@ -237,16 +220,12 @@ def test_group_by_endpoint(
assert (
len(orders_values) >= 58
), f"Expected >= 58 values for /orders, got {len(orders_values)}"
count_steady_orders = sum(
1 for v in orders_values if v["value"] == stable_orders_value
)
count_steady_orders = sum(1 for v in orders_values if v["value"] == stable_orders_value)
assert (
count_steady_orders >= 55
), f"Expected >= 55 steady {time_aggregation} values ({stable_orders_value}) for /orders, got {count_steady_orders}"
# check for counter reset effects - there should be some non-standard values
non_standard_orders = [
v["value"] for v in orders_values if v["value"] != stable_orders_value
]
non_standard_orders = [v["value"] for v in orders_values if v["value"] != stable_orders_value]
assert (
len(non_standard_orders) >= 2
), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
@@ -273,7 +252,6 @@ def test_group_by_endpoint(
count_increment_rate >= 8
), f"Expected >= 8 increment {time_aggregation} values ({spike_users_value}) for /users, got {count_increment_rate}"
@pytest.mark.parametrize(
"time_aggregation, expected_value_at_30th_minute, expected_value_at_31st_minute, value_at_switch",
[
@@ -289,7 +267,7 @@ def test_for_service_with_switch(
time_aggregation: str,
expected_value_at_30th_minute: float,
expected_value_at_31st_minute: float,
value_at_switch: float,
value_at_switch: float
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
@@ -317,19 +295,22 @@ def test_for_service_with_switch(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 59
assert result_values[29]["value"] == expected_value_at_30th_minute # 0.183
assert result_values[30]["value"] == expected_value_at_31st_minute # 0.183
assert result_values[37]["value"] == value_at_switch # 0.25
assert len(result_values) >= 60
assert (
result_values[38]["value"] == value_at_switch # 0.25
) # 39th minute is when cumulative shifts to delta
result_values[30]["value"] == expected_value_at_30th_minute #0.183
)
assert (
result_values[31]["value"] == expected_value_at_31st_minute # 0.183
)
assert (
result_values[38]["value"] == value_at_switch # 0.25
)
assert (
result_values[39]["value"] == value_at_switch # 0.25
) # 39th minute is when cumulative shifts to delta
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
assert (
v["value"] >= 0
), f"{time_aggregation} should not be negative: {v['value']}"
assert v["value"] >= 0, f"{time_aggregation} should not be negative: {v['value']}"
@pytest.mark.parametrize(
"time_aggregation, expected_value",
@@ -374,7 +355,6 @@ def test_for_week_long_time_range(
for value in result_values[1:]:
assert value["value"] == expected_value
@pytest.mark.parametrize(
"time_aggregation, expected_value",
[