Mirror of https://github.com/SigNoz/signoz.git, synced 2026-02-14 13:22:02 +00:00

Compare commits: fix-issues...chore/issu (11 commits)

9e4ad75f05
3ee79fbe72
a4a08b9781
8b0f6380ac
b1857d6fda
55026caa6a
e5f7fa93d0
915f4cc2c5
5aba097c5b
f0b80402d5
8a39a4b622
@@ -285,6 +285,7 @@ flagger:
  config:
    boolean:
      use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:
@@ -53,6 +53,7 @@ describe('VariableItem', () => {
           order: [],
           graph: {},
           parentDependencyGraph: {},
+          transitiveDescendants: {},
           hasCycle: false,
         }}
       />
@@ -75,6 +76,7 @@ describe('VariableItem', () => {
           order: [],
           graph: {},
           parentDependencyGraph: {},
+          transitiveDescendants: {},
           hasCycle: false,
         }}
       />
@@ -98,6 +100,7 @@ describe('VariableItem', () => {
           order: [],
           graph: {},
           parentDependencyGraph: {},
+          transitiveDescendants: {},
           hasCycle: false,
         }}
       />
@@ -139,6 +142,7 @@ describe('VariableItem', () => {
           order: [],
           graph: {},
           parentDependencyGraph: {},
+          transitiveDescendants: {},
           hasCycle: false,
         }}
       />
@@ -169,6 +173,7 @@ describe('VariableItem', () => {
           order: [],
           graph: {},
           parentDependencyGraph: {},
+          transitiveDescendants: {},
           hasCycle: false,
         }}
       />
@@ -191,6 +196,7 @@ describe('VariableItem', () => {
           order: [],
           graph: {},
           parentDependencyGraph: {},
+          transitiveDescendants: {},
           hasCycle: false,
         }}
       />
@@ -14,6 +14,7 @@ const baseDependencyData = {
   order: [],
   graph: {},
   parentDependencyGraph: {},
+  transitiveDescendants: {},
   hasCycle: false,
 };
@@ -223,6 +223,16 @@ describe('dashboardVariables - utilities and processors', () => {
       },
       hasCycle: false,
       cycleNodes: undefined,
+      transitiveDescendants: {
+        deployment_environment: ['service_name', 'endpoint', 'http_status_code'],
+        endpoint: ['http_status_code'],
+        environment: [],
+        http_status_code: [],
+        k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
+        k8s_namespace_name: [],
+        k8s_node_name: ['k8s_namespace_name'],
+        service_name: ['endpoint', 'http_status_code'],
+      },
     };

     expect(buildDependencyGraph(graph)).toEqual(expected);
@@ -246,10 +246,26 @@ export const buildDependencyGraph = (
   const hasCycle = topologicalOrder.length !== Object.keys(dependencies)?.length;

+  // Pre-compute transitive descendants by walking topological order in reverse.
+  // Each node's transitive descendants = direct children + their transitive descendants.
+  const transitiveDescendants: VariableGraph = {};
+  for (let i = topologicalOrder.length - 1; i >= 0; i--) {
+    const node = topologicalOrder[i];
+    const desc = new Set<string>();
+    for (const child of adjList[node] || []) {
+      desc.add(child);
+      for (const d of transitiveDescendants[child] || []) {
+        desc.add(d);
+      }
+    }
+    transitiveDescendants[node] = Array.from(desc);
+  }
+
   return {
     order: topologicalOrder,
     graph: adjList,
     parentDependencyGraph: buildParentDependencyGraph(adjList),
+    transitiveDescendants,
     hasCycle,
     cycleNodes,
   };
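For readers skimming the hunk, the reverse walk can be lifted out on its own. A minimal standalone sketch of the same idea, assuming adjList holds direct children and topologicalOrder is a valid topological ordering (the function name and framing are illustrative, not the PR's):

type VariableGraph = Record<string, string[]>;

// Walk the order backwards so every child's descendant set is final before
// its parents read it; each set is the direct children plus their descendants.
function computeTransitiveDescendants(
  adjList: VariableGraph,
  topologicalOrder: string[],
): VariableGraph {
  const result: VariableGraph = {};
  for (let i = topologicalOrder.length - 1; i >= 0; i--) {
    const node = topologicalOrder[i];
    const desc = new Set<string>();
    for (const child of adjList[node] || []) {
      desc.add(child);
      (result[child] || []).forEach((d) => desc.add(d));
    }
    result[node] = Array.from(desc);
  }
  return result;
}

// computeTransitiveDescendants({ a: ['b'], b: ['c'], c: [] }, ['a', 'b', 'c'])
// yields { c: [], b: ['c'], a: ['b', 'c'] }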
frontend/src/hooks/dashboard/useVariableFetchState.ts (new file, 153 lines)
@@ -0,0 +1,153 @@
import { useCallback, useMemo, useRef, useSyncExternalStore } from 'react';
import isEmpty from 'lodash-es/isEmpty';
import {
  IVariableFetchStoreState,
  VariableFetchState,
  variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';

import { useDashboardVariablesSelector } from './useDashboardVariables';

/**
 * Generic selector hook for the variable fetch store.
 * Same pattern as useDashboardVariablesSelector.
 */
const useVariableFetchSelector = <T>(
  selector: (state: IVariableFetchStoreState) => T,
): T => {
  const selectorRef = useRef(selector);
  selectorRef.current = selector;

  const getSnapshot = useCallback(
    () => selectorRef.current(variableFetchStore.getSnapshot()),
    [],
  );

  return useSyncExternalStore(variableFetchStore.subscribe, getSnapshot);
};

interface UseVariableFetchStateReturn {
  /** The current fetch state for this variable */
  variableFetchState: VariableFetchState;
  /** Current fetch cycle — include in react-query keys to auto-cancel stale requests */
  variableFetchCycleId: number;
  /** True if this variable is idle (not waiting and not fetching) */
  isVariableSettled: boolean;
  /** True if this variable is actively fetching (loading or revalidating) */
  isVariableFetching: boolean;
  /** True if this variable has completed at least one fetch cycle */
  hasVariableFetchedOnce: boolean;
  /** True if any parent variable hasn't settled yet */
  isVariableWaitingForDependencies: boolean;
  /** Message describing what this variable is waiting on, or undefined if not waiting */
  variableDependencyWaitMessage?: string;
}

/**
 * Per-variable hook that exposes the fetch state of a single variable.
 * Reusable by both variable input components and panel components.
 *
 * Subscribes to both variableFetchStore (for states) and
 * dashboardVariablesStore (for parent graph) to compute derived values.
 */
export function useVariableFetchState(
  variableName: string,
): UseVariableFetchStateReturn {
  // This variable's fetch state (loading, waiting, idle, etc.)
  const variableFetchState = useVariableFetchSelector(
    (s) => s.states[variableName] || 'idle',
  ) as VariableFetchState;

  // All variable states — needed to check if parent variables are still in-flight
  const allStates = useVariableFetchSelector((s) => s.states);

  // Parent dependency graph — maps each variable to its direct parents
  // e.g. { "childVariable": ["parentVariable"] } means "childVariable" depends on "parentVariable"
  const parentGraph = useDashboardVariablesSelector(
    (s) => s.dependencyData?.parentDependencyGraph,
  );

  // Timestamp of last successful fetch — 0 means never fetched
  const lastUpdated = useVariableFetchSelector(
    (s) => s.lastUpdated[variableName] || 0,
  );

  // Per-variable cycle counter — used as part of react-query keys
  // so changing it auto-cancels stale requests for this variable only
  const variableFetchCycleId = useVariableFetchSelector(
    (s) => s.cycleIds[variableName] || 0,
  );
  const isVariableSettled = variableFetchState === 'idle';

  const isVariableFetching =
    variableFetchState === 'loading' || variableFetchState === 'revalidating';

  // True after at least one successful fetch — used to show stale data while revalidating
  const hasVariableFetchedOnce = lastUpdated > 0;

  // Variable type — needed to differentiate waiting messages
  const variableType = useDashboardVariablesSelector(
    (s) => s.variableTypes[variableName],
  );

  // Parent variable names that haven't settled yet
  const unsettledParents = useMemo(() => {
    const parents = parentGraph?.[variableName] || [];
    return parents.filter((p) => (allStates[p] || 'idle') !== 'idle');
  }, [parentGraph, variableName, allStates]);

  const isVariableWaitingForDependencies = unsettledParents.length > 0;

  const variableDependencyWaitMessage = useMemo(() => {
    if (variableFetchState !== 'waiting') {
      return;
    }

    if (variableType === 'DYNAMIC') {
      return 'Waiting for all query variable options to load.';
    }

    if (unsettledParents.length === 0) {
      return;
    }

    const quoted = unsettledParents.map((p) => `"${p}"`);
    const names =
      quoted.length > 1
        ? `${quoted.slice(0, -1).join(', ')} and ${quoted[quoted.length - 1]}`
        : quoted[0];
    return `Waiting for options of ${names} to load.`;
  }, [variableFetchState, variableType, unsettledParents]);

  return {
    variableFetchState,
    isVariableSettled,
    isVariableWaitingForDependencies,
    variableDependencyWaitMessage,
    isVariableFetching,
    hasVariableFetchedOnce,
    variableFetchCycleId,
  };
}

export function useIsPanelWaitingOnVariable(variableNames: string[]): boolean {
  const states = useVariableFetchSelector((s) => s.states);
  const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
  const variableTypesMap = useDashboardVariablesSelector((s) => s.variableTypes);

  return variableNames.some((name) => {
    const variableFetchState = states[name];
    const { selectedValue, allSelected } = dashboardVariables?.[name] || {};

    const isVariableInFetchingOrWaitingState =
      variableFetchState === 'loading' ||
      variableFetchState === 'revalidating' ||
      variableFetchState === 'waiting';

    if (variableTypesMap[name] === 'DYNAMIC' && allSelected) {
      return isVariableInFetchingOrWaitingState;
    }

    return isEmpty(selectedValue) ? isVariableInFetchingOrWaitingState : false;
  });
}
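A hypothetical consumer of the hook, to show how the pieces compose. The component, fetcher, and query key below are illustrative assumptions, not code from this PR; the load-bearing idea is keying react-query on variableFetchCycleId so a bumped cycle abandons in-flight results:

import { Select } from 'antd';
import { useQuery } from 'react-query';

import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';

// Assumed fetcher for this sketch.
declare function fetchEnvironmentOptions(): Promise<
  { label: string; value: string }[]
>;

function EnvironmentSelect(): JSX.Element {
  const {
    isVariableFetching,
    hasVariableFetchedOnce,
    variableFetchCycleId,
    variableDependencyWaitMessage,
  } = useVariableFetchState('environment');

  // A new cycle id changes the key, so stale responses never land here.
  const { data } = useQuery(
    ['dashboardVariable', 'environment', variableFetchCycleId],
    () => fetchEnvironmentOptions(),
  );

  if (variableDependencyWaitMessage) {
    return <span>{variableDependencyWaitMessage}</span>;
  }

  // Keep stale options visible while revalidating; spinner only on first load.
  return (
    <Select
      loading={isVariableFetching && !hasVariableFetchedOnce}
      options={data ?? []}
    />
  );
}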
@@ -3,6 +3,7 @@ import { TelemetryFieldKey } from 'api/v5/v5';
 import { PANEL_TYPES } from 'constants/queryBuilder';
 import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
 import { placeWidgetAtBottom } from 'container/NewWidget/utils';
+import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
 import { isArray } from 'lodash-es';
 import {
   Dashboard,
@@ -116,10 +117,17 @@ export const createDynamicVariableToWidgetsMap = (
       dynamicVariables.forEach((variable) => {
         if (
           variable.dynamicVariablesAttribute &&
+          variable.name &&
           filter.key?.key === variable.dynamicVariablesAttribute &&
-          ((isArray(filter.value) &&
-            filter.value.includes(`$${variable.name}`)) ||
-            filter.value === `$${variable.name}`) &&
+          (isArray(filter.value)
+            ? filter.value.some(
+                (v) =>
+                  typeof v === 'string' &&
+                  variable.name &&
+                  textContainsVariableReference(v, variable.name),
+              )
+            : typeof filter.value === 'string' &&
+              textContainsVariableReference(filter.value, variable.name)) &&
           !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
         ) {
           dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -132,7 +140,12 @@ export const createDynamicVariableToWidgetsMap = (
       dynamicVariables.forEach((variable) => {
         if (
           variable.dynamicVariablesAttribute &&
-          queryData.filter?.expression?.includes(`$${variable.name}`) &&
+          variable.name &&
+          queryData.filter?.expression &&
+          textContainsVariableReference(
+            queryData.filter.expression,
+            variable.name,
+          ) &&
           !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
         ) {
           dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -149,7 +162,9 @@ export const createDynamicVariableToWidgetsMap = (
       dynamicVariables.forEach((variable) => {
         if (
           variable.dynamicVariablesAttribute &&
-          promqlQuery.query?.includes(`$${variable.name}`) &&
+          variable.name &&
+          promqlQuery.query &&
+          textContainsVariableReference(promqlQuery.query, variable.name) &&
           !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
         ) {
           dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -165,7 +180,9 @@ export const createDynamicVariableToWidgetsMap = (
       dynamicVariables.forEach((variable) => {
         if (
           variable.dynamicVariablesAttribute &&
-          clickhouseQuery.query?.includes(`$${variable.name}`) &&
+          variable.name &&
+          clickhouseQuery.query &&
+          textContainsVariableReference(clickhouseQuery.query, variable.name) &&
           !dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
         ) {
           dynamicVariableToWidgetsMap[variable.id].push(widget.id);
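The recurring change in these hunks swaps raw substring checks for textContainsVariableReference. The motivation is that a naive includes cannot tell `$env` apart from `$environment`. An illustrative comparison follows; the exact matching rules live in lib/dashboardVariables/variableReference, so the expected results are an assumption about its intent:

const expression = 'env = $environment';

// Naive check: true, because '$env' is a prefix of '$environment',
// so a variable named "env" would be wrongly mapped to this widget.
expression.includes('$env');

// Reference-aware check: expected to match whole variable names only.
textContainsVariableReference(expression, 'env'); // expected: false
textContainsVariableReference(expression, 'environment'); // expected: true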
@@ -0,0 +1,527 @@
import * as dashboardVariablesStore from '../dashboardVariables/dashboardVariablesStore';
import { IDependencyData } from '../dashboardVariables/dashboardVariablesStoreTypes';
import {
  enqueueDescendantsOfVariable,
  enqueueFetchOfAllVariables,
  initializeVariableFetchStore,
  onVariableFetchComplete,
  onVariableFetchFailure,
  VariableFetchContext,
  variableFetchStore,
} from '../variableFetchStore';

const getVariableDependencyContextSpy = jest.spyOn(
  dashboardVariablesStore,
  'getVariableDependencyContext',
);

function resetStore(): void {
  variableFetchStore.set(() => ({
    states: {},
    lastUpdated: {},
    cycleIds: {},
  }));
}

function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
  getVariableDependencyContextSpy.mockReturnValue({
    doAllVariablesHaveValuesSelected: false,
    variableTypes: {},
    dynamicVariableOrder: [],
    dependencyData: null,
    ...overrides,
  });
}

/**
 * Helper to build a dependency data object for tests.
 * Only the fields used by the store actions are required.
 */
function buildDependencyData(
  overrides: Partial<IDependencyData> = {},
): IDependencyData {
  return {
    order: [],
    graph: {},
    parentDependencyGraph: {},
    transitiveDescendants: {},
    hasCycle: false,
    ...overrides,
  };
}

describe('variableFetchStore', () => {
  beforeEach(() => {
    resetStore();
    jest.clearAllMocks();
  });

  // ==================== initializeVariableFetchStore ====================

  describe('initializeVariableFetchStore', () => {
    it('should initialize new variables to idle', () => {
      initializeVariableFetchStore(['a', 'b', 'c']);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' });
    });

    it('should preserve existing states for known variables', () => {
      // Pre-set a state
      variableFetchStore.update((d) => {
        d.states.a = 'loading';
      });

      initializeVariableFetchStore(['a', 'b']);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('loading');
      expect(storeSnapshot.states.b).toBe('idle');
    });

    it('should clean up stale variables that no longer exist', () => {
      variableFetchStore.update((d) => {
        d.states.old = 'idle';
        d.lastUpdated.old = 100;
        d.cycleIds.old = 3;
      });

      initializeVariableFetchStore(['a']);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.old).toBeUndefined();
      expect(storeSnapshot.lastUpdated.old).toBeUndefined();
      expect(storeSnapshot.cycleIds.old).toBeUndefined();
      expect(storeSnapshot.states.a).toBe('idle');
    });

    it('should handle empty variable names array', () => {
      variableFetchStore.update((d) => {
        d.states.a = 'idle';
      });

      initializeVariableFetchStore([]);

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states).toEqual({});
    });
  });
  // ==================== enqueueFetchOfAllVariables ====================

  describe('enqueueFetchOfAllVariables', () => {
    it('should no-op when dependencyData is null', () => {
      mockContext({ dependencyData: null });

      initializeVariableFetchStore(['a']);
      enqueueFetchOfAllVariables();

      expect(variableFetchStore.getSnapshot().states.a).toBe('idle');
    });

    it('should set root query variables to loading and dependent ones to waiting', () => {
      // a is root (no parents), b depends on a
      mockContext({
        dependencyData: buildDependencyData({
          order: ['a', 'b'],
          parentDependencyGraph: { a: [], b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      initializeVariableFetchStore(['a', 'b']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('loading');
      expect(storeSnapshot.states.b).toBe('waiting');
    });

    it('should set root query variables to revalidating when previously fetched', () => {
      mockContext({
        dependencyData: buildDependencyData({
          order: ['a'],
          parentDependencyGraph: { a: [] },
        }),
        variableTypes: { a: 'QUERY' },
      });

      // Pre-set lastUpdated so it appears previously fetched
      variableFetchStore.update((d) => {
        d.lastUpdated.a = 1000;
      });

      initializeVariableFetchStore(['a']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('revalidating');
    });

    it('should bump cycle IDs for all enqueued variables', () => {
      mockContext({
        dependencyData: buildDependencyData({
          order: ['a', 'b'],
          parentDependencyGraph: { a: [], b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      initializeVariableFetchStore(['a', 'b']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.cycleIds.a).toBe(1);
      expect(storeSnapshot.cycleIds.b).toBe(1);
    });

    it('should set dynamic variables to waiting when not all variables have values', () => {
      mockContext({
        doAllVariablesHaveValuesSelected: false,
        dependencyData: buildDependencyData({ order: [] }),
        variableTypes: { dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      initializeVariableFetchStore(['dyn1']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('waiting');
    });

    it('should set dynamic variables to loading when all variables have values', () => {
      mockContext({
        doAllVariablesHaveValuesSelected: true,
        dependencyData: buildDependencyData({ order: [] }),
        variableTypes: { dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      initializeVariableFetchStore(['dyn1']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('loading');
    });

    it('should not treat non-QUERY parents as query parents', () => {
      // b has a CUSTOM parent — shouldn't cause waiting
      mockContext({
        dependencyData: buildDependencyData({
          order: ['b'],
          parentDependencyGraph: { b: ['customVar'] },
        }),
        variableTypes: { b: 'QUERY', customVar: 'CUSTOM' },
      });

      initializeVariableFetchStore(['b', 'customVar']);
      enqueueFetchOfAllVariables();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.b).toBe('loading');
    });
  });
  // ==================== onVariableFetchComplete ====================

  describe('onVariableFetchComplete', () => {
    it('should set the completed variable to idle with a lastUpdated timestamp', () => {
      mockContext();

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
      });

      const before = Date.now();
      onVariableFetchComplete('a');
      const after = Date.now();

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('idle');
      expect(storeSnapshot.lastUpdated.a).toBeGreaterThanOrEqual(before);
      expect(storeSnapshot.lastUpdated.a).toBeLessThanOrEqual(after);
    });

    it('should unblock waiting query-type children', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: ['b'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.b = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('idle');
      expect(storeSnapshot.states.b).toBe('loading');
    });

    it('should not unblock non-QUERY children', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: ['dyn1'] },
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      // dyn1 is DYNAMIC, not QUERY, so it should remain waiting
      expect(storeSnapshot.states.dyn1).toBe('waiting');
    });

    it('should unlock waiting dynamic variables when all query variables are settled', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: [] },
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('loading');
    });

    it('should NOT unlock dynamic variables if a query variable is still in-flight', () => {
      mockContext({
        dependencyData: buildDependencyData({
          graph: { a: ['b'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.b = 'waiting';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchComplete('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.dyn1).toBe('waiting');
    });
  });
  // ==================== onVariableFetchFailure ====================

  describe('onVariableFetchFailure', () => {
    it('should set the failed variable to error', () => {
      mockContext();

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
      });

      onVariableFetchFailure('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('error');
    });

    it('should set query-type transitive descendants to idle', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b', 'c'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.b = 'waiting';
        d.states.c = 'waiting';
      });

      onVariableFetchFailure('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.a).toBe('error');
      expect(storeSnapshot.states.b).toBe('idle');
      expect(storeSnapshot.states.c).toBe('idle');
    });

    it('should not touch non-QUERY descendants', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['dyn1'] },
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchFailure('a');

      expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
    });

    it('should unlock waiting dynamic variables when all query variables settle via error', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: {},
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
        dynamicVariableOrder: ['dyn1'],
      });

      variableFetchStore.update((d) => {
        d.states.a = 'loading';
        d.states.dyn1 = 'waiting';
      });

      onVariableFetchFailure('a');

      expect(variableFetchStore.getSnapshot().states.dyn1).toBe('loading');
    });
  });
  // ==================== enqueueDescendantsOfVariable ====================

  describe('enqueueDescendantsOfVariable', () => {
    it('should no-op when dependencyData is null', () => {
      mockContext({ dependencyData: null });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      expect(variableFetchStore.getSnapshot().states.b).toBe('idle');
    });

    it('should enqueue query-type descendants with all parents settled', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b'] },
          parentDependencyGraph: { b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      expect(storeSnapshot.states.b).toBe('loading');
      expect(storeSnapshot.cycleIds.b).toBe(1);
    });

    it('should set descendants to waiting when some parents are not settled', () => {
      // b depends on both a and c; c is still loading
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b'] },
          parentDependencyGraph: { b: ['a', 'c'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
        d.states.c = 'loading';
      });

      enqueueDescendantsOfVariable('a');

      expect(variableFetchStore.getSnapshot().states.b).toBe('waiting');
    });

    it('should skip non-QUERY descendants', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['dyn1'] },
          parentDependencyGraph: {},
        }),
        variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.dyn1 = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      // dyn1 is DYNAMIC, so it should not be touched
      expect(variableFetchStore.getSnapshot().states.dyn1).toBe('idle');
    });

    it('should handle chain of descendants: a -> b -> c', () => {
      // a -> b -> c, all QUERY
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b', 'c'] },
          parentDependencyGraph: { b: ['a'], c: ['b'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
        d.states.c = 'idle';
      });

      enqueueDescendantsOfVariable('a');

      const storeSnapshot = variableFetchStore.getSnapshot();
      // b's parent (a) is idle/settled → loading
      expect(storeSnapshot.states.b).toBe('loading');
      // c's parent (b) just moved to loading (not settled) → waiting
      expect(storeSnapshot.states.c).toBe('waiting');
    });

    it('should set descendants to revalidating when previously fetched', () => {
      mockContext({
        dependencyData: buildDependencyData({
          transitiveDescendants: { a: ['b'] },
          parentDependencyGraph: { b: ['a'] },
        }),
        variableTypes: { a: 'QUERY', b: 'QUERY' },
      });

      variableFetchStore.update((d) => {
        d.states.a = 'idle';
        d.states.b = 'idle';
        d.lastUpdated.b = 1000;
      });

      enqueueDescendantsOfVariable('a');

      expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
    });
  });
});
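Read together, these tests pin down a small per-variable state machine. A compact summary of the transitions they exercise (a reading of the tests, not a diagram from the PR):

idle -> loading                  enqueued, never fetched before
idle -> revalidating             enqueued, fetched at least once (lastUpdated > 0)
idle -> waiting                  enqueued while a QUERY parent is unsettled
waiting -> loading/revalidating  all QUERY parents settle (complete or error)
loading/revalidating -> idle     onVariableFetchComplete, lastUpdated stamped
loading/revalidating -> error    onVariableFetchFailure, QUERY descendants reset to idle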
@@ -0,0 +1,196 @@
import {
  IVariableFetchStoreState,
  VariableFetchState,
} from '../variableFetchStore';
import {
  areAllQueryVariablesSettled,
  isSettled,
  resolveFetchState,
  unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';

describe('variableFetchStoreUtils', () => {
  describe('isSettled', () => {
    it('should return true for idle state', () => {
      expect(isSettled('idle')).toBe(true);
    });

    it('should return true for error state', () => {
      expect(isSettled('error')).toBe(true);
    });

    it('should return false for loading state', () => {
      expect(isSettled('loading')).toBe(false);
    });

    it('should return false for revalidating state', () => {
      expect(isSettled('revalidating')).toBe(false);
    });

    it('should return false for waiting state', () => {
      expect(isSettled('waiting')).toBe(false);
    });

    it('should return false for undefined', () => {
      expect(isSettled(undefined)).toBe(false);
    });
  });
  describe('resolveFetchState', () => {
    it('should return "loading" when variable has never been fetched', () => {
      const draft: IVariableFetchStoreState = {
        states: {},
        lastUpdated: {},
        cycleIds: {},
      };

      expect(resolveFetchState(draft, 'myVar')).toBe('loading');
    });

    it('should return "loading" when lastUpdated is 0', () => {
      const draft: IVariableFetchStoreState = {
        states: {},
        lastUpdated: { myVar: 0 },
        cycleIds: {},
      };

      expect(resolveFetchState(draft, 'myVar')).toBe('loading');
    });

    it('should return "revalidating" when variable has been fetched before', () => {
      const draft: IVariableFetchStoreState = {
        states: {},
        lastUpdated: { myVar: 1000 },
        cycleIds: {},
      };

      expect(resolveFetchState(draft, 'myVar')).toBe('revalidating');
    });
  });

  describe('areAllQueryVariablesSettled', () => {
    it('should return true when all query variables are idle', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'idle',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return true when all query variables are in error', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'error',
        b: 'error',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return true with a mix of idle and error query variables', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'error',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return false when any query variable is loading', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'loading',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
    });

    it('should return false when any query variable is waiting', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        b: 'waiting',
      };
      const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
    });

    it('should ignore non-QUERY variable types', () => {
      const states: Record<string, VariableFetchState> = {
        a: 'idle',
        dynVar: 'loading',
      };
      const variableTypes = {
        a: 'QUERY' as const,
        dynVar: 'DYNAMIC' as const,
      };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });

    it('should return true when there are no QUERY variables', () => {
      const states: Record<string, VariableFetchState> = {
        dynVar: 'loading',
      };
      const variableTypes = { dynVar: 'DYNAMIC' as const };

      expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
    });
  });
  describe('unlockWaitingDynamicVariables', () => {
    it('should transition waiting dynamic variables to loading when never fetched', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'waiting', dyn2: 'waiting' },
        lastUpdated: {},
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);

      expect(draft.states.dyn1).toBe('loading');
      expect(draft.states.dyn2).toBe('loading');
    });

    it('should transition waiting dynamic variables to revalidating when previously fetched', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'waiting' },
        lastUpdated: { dyn1: 1000 },
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, ['dyn1']);

      expect(draft.states.dyn1).toBe('revalidating');
    });

    it('should not touch dynamic variables that are not in waiting state', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'idle', dyn2: 'loading' },
        lastUpdated: {},
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);

      expect(draft.states.dyn1).toBe('idle');
      expect(draft.states.dyn2).toBe('loading');
    });

    it('should handle empty dynamic variable order', () => {
      const draft: IVariableFetchStoreState = {
        states: { dyn1: 'waiting' },
        lastUpdated: {},
        cycleIds: {},
      };

      unlockWaitingDynamicVariables(draft, []);

      expect(draft.states.dyn1).toBe('waiting');
    });
  });
});
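These tests fully determine the helpers' observable behavior. A minimal sketch consistent with them (the real implementations live in variableFetchStoreUtils and may differ in detail):

type VariableFetchState = 'idle' | 'loading' | 'revalidating' | 'waiting' | 'error';

interface IVariableFetchStoreState {
  states: Record<string, VariableFetchState>;
  lastUpdated: Record<string, number>;
  cycleIds: Record<string, number>;
}

// Settled means no fetch is pending or in flight: idle or error.
function isSettled(state?: VariableFetchState): boolean {
  return state === 'idle' || state === 'error';
}

// First-ever fetch shows as 'loading'; refetches show as 'revalidating'
// so the UI can keep stale options on screen meanwhile.
function resolveFetchState(
  draft: IVariableFetchStoreState,
  name: string,
): VariableFetchState {
  return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}

// Only QUERY variables gate dynamic ones; other types are ignored.
function areAllQueryVariablesSettled(
  states: Record<string, VariableFetchState>,
  variableTypes: Record<string, string>,
): boolean {
  return Object.entries(variableTypes)
    .filter(([, type]) => type === 'QUERY')
    .every(([name]) => isSettled(states[name]));
}

// Waiting dynamic variables start fetching once unblocked; others untouched.
function unlockWaitingDynamicVariables(
  draft: IVariableFetchStoreState,
  dynamicVariableOrder: string[],
): void {
  dynamicVariableOrder.forEach((name) => {
    if (draft.states[name] === 'waiting') {
      draft.states[name] = resolveFetchState(draft, name);
    }
  });
}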
@@ -0,0 +1,225 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import {
  dashboardVariablesStore,
  getVariableDependencyContext,
  setDashboardVariablesStore,
  updateDashboardVariablesStore,
} from '../dashboardVariablesStore';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';

function createVariable(
  overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
  return {
    id: 'test-id',
    name: 'test-var',
    description: '',
    type: 'QUERY',
    sort: 'DISABLED',
    showALLOption: false,
    multiSelect: false,
    order: 0,
    ...overrides,
  };
}

function resetStore(): void {
  dashboardVariablesStore.set(() => ({
    dashboardId: '',
    variables: {},
    sortedVariablesArray: [],
    dependencyData: null,
    variableTypes: {},
    dynamicVariableOrder: [],
  }));
}

describe('dashboardVariablesStore', () => {
  beforeEach(() => {
    resetStore();
  });

  describe('setDashboardVariablesStore', () => {
    it('should set the dashboard variables and compute derived values', () => {
      const variables: IDashboardVariables = {
        env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
      };

      setDashboardVariablesStore({ dashboardId: 'dash-1', variables });

      const storeSnapshot = dashboardVariablesStore.getSnapshot();
      expect(storeSnapshot.dashboardId).toBe('dash-1');
      expect(storeSnapshot.variables).toEqual(variables);
      expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' });
      expect(storeSnapshot.sortedVariablesArray).toHaveLength(1);
    });
  });

  describe('updateDashboardVariablesStore', () => {
    it('should update variables and recompute derived values', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
        },
      });

      const updatedVariables: IDashboardVariables = {
        env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
        dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
      };

      updateDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: updatedVariables,
      });

      const storeSnapshot = dashboardVariablesStore.getSnapshot();
      expect(storeSnapshot.variableTypes).toEqual({
        env: 'QUERY',
        dyn1: 'DYNAMIC',
      });
      expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']);
    });

    it('should replace dashboardId when it does not match', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          'not-there': createVariable({ name: 'not-there', order: 0 }),
        },
      });

      updateDashboardVariablesStore({
        dashboardId: 'dash-2',
        variables: {
          a: createVariable({ name: 'a', order: 0 }),
        },
      });

      const storeSnapshot = dashboardVariablesStore.getSnapshot();
      expect(storeSnapshot.dashboardId).toBe('dash-2');
      expect(storeSnapshot.variableTypes).toEqual({
        a: 'QUERY',
      });
      expect(storeSnapshot.variableTypes).not.toEqual({
        'not-there': 'QUERY',
      });
    });
  });
  describe('getVariableDependencyContext', () => {
    it('should return context with all fields', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({
            name: 'env',
            type: 'QUERY',
            order: 0,
            selectedValue: 'prod',
          }),
        },
      });

      const {
        variableTypes,
        dynamicVariableOrder,
        dependencyData,
      } = getVariableDependencyContext();

      expect(variableTypes).toEqual({ env: 'QUERY' });
      expect(dynamicVariableOrder).toEqual([]);
      expect(dependencyData).not.toBeNull();
    });

    it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({
            name: 'env',
            type: 'QUERY',
            order: 0,
            selectedValue: 'prod',
          }),
          region: createVariable({
            name: 'region',
            type: 'CUSTOM',
            order: 1,
            selectedValue: 'us-east',
          }),
        },
      });

      const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
      expect(doAllVariablesHaveValuesSelected).toBe(true);
    });

    it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          env: createVariable({
            name: 'env',
            type: 'QUERY',
            order: 0,
            selectedValue: 'prod',
          }),
          region: createVariable({
            name: 'region',
            type: 'CUSTOM',
            order: 1,
            selectedValue: undefined,
          }),
        },
      });

      const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
      expect(doAllVariablesHaveValuesSelected).toBe(false);
    });

    it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          dyn1: createVariable({
            name: 'dyn1',
            type: 'DYNAMIC',
            order: 0,
            selectedValue: null as any,
            allSelected: true,
          }),
          env: createVariable({
            name: 'env',
            type: 'QUERY',
            order: 1,
            selectedValue: 'prod',
          }),
        },
      });

      const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
      expect(doAllVariablesHaveValuesSelected).toBe(true);
    });

    it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
      setDashboardVariablesStore({
        dashboardId: 'dash-1',
        variables: {
          dyn1: createVariable({
            name: 'dyn1',
            type: 'DYNAMIC',
            order: 0,
            selectedValue: '',
            allSelected: false,
          }),
        },
      });

      const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
      expect(doAllVariablesHaveValuesSelected).toBe(false);
    });
  });
});
@@ -0,0 +1,369 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
import {
  buildDynamicVariableOrder,
  buildSortedVariablesArray,
  buildVariableTypesMap,
  computeDerivedValues,
} from '../dashboardVariablesStoreUtils';

const createVariable = (
  overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
  id: 'test-id',
  name: 'test-var',
  description: '',
  type: 'QUERY',
  sort: 'DISABLED',
  showALLOption: false,
  multiSelect: false,
  order: 0,
  ...overrides,
});

describe('dashboardVariablesStoreUtils', () => {
  describe('buildSortedVariablesArray', () => {
    it('should sort variables by order property', () => {
      const variables: IDashboardVariables = {
        c: createVariable({ name: 'c', order: 3 }),
        a: createVariable({ name: 'a', order: 1 }),
        b: createVariable({ name: 'b', order: 2 }),
      };

      const result = buildSortedVariablesArray(variables);

      expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']);
    });

    it('should return empty array for empty variables', () => {
      const result = buildSortedVariablesArray({});
      expect(result).toEqual([]);
    });

    it('should create copies of variables (not references)', () => {
      const original = createVariable({ name: 'a', order: 0 });
      const variables: IDashboardVariables = { a: original };

      const result = buildSortedVariablesArray(variables);

      expect(result[0]).not.toBe(original);
      expect(result[0]).toEqual(original);
    });
  });
  describe('buildVariableTypesMap', () => {
    it('should create a name-to-type mapping', () => {
      const sorted = [
        createVariable({ name: 'env', type: 'QUERY' }),
        createVariable({ name: 'region', type: 'CUSTOM' }),
        createVariable({ name: 'dynVar', type: 'DYNAMIC' }),
        createVariable({ name: 'text', type: 'TEXTBOX' }),
      ];

      const result = buildVariableTypesMap(sorted);

      expect(result).toEqual({
        env: 'QUERY',
        region: 'CUSTOM',
        dynVar: 'DYNAMIC',
        text: 'TEXTBOX',
      });
    });

    it('should return empty object for empty array', () => {
      expect(buildVariableTypesMap([])).toEqual({});
    });
  });

  describe('buildDynamicVariableOrder', () => {
    it('should return only DYNAMIC variable names in order', () => {
      const sorted = [
        createVariable({ name: 'queryVar', type: 'QUERY', order: 0 }),
        createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
        createVariable({ name: 'customVar', type: 'CUSTOM', order: 2 }),
        createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
      ];

      const result = buildDynamicVariableOrder(sorted);

      expect(result).toEqual(['dyn1', 'dyn2']);
    });

    it('should return empty array when no DYNAMIC variables exist', () => {
      const sorted = [
        createVariable({ name: 'a', type: 'QUERY' }),
        createVariable({ name: 'b', type: 'CUSTOM' }),
      ];

      expect(buildDynamicVariableOrder(sorted)).toEqual([]);
    });

    it('should return empty array for empty input', () => {
      expect(buildDynamicVariableOrder([])).toEqual([]);
    });
  });
  describe('computeDerivedValues', () => {
    it('should compute all derived values from variables', () => {
      const variables: IDashboardVariables = {
        env: createVariable({
          name: 'env',
          type: 'QUERY',
          order: 0,
        }),
        dyn1: createVariable({
          name: 'dyn1',
          type: 'DYNAMIC',
          order: 1,
        }),
      };

      const result = computeDerivedValues(variables);

      expect(result.sortedVariablesArray).toHaveLength(2);
      expect(result.sortedVariablesArray[0].name).toBe('env');
      expect(result.sortedVariablesArray[1].name).toBe('dyn1');

      expect(result.variableTypes).toEqual({
        env: 'QUERY',
        dyn1: 'DYNAMIC',
      });

      expect(result.dynamicVariableOrder).toEqual(['dyn1']);

      // dependencyData should exist since there are variables
      expect(result.dependencyData).not.toBeNull();
    });

    it('should return null dependencyData for empty variables', () => {
      const result = computeDerivedValues({});

      expect(result.sortedVariablesArray).toEqual([]);
      expect(result.dependencyData).toBeNull();
      expect(result.variableTypes).toEqual({});
      expect(result.dynamicVariableOrder).toEqual([]);
    });

    it('should handle all four variable types together', () => {
      const variables: IDashboardVariables = {
        queryVar: createVariable({
          name: 'queryVar',
          type: 'QUERY',
          order: 0,
        }),
        customVar: createVariable({
          name: 'customVar',
          type: 'CUSTOM',
          order: 1,
        }),
        dynVar: createVariable({
          name: 'dynVar',
          type: 'DYNAMIC',
          order: 2,
        }),
        textVar: createVariable({
          name: 'textVar',
          type: 'TEXTBOX',
          order: 3,
        }),
      };

      const result = computeDerivedValues(variables);

      expect(result.sortedVariablesArray).toHaveLength(4);
      expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
        'queryVar',
        'customVar',
        'dynVar',
        'textVar',
      ]);

      expect(result.variableTypes).toEqual({
        queryVar: 'QUERY',
        customVar: 'CUSTOM',
        dynVar: 'DYNAMIC',
        textVar: 'TEXTBOX',
      });

      expect(result.dynamicVariableOrder).toEqual(['dynVar']);
      expect(result.dependencyData).not.toBeNull();
    });
    it('should sort variables by order regardless of insertion order', () => {
      const variables: IDashboardVariables = {
        z: createVariable({ name: 'z', type: 'QUERY', order: 4 }),
        a: createVariable({ name: 'a', type: 'CUSTOM', order: 0 }),
        m: createVariable({ name: 'm', type: 'DYNAMIC', order: 2 }),
        b: createVariable({ name: 'b', type: 'TEXTBOX', order: 1 }),
        x: createVariable({ name: 'x', type: 'QUERY', order: 3 }),
      };

      const result = computeDerivedValues(variables);

      expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
        'a',
        'b',
        'm',
        'x',
        'z',
      ]);
    });

    it('should include multiple dynamic variables in order', () => {
      const variables: IDashboardVariables = {
        dyn3: createVariable({ name: 'dyn3', type: 'DYNAMIC', order: 5 }),
        query1: createVariable({ name: 'query1', type: 'QUERY', order: 0 }),
        dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
        custom1: createVariable({ name: 'custom1', type: 'CUSTOM', order: 2 }),
        dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
      };

      const result = computeDerivedValues(variables);

      expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']);
    });

    it('should build dependency data with query variable order for dependent queries', () => {
      const variables: IDashboardVariables = {
        env: createVariable({
          name: 'env',
          type: 'QUERY',
          order: 0,
          queryValue: 'SELECT DISTINCT env FROM table',
        }),
        service: createVariable({
          name: 'service',
          type: 'QUERY',
          order: 1,
          queryValue: 'SELECT DISTINCT service FROM table WHERE env={{.env}}',
        }),
      };

      const result = computeDerivedValues(variables);

      const { dependencyData } = result;
      expect(dependencyData).not.toBeNull();
      // env should appear in the dependency order (it's a root QUERY variable)
      expect(dependencyData?.order).toContain('env');
      // service depends on env, so it should also be in the order
      expect(dependencyData?.order).toContain('service');
      // env comes before service in topological order
      const envIdx = dependencyData?.order.indexOf('env') ?? -1;
      const svcIdx = dependencyData?.order.indexOf('service') ?? -1;
      expect(envIdx).toBeLessThan(svcIdx);
    });
    it('should not include non-QUERY variables in dependency order', () => {
      const variables: IDashboardVariables = {
        env: createVariable({
          name: 'env',
          type: 'QUERY',
          order: 0,
          queryValue: 'SELECT DISTINCT env FROM table',
        }),
        customVar: createVariable({
          name: 'customVar',
          type: 'CUSTOM',
          order: 1,
        }),
        dynVar: createVariable({
          name: 'dynVar',
          type: 'DYNAMIC',
          order: 2,
        }),
        textVar: createVariable({
          name: 'textVar',
          type: 'TEXTBOX',
          order: 3,
        }),
      };

      const result = computeDerivedValues(variables);

      expect(result.dependencyData).not.toBeNull();
      // Only QUERY variables should be in the dependency order
      result.dependencyData?.order.forEach((name) => {
        expect(result.variableTypes[name]).toBe('QUERY');
      });
    });

    it('should produce transitive descendants in dependency data', () => {
      const variables: IDashboardVariables = {
        region: createVariable({
          name: 'region',
          type: 'QUERY',
          order: 0,
          queryValue: 'SELECT region FROM table',
        }),
        cluster: createVariable({
          name: 'cluster',
          type: 'QUERY',
          order: 1,
          queryValue: 'SELECT cluster FROM table WHERE region={{.region}}',
        }),
        host: createVariable({
          name: 'host',
          type: 'QUERY',
          order: 2,
          queryValue: 'SELECT host FROM table WHERE cluster={{.cluster}}',
        }),
      };

      const result = computeDerivedValues(variables);

      const { dependencyData: depData } = result;
      expect(depData).not.toBeNull();
      expect(depData?.transitiveDescendants).toBeDefined();
      // region's transitive descendants should include cluster and host
      expect(depData?.transitiveDescendants['region']).toEqual(
        expect.arrayContaining(['cluster', 'host']),
      );
    });

    it('should handle a single variable', () => {
      const variables: IDashboardVariables = {
        solo: createVariable({
          name: 'solo',
          type: 'QUERY',
          order: 0,
        }),
      };

      const result = computeDerivedValues(variables);

      expect(result.sortedVariablesArray).toHaveLength(1);
      expect(result.variableTypes).toEqual({ solo: 'QUERY' });
      expect(result.dynamicVariableOrder).toEqual([]);
      expect(result.dependencyData).not.toBeNull();
      expect(result.dependencyData?.order).toEqual(['solo']);
    });

    it('should handle only non-QUERY variables', () => {
      const variables: IDashboardVariables = {
        custom1: createVariable({
          name: 'custom1',
          type: 'CUSTOM',
          order: 0,
        }),
        text1: createVariable({
          name: 'text1',
          type: 'TEXTBOX',
          order: 1,
        }),
        dyn1: createVariable({
          name: 'dyn1',
          type: 'DYNAMIC',
          order: 2,
        }),
      };

      const result = computeDerivedValues(variables);

      expect(result.sortedVariablesArray).toHaveLength(3);
      // No QUERY variables, so dependency order should be empty
      expect(result.dependencyData?.order).toEqual([]);
      expect(result.dynamicVariableOrder).toEqual(['dyn1']);
    });
  });
});
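buildVariableTypesMap's implementation appears in the store-utils diff further down; the other two helpers are only visible through these tests. Sketches consistent with the behavior the tests pin down (assumptions, not the PR's source):

import { IDashboardVariable } from 'types/api/dashboard/getAll';

type IDashboardVariables = Record<string, IDashboardVariable>;

// Sort by the numeric `order` field and return shallow copies,
// matching the "copies, not references" test above.
function buildSortedVariablesArray(
  variables: IDashboardVariables,
): IDashboardVariable[] {
  return Object.values(variables)
    .map((v) => ({ ...v }))
    .sort((a, b) => (a.order ?? 0) - (b.order ?? 0));
}

// Keep only DYNAMIC variables, preserving the sorted display order.
function buildDynamicVariableOrder(
  sortedVariablesArray: IDashboardVariable[],
): string[] {
  return sortedVariablesArray
    .filter((v) => v.type === 'DYNAMIC' && !!v.name)
    .map((v) => v.name as string);
}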
@@ -1,4 +1,7 @@
|
||||
import { isEmpty, isUndefined } from 'lodash-es';
|
||||
|
||||
import createStore from '../store';
|
||||
import { VariableFetchContext } from '../variableFetchStore';
|
||||
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
|
||||
import {
|
||||
computeDerivedValues,
|
||||
@@ -10,6 +13,8 @@ const initialState: IDashboardVariablesStoreState = {
|
||||
variables: {},
|
||||
sortedVariablesArray: [],
|
||||
dependencyData: null,
|
||||
variableTypes: {},
|
||||
dynamicVariableOrder: [],
|
||||
};
|
||||
|
||||
export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
|
||||
@@ -55,3 +60,38 @@ export function updateDashboardVariablesStore({
|
||||
updateDerivedValues(draft);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Read current store snapshot as VariableFetchContext.
|
||||
* Used by components to pass context to variableFetchStore actions
|
||||
* without creating a circular import.
|
||||
*/
|
||||
export function getVariableDependencyContext(): VariableFetchContext {
|
||||
const state = dashboardVariablesStore.getSnapshot();
|
||||
|
||||
// If every variable already has a selectedValue (e.g. persisted from
|
||||
// localStorage/URL), dynamic variables can start in parallel.
|
||||
// Otherwise they wait for query vars to settle first.
|
||||
const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
|
||||
(variable) => {
|
||||
if (
|
||||
variable.type === 'DYNAMIC' &&
|
||||
variable.selectedValue === null &&
|
||||
variable.allSelected === true
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return (
|
||||
!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
doAllVariablesHaveValuesSelected,
|
||||
variableTypes: state.variableTypes,
|
||||
dynamicVariableOrder: state.dynamicVariableOrder,
|
||||
dependencyData: state.dependencyData,
|
||||
};
|
||||
}
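
For orientation, a minimal sketch of how this predicate evaluates a snapshot (shapes simplified, not the full IDashboardVariable type, and plain comparisons stand in for lodash's isEmpty):

// env carries a persisted value; pods counts via the DYNAMIC "ALL" special case.
const variables = {
	env: { type: 'QUERY', selectedValue: 'prod', allSelected: false },
	pods: { type: 'DYNAMIC', selectedValue: null, allSelected: true },
};
const allHaveValues = Object.values(variables).every(
	(v) =>
		(v.type === 'DYNAMIC' && v.selectedValue === null && v.allSelected === true) ||
		(v.selectedValue !== undefined && v.selectedValue !== null && v.selectedValue !== ''),
);
// allHaveValues === true, so dynamic variables may start fetching in parallel
// with query variables instead of waiting for them to settle.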

@@ -1,11 +1,18 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
	IDashboardVariable,
	TVariableQueryType,
} from 'types/api/dashboard/getAll';

export type VariableGraph = Record<string, string[]>;

export interface IDependencyData {
	order: string[];
	// Direct children for each variable
	graph: VariableGraph;
	// Direct parents for each variable
	parentDependencyGraph: VariableGraph;
	// Pre-computed transitive descendants for each node (all reachable nodes, not just direct children)
	transitiveDescendants: VariableGraph;
	hasCycle: boolean;
	cycleNodes?: string[];
}
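
To make these fields concrete, here is the shape for a small chain a -> b -> c (a hand-written illustration, not output produced by the code):

const example: IDependencyData = {
	order: ['a', 'b', 'c'], // topological order of QUERY variables
	graph: { a: ['b'], b: ['c'], c: [] }, // direct children
	parentDependencyGraph: { a: [], b: ['a'], c: ['b'] }, // direct parents
	transitiveDescendants: { a: ['b', 'c'], b: ['c'], c: [] }, // everything reachable
	hasCycle: false,
};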

@@ -24,6 +31,12 @@ export interface IDashboardVariablesStoreState {

	// Derived: dependency data for QUERY variables
	dependencyData: IDependencyData | null;

	// Derived: variable name → type mapping
	variableTypes: Record<string, TVariableQueryType>;

	// Derived: display-ordered list of dynamic variable names
	dynamicVariableOrder: string[];
}

export interface IUseDashboardVariablesReturn {

@@ -2,9 +2,11 @@ import {
	buildDependencies,
	buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
	IDashboardVariable,
	TVariableQueryType,
} from 'types/api/dashboard/getAll';

import { initializeVariableFetchStore } from '../variableFetchStore';
import {
	IDashboardVariables,
	IDashboardVariablesStoreState,
@@ -44,6 +46,7 @@ export function buildDependencyData(
		order,
		graph,
		parentDependencyGraph,
		transitiveDescendants,
		hasCycle,
		cycleNodes,
	} = buildDependencyGraph(dependencies);
@@ -58,49 +61,62 @@ export function buildDependencyData(
		order: queryVariableOrder,
		graph,
		parentDependencyGraph,
		transitiveDescendants,
		hasCycle,
		cycleNodes,
	};
}

/**
 * Initialize the variable fetch store with the computed dependency data
 * Build a variable name → type mapping from sorted variables array
 */
function initializeFetchStore(
export function buildVariableTypesMap(
	sortedVariablesArray: IDashboardVariable[],
	dependencyData: IDependencyData | null,
): void {
	if (dependencyData) {
		const allVariableNames = sortedVariablesArray
			.map((v) => v.name)
			.filter((name): name is string => !!name);
): Record<string, TVariableQueryType> {
	const types: Record<string, TVariableQueryType> = {};
	sortedVariablesArray.forEach((v) => {
		if (v.name) {
			types[v.name] = v.type;
		}
	});
	return types;
}

		initializeVariableFetchStore(
			allVariableNames,
			dependencyData.graph,
			dependencyData.parentDependencyGraph,
		);
	}
/**
 * Build display-ordered list of dynamic variable names
 */
export function buildDynamicVariableOrder(
	sortedVariablesArray: IDashboardVariable[],
): string[] {
	return sortedVariablesArray
		.filter((v) => v.type === 'DYNAMIC' && v.name)
		.map((v) => v.name as string);
}

/**
 * Compute derived values from variables
 * This is a composition of buildSortedVariablesArray and buildDependencyData
 * Also initializes the variable fetch store with the new dependency data
 */
export function computeDerivedValues(
	variables: IDashboardVariablesStoreState['variables'],
): Pick<
	IDashboardVariablesStoreState,
	'sortedVariablesArray' | 'dependencyData'
	| 'sortedVariablesArray'
	| 'dependencyData'
	| 'variableTypes'
	| 'dynamicVariableOrder'
> {
	const sortedVariablesArray = buildSortedVariablesArray(variables);
	const dependencyData = buildDependencyData(sortedVariablesArray);
	const variableTypes = buildVariableTypesMap(sortedVariablesArray);
	const dynamicVariableOrder = buildDynamicVariableOrder(sortedVariablesArray);

	// Initialize the variable fetch store when dependency data is computed
	initializeFetchStore(sortedVariablesArray, dependencyData);

	return { sortedVariablesArray, dependencyData };
	return {
		sortedVariablesArray,
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	};
}

/**
@@ -112,7 +128,8 @@ export function updateDerivedValues(
): void {
	draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
	draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);

	// Initialize the variable fetch store when dependency data is updated
	initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
	draft.variableTypes = buildVariableTypesMap(draft.sortedVariablesArray);
	draft.dynamicVariableOrder = buildDynamicVariableOrder(
		draft.sortedVariablesArray,
	);
}

@@ -1,6 +1,12 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';

import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from './dashboardVariables/dashboardVariablesStoreTypes';
import createStore from './store';
import {
	areAllQueryVariablesSettled,
	isSettled,
	resolveFetchState,
	unlockWaitingDynamicVariables,
} from './variableFetchStoreUtils';

// Fetch state for each variable
export type VariableFetchState =
@@ -14,19 +20,29 @@ export interface IVariableFetchStoreState {
	// Per-variable fetch state
	states: Record<string, VariableFetchState>;

	// Dependency graphs (set once when variables change)
	dependencyGraph: VariableGraph; // variable -> children that depend on it
	parentGraph: VariableGraph; // variable -> parents it depends on

	// Track last update timestamp per variable to trigger re-fetches
	// Track last update timestamp per variable
	lastUpdated: Record<string, number>;

	// Per-variable cycle counter — bumped when a variable needs to refetch.
	// Used in react-query keys to auto-cancel stale requests for that variable only.
	cycleIds: Record<string, number>;
}

/**
 * Context from dashboardVariablesStore needed by fetch actions.
 * Passed as parameter to avoid circular imports.
 */
export type VariableFetchContext = Pick<
	IDashboardVariablesStoreState,
	'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
	doAllVariablesHaveValuesSelected: boolean;
};

const initialState: IVariableFetchStoreState = {
	states: {},
	dependencyGraph: {},
	parentGraph: {},
	lastUpdated: {},
	cycleIds: {},
};

export const variableFetchStore = createStore<IVariableFetchStoreState>(
@@ -36,22 +52,183 @@ export const variableFetchStore = createStore<IVariableFetchStoreState>(
// ============== Actions ==============

/**
 * Initialize the store with dependency graphs and set initial states
 * Initialize the store with variable names.
 * Called when dashboard variables change — sets up state entries.
 */
export function initializeVariableFetchStore(
	variableNames: string[],
	dependencyGraph: VariableGraph,
	parentGraph: VariableGraph,
): void {
export function initializeVariableFetchStore(variableNames: string[]): void {
	variableFetchStore.update((draft) => {
		draft.dependencyGraph = dependencyGraph;
		draft.parentGraph = parentGraph;

		// Initialize all variables to idle, preserving existing ready states
		// Initialize all variables to idle, preserving existing states
		variableNames.forEach((name) => {
			if (!draft.states[name]) {
				draft.states[name] = 'idle';
			}
		});

		// Clean up stale entries for variables that no longer exist
		const nameSet = new Set(variableNames);
		Object.keys(draft.states).forEach((name) => {
			if (!nameSet.has(name)) {
				delete draft.states[name];
				delete draft.lastUpdated[name];
				delete draft.cycleIds[name];
			}
		});
	});
}

/**
 * Start a full fetch cycle for all fetchable variables.
 * Called on: initial load, time range change, or dependency graph change.
 *
 * Query variables with no query-type parents start immediately.
 * Query variables with query-type parents get 'waiting'.
 * Dynamic variables start immediately if all variables already have
 * selectedValues (e.g. persisted from localStorage/URL). Otherwise they
 * wait for all query variables to settle first.
 */
export function enqueueFetchOfAllVariables(): void {
	const {
		doAllVariablesHaveValuesSelected,
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	} = getVariableDependencyContext();
	if (!dependencyData) {
		return;
	}

	const { order: queryVariableOrder, parentDependencyGraph } = dependencyData;

	variableFetchStore.update((draft) => {
		// Query variables: root ones start immediately, dependent ones wait
		queryVariableOrder.forEach((name) => {
			draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
			const parents = parentDependencyGraph[name] || [];
			const hasQueryParents = parents.some((p) => variableTypes[p] === 'QUERY');
			if (hasQueryParents) {
				draft.states[name] = 'waiting';
			} else {
				draft.states[name] = resolveFetchState(draft, name);
			}
		});

		// Dynamic variables: start immediately if query variables have values,
		// otherwise wait for query variables to settle first
		dynamicVariableOrder.forEach((name) => {
			draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
			draft.states[name] = doAllVariablesHaveValuesSelected
				? resolveFetchState(draft, name)
				: 'waiting';
		});
	});
}
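
A worked example of this action under assumed inputs (hypothetical dashboard with QUERY variables region -> cluster and a DYNAMIC variable pods, cold start, nothing persisted; assumes the shared createStore exposes getSnapshot, as used elsewhere in this diff):

enqueueFetchOfAllVariables();
const { states, cycleIds } = variableFetchStore.getSnapshot();
// states   -> { region: 'loading', cluster: 'waiting', pods: 'waiting' }
//   region has no query-type parents and no lastUpdated entry -> 'loading';
//   cluster depends on region -> 'waiting'; pods waits for queries to settle
// cycleIds -> { region: 1, cluster: 1, pods: 1 } (stale requests cancelled)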

/**
 * Mark a variable as completed. Unblocks waiting query-type children.
 * If all query variables are now settled, unlocks any waiting dynamic variables.
 */
export function onVariableFetchComplete(name: string): void {
	const {
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	} = getVariableDependencyContext();

	variableFetchStore.update((draft) => {
		draft.states[name] = 'idle';
		draft.lastUpdated[name] = Date.now();

		if (!dependencyData) {
			return;
		}

		const { graph } = dependencyData;

		// Unblock waiting query-type children
		const children = graph[name] || [];
		children.forEach((child) => {
			if (variableTypes[child] === 'QUERY' && draft.states[child] === 'waiting') {
				draft.states[child] = resolveFetchState(draft, child);
			}
		});

		// If all query variables are settled, unlock any waiting dynamic variables
		if (
			variableTypes[name] === 'QUERY' &&
			areAllQueryVariablesSettled(draft.states, variableTypes)
		) {
			unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
		}
	});
}

/**
 * Mark a variable as errored. Sets query-type descendants to idle
 * (they can't proceed without this parent).
 * If all query variables are now settled, unlocks any waiting dynamic variables.
 */
export function onVariableFetchFailure(name: string): void {
	const {
		dependencyData,
		variableTypes,
		dynamicVariableOrder,
	} = getVariableDependencyContext();

	variableFetchStore.update((draft) => {
		draft.states[name] = 'error';

		if (!dependencyData) {
			return;
		}

		// Set query-type descendants to idle (can't fetch without parent)
		const descendants = dependencyData.transitiveDescendants[name] || [];
		descendants.forEach((desc) => {
			if (variableTypes[desc] === 'QUERY') {
				draft.states[desc] = 'idle';
			}
		});

		// If all query variables are settled (error counts), unlock any waiting dynamic variables
		if (
			variableTypes[name] === 'QUERY' &&
			areAllQueryVariablesSettled(draft.states, variableTypes)
		) {
			unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
		}
	});
}

/**
 * Cascade a value change to query-type descendants.
 * Called when a user changes a variable's value (not from a fetch cycle).
 *
 * Direct children whose parents are all settled start immediately.
 * Deeper descendants wait until their parents complete (BFS order
 * ensures parents are set before children within a single update).
 */
export function enqueueDescendantsOfVariable(name: string): void {
	const { dependencyData, variableTypes } = getVariableDependencyContext();
	if (!dependencyData) {
		return;
	}

	const { parentDependencyGraph } = dependencyData;

	variableFetchStore.update((draft) => {
		const descendants = dependencyData.transitiveDescendants[name] || [];
		const queryDescendants = descendants.filter(
			(desc) => variableTypes[desc] === 'QUERY',
		);

		queryDescendants.forEach((desc) => {
			draft.cycleIds[desc] = (draft.cycleIds[desc] || 0) + 1;
			const parents = parentDependencyGraph[desc] || [];
			const allParentsSettled = parents.every((p) => isSettled(draft.states[p]));

			draft.states[desc] = allParentsSettled
				? resolveFetchState(draft, desc)
				: 'waiting';
		});
	});
}
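
To trace the cascade, assume the chain region -> cluster -> host (all QUERY, all previously fetched and 'idle') and a user picking a new region:

enqueueDescendantsOfVariable('region');
// cluster: its only parent (region) is settled -> 'revalidating'
// host: its parent (cluster) just left the settled set -> 'waiting'
// Both cycleIds are bumped, so any in-flight react-query requests for the
// descendants are cancelled via their changed query keys.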

@@ -0,0 +1,46 @@
import { TVariableQueryType } from 'types/api/dashboard/getAll';

import {
	IVariableFetchStoreState,
	VariableFetchState,
} from './variableFetchStore';

export function isSettled(state: VariableFetchState | undefined): boolean {
	return state === 'idle' || state === 'error';
}

/**
 * Resolve the next fetch state based on whether the variable has been fetched before.
 */
export function resolveFetchState(
	draft: IVariableFetchStoreState,
	name: string,
): VariableFetchState {
	return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}

/**
 * Check if all query variables are settled (idle or error).
 */
export function areAllQueryVariablesSettled(
	states: Record<string, VariableFetchState>,
	variableTypes: Record<string, TVariableQueryType>,
): boolean {
	return Object.entries(variableTypes)
		.filter(([, type]) => type === 'QUERY')
		.every(([name]) => isSettled(states[name]));
}

/**
 * Transition dynamic variables that are still 'waiting' to loading/revalidating.
 */
export function unlockWaitingDynamicVariables(
	draft: IVariableFetchStoreState,
	dynamicVariableOrder: string[],
): void {
	dynamicVariableOrder.forEach((dynName) => {
		if (draft.states[dynName] === 'waiting') {
			draft.states[dynName] = resolveFetchState(draft, dynName);
		}
	});
}
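
The loading/revalidating split is the only subtle part here; a standalone restatement with expected outputs:

const nextState = (lastUpdatedAt?: number): 'loading' | 'revalidating' =>
	(lastUpdatedAt || 0) > 0 ? 'revalidating' : 'loading';

nextState(undefined); // 'loading': first fetch, nothing to show yet
nextState(1700000000000); // 'revalidating': keep stale options visible while refreshing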

@@ -3,8 +3,9 @@ package flagger
import "github.com/SigNoz/signoz/pkg/types/featuretypes"

var (
	FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
	FeatureKafkaSpanEval  = featuretypes.MustNewName("kafka_span_eval")
	FeatureUseSpanMetrics       = featuretypes.MustNewName("use_span_metrics")
	FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
	FeatureKafkaSpanEval        = featuretypes.MustNewName("kafka_span_eval")
)

func MustNewRegistry() featuretypes.Registry {
@@ -17,6 +18,14 @@ func MustNewRegistry() featuretypes.Registry {
			DefaultVariant: featuretypes.MustNewName("disabled"),
			Variants:       featuretypes.NewBooleanVariants(),
		},
		&featuretypes.Feature{
			Name:           FeatureInterpolationEnabled,
			Kind:           featuretypes.KindBoolean,
			Stage:          featuretypes.StageExperimental,
			Description:    "Controls whether to enable interpolation",
			DefaultVariant: featuretypes.MustNewName("disabled"),
			Variants:       featuretypes.NewBooleanVariants(),
		},
		&featuretypes.Feature{
			Name: FeatureKafkaSpanEval,
			Kind: featuretypes.KindBoolean,

@@ -483,22 +483,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
	value1 := v.Visit(values[0])
	value2 := v.Visit(values[1])

	switch value1.(type) {
	case float64:
		if _, ok := value2.(float64); !ok {
			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
			return ""
		}
	case string:
		if _, ok := value2.(string); !ok {
			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
			return ""
		}
	default:
		v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
		return ""
	}

	var conds []string
	for _, key := range keys {
		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
@@ -871,7 +855,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
	// 1. either user meant key ( this is already handled above in fieldKeysForName )
	// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

	// Note:
	// Note:
	// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
	// If user only wants to search `key`, then they have to use `key`
	// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity

@@ -375,6 +375,13 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
		config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
	}

	if os.Getenv("INTERPOLATION_ENABLED") != "" {
		logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
		if config.Flagger.Config.Boolean == nil {
			config.Flagger.Config.Boolean = make(map[string]bool)
		}
		config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
	}
}
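
The shim's effect restated in TypeScript for consistency with the frontend sketches above (the Go code is authoritative; the flag name comes from FeatureInterpolationEnabled):

const boolean: Record<string, boolean> = {};
const legacy = process.env.INTERPOLATION_ENABLED; // deprecated path
if (legacy !== undefined && legacy !== '') {
	console.warn(
		'[Deprecated] use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead',
	);
	boolean.interpolation_enabled = legacy === 'true';
}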

func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {

@@ -5,7 +5,6 @@ import (
	"fmt"
	"log/slog"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -92,9 +91,8 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
	}

	// spatial_aggregation_cte
	if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
		return nil, err
	} else if frag != "" {
	frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
	if frag != "" {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}
@@ -130,10 +128,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
	}

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", []any{}, err
	}
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
	}
@@ -213,11 +208,8 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
	}

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
		query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", nil, err
	}
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
	}
@@ -286,10 +278,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
	}

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", nil, err
	}
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))

	baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -326,23 +315,25 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

	switch query.Aggregations[0].TimeAggregation {
	case metrictypes.TimeAggregationRate:
		rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
		wrapped := sqlbuilder.NewSelectBuilder()
		wrapped.Select("ts")
		for _, g := range query.GroupBy {
			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

	case metrictypes.TimeAggregationIncrease:
		incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
		wrapped := sqlbuilder.NewSelectBuilder()
		wrapped.Select("ts")
		for _, g := range query.GroupBy {
			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -357,15 +348,7 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
	_ uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
	_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {

	if query.Aggregations[0].SpaceAggregation.IsZero() {
		return "", []any{}, errors.Newf(
			errors.TypeInvalidInput,
			errors.CodeInvalidInput,
			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
		)
	}
) (string, []any) {
	sb := sqlbuilder.NewSelectBuilder()

	sb.Select("ts")
@@ -382,5 +365,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

@@ -3,7 +3,6 @@ package telemetrymeter
import (
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
)

@@ -64,7 +63,7 @@ func AggregationColumnForSamplesTable(
	temporality metrictypes.Temporality,
	timeAggregation metrictypes.TimeAggregation,
	tableHints *metrictypes.MetricTableHints,
) (string, error) {
) string {
	tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
	var aggregationColumn string
	switch temporality {
@@ -191,13 +190,5 @@ func AggregationColumnForSamplesTable(
		}

	}

	if aggregationColumn == "" {
		return "", errors.Newf(
			errors.TypeInvalidInput,
			errors.CodeInvalidInput,
			"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
		)
	}
	return aggregationColumn, nil
	return aggregationColumn
}

@@ -29,7 +29,13 @@ func (c *conditionBuilder) conditionFor(
	sb *sqlbuilder.SelectBuilder,
) (string, error) {

	if operator.IsStringSearchOperator() {
	switch operator {
	case qbtypes.FilterOperatorContains,
		qbtypes.FilterOperatorNotContains,
		qbtypes.FilterOperatorILike,
		qbtypes.FilterOperatorNotILike,
		qbtypes.FilterOperatorLike,
		qbtypes.FilterOperatorNotLike:
		value = querybuilder.FormatValueForContains(value)
	}

@@ -38,18 +44,6 @@ func (c *conditionBuilder) conditionFor(
		return "", err
	}

	// todo(srikanthccv): use the same data type collision handling when metrics schemas are updated
	switch v := value.(type) {
	case float64:
		tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
	case []any:
		if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
			if _, ok := v[0].(float64); ok {
				tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
			}
		}
	}

	switch operator {
	case qbtypes.FilterOperatorEqual:
		return sb.E(tblFieldName, value), nil

@@ -5,27 +5,67 @@ import (
	"fmt"
	"log/slog"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/flagger"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/types/featuretypes"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/huandu/go-sqlbuilder"
	"golang.org/x/exp/slices"
)

const (
	RateTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))`
	RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
	IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`

	IncreaseTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value, per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)`
	RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
	IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s, ((%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
	OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`

	RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1) OVER rate_window), (%s - lagInFrame(%s, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))) AS per_series_value`
	RateWithInterpolation = `
	CASE
		WHEN row_number() OVER rate_window = 1 THEN
			-- First row: try to interpolate using next value
			CASE
				WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
					-- Assume linear growth to next point
					(leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
					(leadInFrame(ts, 1) OVER rate_window - ts)
				ELSE
					0 -- No next value either, can't interpolate
			END
		WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
			-- Counter reset detected
			per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
		ELSE
			-- Normal case: calculate rate
			(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
			(ts - lagInFrame(ts, 1) OVER rate_window)
	END`

	IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`

	OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
	IncreaseWithInterpolation = `
	CASE
		WHEN row_number() OVER rate_window = 1 THEN
			-- First row: try to interpolate using next value
			CASE
				WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
					-- Calculate the interpolated increase for this interval
					((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
					(leadInFrame(ts, 1) OVER rate_window - ts)) *
					(leadInFrame(ts, 1) OVER rate_window - ts)
				ELSE
					0 -- No next value either, can't interpolate
			END
		WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
			-- Counter reset detected: the increase is the current value
			per_series_value
		ELSE
			-- Normal case: calculate increase
			(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
	END`
)
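
Both interpolation templates encode the same per-series arithmetic; as a reading aid, a compact TypeScript restatement (assumes strictly increasing timestamps per series; this sketch is not used by the builder):

type Point = { ts: number; value: number };

// Mirrors RateWithInterpolation: the first row interpolates forward, a drop
// below the previous value is treated as a counter reset, otherwise delta/dt.
function rateWithInterpolation(points: Point[]): number[] {
	return points.map((p, i) => {
		if (i === 0) {
			const next = points[i + 1];
			if (!next) return 0; // no neighbour on either side: cannot interpolate
			return (next.value - p.value) / (next.ts - p.ts); // assume linear growth
		}
		const prev = points[i - 1];
		const dt = p.ts - prev.ts;
		const delta = p.value - prev.value;
		return delta < 0 ? p.value / dt : delta / dt; // reset vs normal case
	});
}

IncreaseWithInterpolation is the same decision tree, emitting the per-interval increase (the delta, or the post-reset value) instead of the divided rate.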

type MetricQueryStatementBuilder struct {
@@ -218,9 +258,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(

	if b.CanShortCircuitDelta(query) {
		// spatial_aggregation_cte directly for certain delta queries
		if frag, args, err := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); err != nil {
			return nil, err
		} else if frag != "" {
		frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
		if frag != "" {
			cteFragments = append(cteFragments, frag)
			cteArgs = append(cteArgs, args)
		}
@@ -234,9 +273,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
	}

	// spatial_aggregation_cte
	if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
		return nil, err
	} else if frag != "" {
	frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
	if frag != "" {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}
@@ -256,7 +294,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
	timeSeriesCTE string,
	timeSeriesCTEArgs []any,
) (string, []any, error) {
) (string, []any) {
	stepSec := int64(query.StepInterval.Seconds())

	sb := sqlbuilder.NewSelectBuilder()
@@ -269,13 +307,10 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
	}

	aggCol, err := AggregationColumnForSamplesTable(
	aggCol := AggregationColumnForSamplesTable(
		start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
		query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
	)
	if err != nil {
		return "", []any{}, err
	}
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
	}
@@ -299,7 +334,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
@@ -402,10 +437,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
	}

	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", []any{}, err
	}
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
	}
@@ -429,7 +461,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
}

func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
	_ context.Context,
	ctx context.Context,
	start, end uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
	timeSeriesCTE string,
@@ -447,10 +479,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
		baseSb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
	}

	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", []any{}, err
	}
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
@@ -467,25 +496,36 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

	innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)

	// ! TODO (balanikaran) Get OrgID via function parameter instead of valuer.GenerateUUID()
	interpolationEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureInterpolationEnabled, featuretypes.NewFlaggerEvaluationContext(valuer.GenerateUUID()))

	switch query.Aggregations[0].TimeAggregation {
	case metrictypes.TimeAggregationRate:
		rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
		if interpolationEnabled {
			rateExpr = RateWithInterpolation
		}
		wrapped := sqlbuilder.NewSelectBuilder()
		wrapped.Select("ts")
		for _, g := range query.GroupBy {
			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", RateTmpl))
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

	case metrictypes.TimeAggregationIncrease:
		incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
		if interpolationEnabled {
			incExpr = IncreaseWithInterpolation
		}
		wrapped := sqlbuilder.NewSelectBuilder()
		wrapped.Select("ts")
		for _, g := range query.GroupBy {
			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", IncreaseTmpl))
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -494,6 +534,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
	}
}

// RateWithInterpolation is not enabled anywhere yet (there are gaps in its cache-handling logic), so it has not been wired into the multi-temporality path
func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
	_ context.Context,
	start, end uint64,
@@ -512,32 +553,18 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
	}

	aggForDeltaTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", []any{}, err
	}
	aggForCumulativeTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", []any{}, err
	}
	aggForDeltaTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggForCumulativeTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggForDeltaTemporality = fmt.Sprintf("%s/%d", aggForDeltaTemporality, stepSec)
	}

	switch query.Aggregations[0].TimeAggregation {
	case metrictypes.TimeAggregationRate:
		rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality,
			aggForDeltaTemporality,
			aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
			aggForCumulativeTemporality, aggForCumulativeTemporality,
		)
		rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, aggForCumulativeTemporality, aggForCumulativeTemporality, start)
		sb.SelectMore(rateExpr)
	case metrictypes.TimeAggregationIncrease:
		increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality,
			aggForDeltaTemporality,
			aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
			aggForCumulativeTemporality, aggForCumulativeTemporality,
		)
		increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, start)
		sb.SelectMore(increaseExpr)
	default:
		expr := fmt.Sprintf(OthersMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality)
@@ -565,14 +592,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
	_ uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
	_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {
	if query.Aggregations[0].SpaceAggregation.IsZero() {
		return "", []any{}, errors.Newf(
			errors.TypeInvalidInput,
			errors.CodeInvalidInput,
			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
		)
	}
) (string, []any) {
	sb := sqlbuilder.NewSelectBuilder()

	sb.Select("ts")
@@ -589,7 +609,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

func (b *MetricQueryStatementBuilder) BuildFinalSelect(

@@ -3,7 +3,6 @@ package telemetrymetrics
import (
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
)

@@ -169,7 +168,7 @@ func AggregationColumnForSamplesTable(
	temporality metrictypes.Temporality,
	timeAggregation metrictypes.TimeAggregation,
	tableHints *metrictypes.MetricTableHints,
) (string, error) {
) string {
	tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
	var aggregationColumn string
	switch temporality {
@@ -299,12 +298,5 @@ func AggregationColumnForSamplesTable(
			}
		}
	}
	if aggregationColumn == "" {
		return "", errors.Newf(
			errors.TypeInvalidInput,
			errors.CodeInvalidInput,
			"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
		)
	}
	return aggregationColumn, nil
	return aggregationColumn
}

@@ -35,7 +35,13 @@ func (c *conditionBuilder) conditionFor(
	sb *sqlbuilder.SelectBuilder,
) (string, error) {

	if operator.IsStringSearchOperator() {
	switch operator {
	case qbtypes.FilterOperatorContains,
		qbtypes.FilterOperatorNotContains,
		qbtypes.FilterOperatorILike,
		qbtypes.FilterOperatorNotILike,
		qbtypes.FilterOperatorLike,
		qbtypes.FilterOperatorNotLike:
		value = querybuilder.FormatValueForContains(value)
	}

@@ -152,9 +152,7 @@ func (f FilterOperator) IsStringSearchOperator() bool {
		FilterOperatorILike,
		FilterOperatorNotILike,
		FilterOperatorLike,
		FilterOperatorNotLike,
		FilterOperatorRegexp,
		FilterOperatorNotRegexp:
		FilterOperatorNotLike:
		return true
	default:
		return false

@@ -12,7 +12,6 @@ import (
var (
	ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
	ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
	ErrBetweenValuesType = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values of the number type")
	ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
	ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
)

@@ -179,6 +179,14 @@ func (q *QueryBuilderQuery[T]) validateAggregations() error {
				aggId,
			)
		}
		// Validate metric-specific aggregations
		if err := validateMetricAggregation(v); err != nil {
			aggId := fmt.Sprintf("aggregation #%d", i+1)
			if q.Name != "" {
				aggId = fmt.Sprintf("aggregation #%d in query '%s'", i+1, q.Name)
			}
			return wrapValidationError(err, aggId, "invalid metric %s: %s")
		}
	case TraceAggregation:
		if v.Expression == "" {
			aggId := fmt.Sprintf("aggregation #%d", i+1)
@@ -795,3 +803,85 @@ func validateQueryEnvelope(envelope QueryEnvelope, requestType RequestType) erro
		)
	}
}

// validateMetricAggregation validates metric-specific aggregation parameters
func validateMetricAggregation(agg MetricAggregation) error {
	// we can't decide anything here without known temporality
	if agg.Temporality == metrictypes.Unknown {
		return nil
	}

	// Validate that rate/increase are only used with appropriate temporalities
	if agg.TimeAggregation == metrictypes.TimeAggregationRate || agg.TimeAggregation == metrictypes.TimeAggregationIncrease {
		// For gauge metrics (Unspecified temporality), rate/increase doesn't make sense
		if agg.Temporality == metrictypes.Unspecified {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"rate/increase aggregation cannot be used with gauge metrics (unspecified temporality)",
			)
		}
	}

	// Validate percentile aggregations are only used with histogram types
	if agg.SpaceAggregation.IsPercentile() {
		if agg.Type != metrictypes.HistogramType && agg.Type != metrictypes.ExpHistogramType && agg.Type != metrictypes.SummaryType {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"percentile aggregation can only be used with histogram or summary metric types",
			)
		}
	}

	// Validate time aggregation values
	validTimeAggregations := []metrictypes.TimeAggregation{
		metrictypes.TimeAggregationUnspecified,
		metrictypes.TimeAggregationLatest,
		metrictypes.TimeAggregationSum,
		metrictypes.TimeAggregationAvg,
		metrictypes.TimeAggregationMin,
		metrictypes.TimeAggregationMax,
		metrictypes.TimeAggregationCount,
		metrictypes.TimeAggregationCountDistinct,
		metrictypes.TimeAggregationRate,
		metrictypes.TimeAggregationIncrease,
	}

	validTimeAgg := slices.Contains(validTimeAggregations, agg.TimeAggregation)
	if !validTimeAgg {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid time aggregation: %s",
			agg.TimeAggregation.StringValue(),
		).WithAdditional(
			"Valid time aggregations: latest, sum, avg, min, max, count, count_distinct, rate, increase",
		)
	}

	// Validate space aggregation values
	validSpaceAggregations := []metrictypes.SpaceAggregation{
		metrictypes.SpaceAggregationUnspecified,
		metrictypes.SpaceAggregationSum,
		metrictypes.SpaceAggregationAvg,
		metrictypes.SpaceAggregationMin,
		metrictypes.SpaceAggregationMax,
		metrictypes.SpaceAggregationCount,
		metrictypes.SpaceAggregationPercentile50,
		metrictypes.SpaceAggregationPercentile75,
		metrictypes.SpaceAggregationPercentile90,
		metrictypes.SpaceAggregationPercentile95,
		metrictypes.SpaceAggregationPercentile99,
	}

	validSpaceAgg := slices.Contains(validSpaceAggregations, agg.SpaceAggregation)
	if !validSpaceAgg {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid space aggregation: %s",
			agg.SpaceAggregation.StringValue(),
		).WithAdditional(
			"Valid space aggregations: sum, avg, min, max, count, p50, p75, p90, p95, p99",
		)
	}

	return nil
}
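
For quick reference, the rules above condensed into a TypeScript predicate (names simplified and hypothetical; the Go function is the source of truth):

function isValidMetricAggregation(a: {
	temporality: 'delta' | 'cumulative' | 'unspecified' | 'unknown';
	timeAggregation: string;
	spaceAggregation: string;
	metricType: 'gauge' | 'sum' | 'histogram' | 'exp_histogram' | 'summary';
}): boolean {
	if (a.temporality === 'unknown') return true; // cannot decide without temporality
	const rateLike = a.timeAggregation === 'rate' || a.timeAggregation === 'increase';
	if (rateLike && a.temporality === 'unspecified') return false; // gauges have no rate
	const percentile = /^p(50|75|90|95|99)$/.test(a.spaceAggregation);
	const bucketed = ['histogram', 'exp_histogram', 'summary'].includes(a.metricType);
	if (percentile && !bucketed) return false; // percentiles need histogram/summary data
	return true; // plus membership checks against the allowed aggregation lists
}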

File diff suppressed because it is too large
@@ -54,17 +54,17 @@ def test_rate_with_steady_values_and_reset(

    data = response.json()
    result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
    assert len(result_values) >= 58
    assert len(result_values) >= 59
    # the counter reset happened at the 31st minute
    assert (
        result_values[29]["value"] == 0.0167
        result_values[30]["value"] == 0.0167
    )  # i.e. 2/120, the 29th-to-31st-minute change
    assert (
        result_values[30]["value"] == 0.133
        result_values[31]["value"] == 0.133
    )  # i.e. 10/60, the 31st-to-32nd-minute change
    count_of_steady_rate = sum(1 for v in result_values if v["value"] == 0.0833)
    assert (
        count_of_steady_rate >= 55
        count_of_steady_rate >= 56
    )  # 59 - (1 reset + 1 high rate + 1 at the beginning)
    # All rates should be non-negative (stale periods = 0 rate)
    for v in result_values:

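The magic numbers in these assertions fall out of the reset arithmetic; a hedged TypeScript restatement (fixture values inferred from the comments above):

// Steady stretch across a skipped scrape: the counter rises by 2 over 120 s,
// so the rate at that index is 2 / 120 ≈ 0.0167.
const steadyAcrossGap = 2 / 120;

// Counter reset rule: when a sample drops below its predecessor, the
// RateWithoutNegative template divides the post-reset reading by the elapsed
// seconds instead of emitting a negative delta.
const rateAfterReset = (postResetValue: number, dtSeconds: number): number =>
	postResetValue / dtSeconds;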
||||
@@ -21,13 +21,8 @@ from fixtures.querier import (
|
||||
from fixtures.utils import get_testdata_file_path
|
||||
|
||||
MULTI_TEMPORALITY_FILE = get_testdata_file_path("multi_temporality_counters_1h.jsonl")
|
||||
MULTI_TEMPORALITY_FILE_10h = get_testdata_file_path(
|
||||
"multi_temporality_counters_10h.jsonl"
|
||||
)
|
||||
MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path(
|
||||
"multi_temporality_counters_24h.jsonl"
|
||||
)
|
||||
|
||||
MULTI_TEMPORALITY_FILE_10h = get_testdata_file_path("multi_temporality_counters_10h.jsonl")
|
||||
MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path("multi_temporality_counters_24h.jsonl")
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"time_aggregation, expected_value_at_31st_minute, expected_value_at_32nd_minute, steady_value",
|
||||
@@ -44,7 +39,7 @@ def test_with_steady_values_and_reset(
|
||||
time_aggregation: str,
|
||||
expected_value_at_31st_minute: float,
|
||||
expected_value_at_32nd_minute: float,
|
||||
steady_value: float,
|
||||
steady_value: float
|
||||
) -> None:
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
||||
@@ -72,24 +67,24 @@ def test_with_steady_values_and_reset(
|
||||
|
||||
data = response.json()
|
||||
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
|
||||
assert len(result_values) >= 58
|
||||
assert len(result_values) >= 59
|
||||
# the counter reset happened at 31st minute
|
||||
# we skip the rate value for the first data point without previous value
|
||||
assert result_values[29]["value"] == expected_value_at_31st_minute
|
||||
assert result_values[30]["value"] == expected_value_at_32nd_minute
|
||||
assert (
|
||||
result_values[38]["value"] == steady_value
|
||||
) # 38th minute is when cumulative shifts to delta
|
||||
result_values[30]["value"] == expected_value_at_31st_minute
|
||||
)
|
||||
assert (
|
||||
result_values[31]["value"] == expected_value_at_32nd_minute
|
||||
)
|
||||
assert (
|
||||
result_values[39]["value"] == steady_value
|
||||
) # 39th minute is when cumulative shifts to delta
|
||||
count_of_steady_rate = sum(1 for v in result_values if v["value"] == steady_value)
|
||||
assert (
|
||||
count_of_steady_rate >= 55
|
||||
count_of_steady_rate >= 56
|
||||
) # 59 - (1 reset + 1 high rate + 1 at the beginning)
|
||||
# All rates should be non-negative (stale periods = 0 rate)
|
||||
for v in result_values:
|
||||
assert (
|
||||
v["value"] >= 0
|
||||
), f"{time_aggregation} should not be negative: {v['value']}"
|
||||
|
||||
assert v["value"] >= 0, f"{time_aggregation} should not be negative: {v['value']}"
|
||||
|
||||
@pytest.mark.parametrize(
    "time_aggregation, stable_health_value, stable_products_value, stable_checkout_value, spike_checkout_value, stable_orders_value, spike_users_value",
@@ -166,26 +161,20 @@ def test_group_by_endpoint(
    assert (
        len(health_values) >= 58
    ), f"Expected >= 58 values for /health, got {len(health_values)}"
    count_steady_health = sum(
        1 for v in health_values if v["value"] == stable_health_value
    )
    count_steady_health = sum(1 for v in health_values if v["value"] == stable_health_value)
    assert (
        count_steady_health >= 57
    ), f"Expected >= 57 steady rate values ({stable_health_value}) for /health, got {count_steady_health}"
    # all /health rates should be stable except possibly first/last due to boundaries
    for v in health_values[1:-1]:
        assert (
            v["value"] == stable_health_value
        ), f"Expected /health rate {stable_health_value}, got {v['value']}"
        assert v["value"] == stable_health_value, f"Expected /health rate {stable_health_value}, got {v['value']}"

    # /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
    products_values = endpoint_values["/products"]
    assert (
        len(products_values) >= 49
    ), f"Expected >= 49 values for /products, got {len(products_values)}"
    count_steady_products = sum(
        1 for v in products_values if v["value"] == stable_products_value
    )
    count_steady_products = sum(1 for v in products_values if v["value"] == stable_products_value)

    # most values should be stable, some boundary values differ due to 10-min gap
    assert (
@@ -193,9 +182,7 @@ def test_group_by_endpoint(
    ), f"Expected >= 46 steady rate values ({stable_products_value}) for /products, got {count_steady_products}"

    # check that non-stable values are due to gap averaging (should be lower)
    gap_boundary_values = [
        v["value"] for v in products_values if v["value"] != stable_products_value
    ]
    gap_boundary_values = [v["value"] for v in products_values if v["value"] != stable_products_value]
    for val in gap_boundary_values:
        assert (
            0 < val < stable_products_value
@@ -206,16 +193,12 @@ def test_group_by_endpoint(
    assert (
        len(checkout_values) >= 59
    ), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
    count_steady_checkout = sum(
        1 for v in checkout_values if v["value"] == stable_checkout_value
    )
    count_steady_checkout = sum(1 for v in checkout_values if v["value"] == stable_checkout_value)
    assert (
        count_steady_checkout >= 53
    ), f"Expected >= 53 steady {time_aggregation} values ({stable_checkout_value}) for /checkout, got {count_steady_checkout}"
    # check that spike values exist (traffic spike +50/min at t40-t44)
    count_spike_checkout = sum(
        1 for v in checkout_values if v["value"] == spike_checkout_value
    )
    count_spike_checkout = sum(1 for v in checkout_values if v["value"] == spike_checkout_value)
    assert (
        count_spike_checkout >= 4
    ), f"Expected >= 4 spike {time_aggregation} values ({spike_checkout_value}) for /checkout, got {count_spike_checkout}"
@@ -237,16 +220,12 @@ def test_group_by_endpoint(
    assert (
        len(orders_values) >= 58
    ), f"Expected >= 58 values for /orders, got {len(orders_values)}"
    count_steady_orders = sum(
        1 for v in orders_values if v["value"] == stable_orders_value
    )
    count_steady_orders = sum(1 for v in orders_values if v["value"] == stable_orders_value)
    assert (
        count_steady_orders >= 55
    ), f"Expected >= 55 steady {time_aggregation} values ({stable_orders_value}) for /orders, got {count_steady_orders}"
    # check for counter reset effects - there should be some non-standard values
    non_standard_orders = [
        v["value"] for v in orders_values if v["value"] != stable_orders_value
    ]
    non_standard_orders = [v["value"] for v in orders_values if v["value"] != stable_orders_value]
    assert (
        len(non_standard_orders) >= 2
    ), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
@@ -273,7 +252,6 @@ def test_group_by_endpoint(
        count_increment_rate >= 8
    ), f"Expected >= 8 increment {time_aggregation} values ({spike_users_value}) for /users, got {count_increment_rate}"

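Each endpoint block in test_group_by_endpoint repeats the same count-then-assert pattern. Purely as an illustration of that pattern (this helper is hypothetical and not part of the change), the repetition could be folded into one function:

# Hypothetical helper, not part of this change: one place for the repeated
# "enough points, and enough of them at the steady rate" assertions.
from typing import Dict, List


def assert_steady_series(
    values: List[Dict[str, float]],
    endpoint: str,
    steady_value: float,
    min_points: int,
    min_steady: int,
) -> None:
    assert (
        len(values) >= min_points
    ), f"Expected >= {min_points} values for {endpoint}, got {len(values)}"
    steady_count = sum(1 for v in values if v["value"] == steady_value)
    assert (
        steady_count >= min_steady
    ), f"Expected >= {min_steady} steady rate values ({steady_value}) for {endpoint}, got {steady_count}"


# e.g. assert_steady_series(endpoint_values["/health"], "/health", stable_health_value, 58, 57)
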
@pytest.mark.parametrize(
    "time_aggregation, expected_value_at_30th_minute, expected_value_at_31st_minute, value_at_switch",
    [
@@ -289,7 +267,7 @@ def test_for_service_with_switch(
    time_aggregation: str,
    expected_value_at_30th_minute: float,
    expected_value_at_31st_minute: float,
    value_at_switch: float,
    value_at_switch: float
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
@@ -317,19 +295,22 @@ def test_for_service_with_switch(

    data = response.json()
    result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
    assert len(result_values) >= 59
    assert result_values[29]["value"] == expected_value_at_30th_minute  # 0.183
    assert result_values[30]["value"] == expected_value_at_31st_minute  # 0.183
    assert result_values[37]["value"] == value_at_switch  # 0.25
    assert len(result_values) >= 60
    assert (
        result_values[38]["value"] == value_at_switch  # 0.25
    )  # 39th minute is when cumulative shifts to delta
        result_values[30]["value"] == expected_value_at_30th_minute  # 0.183
    )
    assert (
        result_values[31]["value"] == expected_value_at_31st_minute  # 0.183
    )
    assert (
        result_values[38]["value"] == value_at_switch  # 0.25
    )
    assert (
        result_values[39]["value"] == value_at_switch  # 0.25
    )  # 39th minute is when cumulative shifts to delta
    # All rates should be non-negative (stale periods = 0 rate)
    for v in result_values:
        assert (
            v["value"] >= 0
        ), f"{time_aggregation} should not be negative: {v['value']}"

        assert v["value"] >= 0, f"{time_aggregation} should not be negative: {v['value']}"

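test_for_service_with_switch exercises a series whose temporality flips from cumulative to delta partway through (the "39th minute" in the comments above). In the cumulative regime each sample carries a running total, so the per-step increase is the difference between neighboring samples; in the delta regime each sample already is the increase. The sketch below shows one way such a mixed series could be stitched into a single increase series; it is an assumption about the semantics being tested, not the query engine itself.

# Hypothetical sketch of stitching cumulative and delta samples into one
# increase series; the real engine's handling may differ in detail.
from typing import List, Optional, Tuple

# (temporality, value); temporality is "cumulative" or "delta"
Sample = Tuple[str, float]


def stitched_increases(samples: List[Sample]) -> List[Optional[float]]:
    out: List[Optional[float]] = []
    prev_cumulative: Optional[float] = None
    for temporality, value in samples:
        if temporality == "delta":
            out.append(value)  # delta samples are already per-interval increases
            prev_cumulative = None  # a later cumulative sample must re-anchor
        elif prev_cumulative is None:
            out.append(None)  # first cumulative sample: nothing to diff against
        else:
            out.append(value - prev_cumulative)
        if temporality == "cumulative":
            prev_cumulative = value
    return out


# [("cumulative", 10), ("cumulative", 25), ("delta", 15)] -> [None, 15, 15]
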
@pytest.mark.parametrize(
    "time_aggregation, expected_value",
@@ -374,7 +355,6 @@ def test_for_week_long_time_range(
    for value in result_values[1:]:
        assert value["value"] == expected_value


@pytest.mark.parametrize(
    "time_aggregation, expected_value",
    [