Compare commits


13 Commits

Author SHA1 Message Date
Srikanth Chekuri
b1f1cd5b54 Merge branch 'main' into fix-issues 2026-02-16 15:11:01 +05:30
srikanthccv
2fa0c94c92 Merge branch 'fix-issues' of github.com:SigNoz/signoz into fix-issues 2026-02-16 14:45:35 +05:30
srikanthccv
312acf16fb chore: update validation.go 2026-02-16 14:45:13 +05:30
Ashwin Bhatkal
8898f02698 chore: variables based panel fetching (#10292)
* chore: replace prop drilling with fetch store

* chore: variables based panel fetching

* chore: add tests

* chore: move tests

* chore: add tests for new hook

* chore: resolve comments
2026-02-16 09:11:06 +00:00
Ashwin Bhatkal
f277009ff8 chore: replace prop drilling with fetch store (#10291)
* chore: shared utils update + API plumbing

* chore: variable fetch state machine

* chore: add tests

* chore: add tests

* chore: move tests

* chore: fix tests

* chore: replace prop drilling with fetch store

* chore: fix types

* chore: add tests for new utils

* chore: resolve comments
2026-02-16 08:50:08 +00:00
Abhi kumar
17c6b79d79 Revert "feat: enabled new bar panel (#10312)" (#10314)
This reverts commit 82dffdda56.
2026-02-16 14:09:19 +05:30
Abhi kumar
76d6c23217 fix: added fix for bar chart width calculation based on stepinterval (#10305) 2026-02-16 13:00:20 +05:30
Abhi kumar
82dffdda56 feat: enabled new bar panel (#10312) 2026-02-16 12:43:45 +05:30
Srikanth Chekuri
e13ea23a5d Merge branch 'main' into fix-issues 2026-02-16 12:25:47 +05:30
srikanthccv
4b117c85e6 chore: update return 2026-02-15 12:44:09 +05:30
srikanthccv
3e1a48acf9 chore: update tests 2026-02-15 12:38:48 +05:30
Srikanth Chekuri
6d282dfe68 Merge branch 'main' into fix-issues 2026-02-14 22:42:14 +05:30
srikanthccv
e3c04b378a fix: update rate/increase query and address several issues in builder queries 2026-02-14 11:02:09 +05:30
65 changed files with 2758 additions and 2029 deletions

View File

@@ -176,25 +176,6 @@ Wir haben Benchmarks veröffentlicht, die Loki mit SigNoz vergleichen. Schauen S
We ❤️ contributions, big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) first before you start contributing to SigNoz.
Not sure how to get started? Just ping us on the #contributing channel in our [slack community](https://signoz.io/slack)
### Our project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
<br /><br />
## Documentation

View File

@@ -221,34 +221,6 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING
Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)
### Project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
- [Ekansh Gupta](https://github.com/eKuG)
- [Aniket Agarwal](https://github.com/aniketio-ctrl)
#### Frontend
- [Yunus M](https://github.com/YounixM)
- [Vikrant Gupta](https://github.com/vikrantgupta25)
- [Sagar Rajput](https://github.com/SagarRajput-7)
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/therealpandey)
<br /><br />

View File

@@ -187,25 +187,6 @@ Jaeger 仅仅是一个分布式追踪系统。 但是 SigNoz 可以提供 metric
Not sure how to get started? Just reach out to us on the `#contributing` channel in our [slack community](https://signoz.io/slack).
### Project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
<br /><br />
## Documentation

View File

@@ -294,7 +294,6 @@ flagger:
config:
boolean:
use_span_metrics: true
interpolation_enabled: false
kafka_span_eval: false
string:
float:

View File

@@ -73,7 +73,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'time_series',
data: { results: [timeSeries] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
};
const params = makeBaseParams('time_series', [
@@ -156,7 +156,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
};
const params = makeBaseParams('scalar', [
@@ -239,7 +239,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
};
const params = makeBaseParams('scalar', [

View File

@@ -388,6 +388,7 @@ export function convertV5ResponseToLegacy(
warnings: v5Data?.data?.warning || [],
},
warning: v5Data?.warning || undefined,
meta: v5Data?.meta,
},
warning: v5Data?.warning || undefined,
};
@@ -406,6 +407,7 @@ export function convertV5ResponseToLegacy(
payload: {
data: convertedData,
warning: v5Response.payload?.data?.warning || undefined,
meta: v5Data?.meta,
},
};
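
The meta object now threaded through convertV5ResponseToLegacy carries the per-query step intervals that the bar panel consumes further down. A minimal sketch of the assumed shape, reconstructed from the test fixtures above and the `data.newResult.meta.stepIntervals` read in prepareBarPanelConfig (the exact interface in the codebase may differ):

```typescript
// Sketch only: field names taken from the test fixtures above.
interface QueryRangeMetaV5 {
	rowsScanned: number;
	bytesScanned: number;
	durationMs: number;
	// Per-query step interval keyed by query name, e.g. { A: 60 };
	// units are assumed to be seconds.
	stepIntervals: Record<string, number>;
}
```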

View File

@@ -78,12 +78,10 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
let user: ReturnType<typeof userEvent.setup>;
let mockOnValueUpdate: jest.Mock;
let mockSetVariablesToGetUpdated: jest.Mock;
beforeEach(() => {
user = userEvent.setup();
mockOnValueUpdate = jest.fn();
mockSetVariablesToGetUpdated = jest.fn();
jest.clearAllMocks();
});
@@ -102,9 +100,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -150,9 +145,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -195,9 +187,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -247,9 +236,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -272,9 +258,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -308,9 +291,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -344,9 +324,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -369,9 +346,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -405,9 +379,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -461,9 +432,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -508,9 +476,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -548,9 +513,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -582,9 +544,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);

View File

@@ -9,11 +9,15 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
import './DashboardVariableSelection.styles.scss';
@@ -22,8 +26,6 @@ function DashboardVariableSelection(): JSX.Element | null {
const {
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
} = useDashboard();
const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -55,11 +57,14 @@ function DashboardVariableSelection(): JSX.Element | null {
[dependencyData?.order],
);
// Trigger refetch when dependency order changes or global time changes
// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
useEffect(() => {
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
@@ -121,29 +126,14 @@ function DashboardVariableSelection(): JSX.Element | null {
return prev;
});
if (dependencyData) {
const updatedVariables: string[] = [];
onUpdateVariableNode(
name,
dependencyData.graph,
dependencyData.order,
(node) => updatedVariables.push(node),
);
setVariablesToGetUpdated((prev) => [
...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
]);
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
// Cascade: enqueue query-type descendants for refetching
enqueueDescendantsOfVariable(name);
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
@@ -158,9 +148,6 @@ function DashboardVariableSelection(): JSX.Element | null {
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
);
})}
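
The three store calls above (initialize, enqueue-all, enqueue-descendants) replace the old variablesToGetUpdated queue. A minimal sketch of the store surface as this component and settleVariableFetch consume it; the state names come from the tests later in this diff, while anything beyond `states` on the snapshot is an assumption:

```typescript
// Sketch of the assumed variableFetchStore surface; internals are not shown in this diff.
type VariableFetchState = 'idle' | 'waiting' | 'loading' | 'revalidating' | 'error';

interface VariableFetchSnapshot {
	// Per-variable fetch state, read by settleVariableFetch below.
	states: Record<string, VariableFetchState>;
}

declare const variableFetchStore: {
	getSnapshot(): VariableFetchSnapshot;
	subscribe(listener: () => void): () => void; // assumed, for useSyncExternalStore
};

// Register every variable on the dashboard for the coming cycle.
declare function initializeVariableFetchStore(names: string[]): void;
// Start a full fetch cycle: roots begin loading, dependents wait.
declare function enqueueFetchOfAllVariables(): void;
// Cascade a value change: mark query-type descendants of `name` for refetch.
declare function enqueueDescendantsOfVariable(name: string): void;
// Settle an in-flight fetch ('loading'/'revalidating') as success or failure.
declare function onVariableFetchComplete(name: string): void;
declare function onVariableFetchFailure(name: string): void;
```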

View File

@@ -2,18 +2,25 @@ import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { SuccessResponseV2 } from 'types/api';
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { getOptionsForDynamicVariable } from './util';
import {
buildExistingDynamicVariableQuery,
extractErrorMessage,
getOptionsForDynamicVariable,
mergeUniqueStrings,
settleVariableFetch,
} from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
@@ -24,7 +31,6 @@ type DynamicVariableInputProps = Pick<
'variableData' | 'onValueUpdate' | 'existingVariables'
>;
// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
variableData,
onValueUpdate,
@@ -55,14 +61,8 @@ function DynamicVariableInput({
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
() => mergeUniqueStrings(optionsData, relatedValues),
[optionsData, relatedValues],
);
@@ -104,67 +104,24 @@ function DynamicVariableInput({
(state) => state.globalTime,
);
// existing query is the query made from the other dynamic variables around this one with there current values
// for e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
// eslint-disable-next-line sonarjs/cognitive-complexity
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((val) => val !== null && val !== undefined && val !== '')
.map((val) => val.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((val) => {
// Check if value is a number
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${val.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
const existingQuery = useMemo(
() =>
buildExistingDynamicVariableQuery(
existingVariables,
variableData.id,
!!variableData.dynamicVariablesAttribute,
),
[existingVariables, variableData.id, variableData.dynamicVariablesAttribute],
);
// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
const handleSelectDropdownVisibilityChange = useCallback(
@@ -182,6 +139,73 @@ function DynamicVariableInput({
[onDropdownVisibleChange, optionsData, originalRelatedValues],
);
const handleQuerySuccess = useCallback(
(data: SuccessResponseV2<FieldValueResponse>): void => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Sync temp selection with latest API values when ALL is active and dropdown is open
if (variableData.allSelected && isDropdownOpen) {
const latestValues = mergeUniqueStrings(
newNormalizedValues,
newRelatedValues,
);
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
applyDefaultIfNeeded(
mergeUniqueStrings(newNormalizedValues, newRelatedValues),
);
}
settleVariableFetch(variableData.name, 'complete');
},
[
debouncedApiSearchText,
variableData.allSelected,
variableData.name,
isDropdownOpen,
tempSelection,
setTempSelection,
applyDefaultIfNeeded,
],
);
const handleQueryError = useCallback(
(error: { message?: string } | null): void => {
if (error) {
setErrorMessage(extractErrorMessage(error));
setIsRetryableError(checkIfRetryableError(error));
}
settleVariableFetch(variableData.name, 'failure');
},
[variableData.name],
);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
@@ -192,13 +216,22 @@ function DynamicVariableInput({
debouncedApiSearchText,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
variableFetchCycleId,
],
{
/*
* enabled if
* - we have dynamic variable source and attribute defined (ALWAYS)
* - AND
* - we're either still fetching variable options
* - OR
* - if variable is in idle state and we have already fetched options for it
**/
enabled:
variableData.type === 'DYNAMIC' &&
!!variableData.dynamicVariablesSource &&
!!variableData.dynamicVariablesAttribute,
queryFn: () =>
!!variableData.dynamicVariablesAttribute &&
(isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
queryFn: ({ signal }) =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
@@ -211,70 +244,10 @@ function DynamicVariableInput({
minTime,
maxTime,
existingQuery,
signal,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
const allNewOptions = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
applyDefaultIfNeeded(allNewOptions);
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
onSuccess: handleQuerySuccess,
onError: handleQueryError,
},
);
@@ -336,6 +309,8 @@ function DynamicVariableInput({
showRetryButton={isRetryableError}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
onSearch={handleSearch}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}
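
Both variable inputs gate their react-query call on this hook. A sketch of its return shape, reconstructed from the jest mocks later in this diff rather than from the hook source:

```typescript
// Reconstructed from the useVariableFetchState mocks in the test files below.
type VariableFetchState = 'idle' | 'waiting' | 'loading' | 'revalidating' | 'error';

interface UseVariableFetchStateResult {
	// Included in the react-query key, so a new fetch cycle busts the cache.
	variableFetchCycleId: number;
	variableFetchState: VariableFetchState;
	isVariableSettled: boolean; // e.g. 'idle', per the enabled comment above
	isVariableFetching: boolean; // 'loading' or 'revalidating'
	hasVariableFetchedOnce: boolean;
	isVariableWaitingForDependencies: boolean; // 'waiting'
	variableDependencyWaitMessage: string;
}

// The gating both inputs use: fetch while active, and keep a settled query
// enabled after the first successful fetch so cached options stay rendered.
function isVariableQueryEnabled(s: UseVariableFetchStateResult): boolean {
	return s.isVariableFetching || (s.isVariableSettled && s.hasVariableFetchedOnce);
}
```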

View File

@@ -3,8 +3,9 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { isArray, isEmpty, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -12,26 +13,18 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
import { areArraysEqual, settleVariableFetch } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';
type QueryVariableInputProps = Pick<
VariableItemProps,
| 'variableData'
| 'existingVariables'
| 'onValueUpdate'
| 'variablesToGetUpdated'
| 'setVariablesToGetUpdated'
| 'dependencyData'
'variableData' | 'existingVariables' | 'onValueUpdate'
>;
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
@@ -43,6 +36,15 @@ function QueryVariableInput({
(state) => state.globalTime,
);
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const {
tempSelection,
setTempSelection,
@@ -60,16 +62,6 @@ function QueryVariableInput({
strategy: queryVariableSelectStrategy,
});
const validVariableUpdate = useCallback((): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
}, [variableData.name, variablesToGetUpdated]);
const getOptions = useCallback(
// eslint-disable-next-line sonarjs/cognitive-complexity
(variablesRes: VariableResponseProps | null): void => {
@@ -103,18 +95,24 @@ function QueryVariableInput({
valueNotInList = true;
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (variableData.name && (valueNotInList || variableData.allSelected)) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(
variableData.name,
variableData.id,
newOptionsData,
true,
);
}
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
@@ -132,7 +130,11 @@ function QueryVariableInput({
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
@@ -141,10 +143,6 @@ function QueryVariableInput({
setOptionsData(newOptionsData);
// Apply default if no value is selected (e.g., new variable, first load)
applyDefaultIfNeeded(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
@@ -157,8 +155,6 @@ function QueryVariableInput({
onValueUpdate,
tempSelection,
setTempSelection,
validVariableUpdate,
setVariablesToGetUpdated,
applyDefaultIfNeeded,
],
);
@@ -169,27 +165,28 @@ function QueryVariableInput({
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
variableFetchCycleId,
],
{
enabled:
variableData &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
/*
* enabled if
* - we're either still fetching variable options
* - OR
* - if variable is in idle state and we have already fetched options for it
**/
enabled: isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
queryFn: ({ signal }) =>
dashboardVariablesQuery(
{
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
},
signal,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
settleVariableFetch(variableData.name, 'complete');
},
onError: (error: {
details: {
@@ -206,9 +203,7 @@ function QueryVariableInput({
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
settleVariableFetch(variableData.name, 'failure');
},
},
);
@@ -242,6 +237,8 @@ function QueryVariableInput({
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}

View File

@@ -28,6 +28,8 @@ interface SelectVariableInputProps {
showRetryButton?: boolean;
showIncompleteDataMessage?: boolean;
onSearch?: (searchTerm: string) => void;
waiting?: boolean;
waitingMessage?: string;
}
const MAX_TAG_DISPLAY_VALUES = 10;
@@ -65,6 +67,7 @@ function SelectVariableInput({
showRetryButton,
showIncompleteDataMessage,
onSearch,
waitingMessage,
}: SelectVariableInputProps): JSX.Element {
const commonProps = useMemo(
() => ({
@@ -78,7 +81,6 @@ function SelectVariableInput({
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
@@ -86,6 +88,8 @@ function SelectVariableInput({
'data-testid': 'variable-select',
onChange,
loading,
waitingMessage,
style: SelectItemStyle,
options,
errorMessage,
onRetry,
@@ -101,6 +105,7 @@ function SelectVariableInput({
defaultValue,
onChange,
loading,
waitingMessage,
options,
value,
errorMessage,

View File

@@ -47,15 +47,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -70,15 +61,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -94,15 +76,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -136,15 +109,6 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -167,15 +131,6 @@ describe('VariableItem', () => {
variableData={customVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -190,15 +145,6 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);

View File

@@ -1,7 +1,6 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import CustomVariableInput from './CustomVariableInput';
@@ -21,18 +20,12 @@ export interface VariableItemProps {
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function VariableItem({
variableData,
onValueUpdate,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const { name, description, type: variableType } = variableData;
@@ -65,9 +58,6 @@ function VariableItem({
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
)}
{variableType === 'DYNAMIC' && (

View File

@@ -7,6 +7,19 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DynamicVariableInput';
// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));
// Don't mock the components - use real ones
// Mock for useQuery
@@ -217,9 +230,10 @@ describe('DynamicVariableInput Component', () => {
'',
'Traces',
'service.name',
0, // variableFetchCycleId
],
expect.objectContaining({
enabled: true, // Type is 'DYNAMIC'
enabled: true, // isVariableFetching is true from mock
queryFn: expect.any(Function),
onSuccess: expect.any(Function),
onError: expect.any(Function),

View File

@@ -8,15 +8,6 @@ import '@testing-library/jest-dom/extend-expect';
import VariableItem from '../VariableItem';
const mockOnValueUpdate = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
const baseDependencyData = {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
};
const TEST_VARIABLE_ID = 'test_variable';
const VARIABLE_SELECT_TESTID = 'variable-select';
@@ -32,9 +23,6 @@ const renderVariableItem = (
variableData={variableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={baseDependencyData}
/>
</MockQueryClientProvider>,
);

View File

@@ -2,14 +2,12 @@ import {
buildDependencies,
buildDependencyGraph,
buildParentDependencyGraph,
checkAPIInvocation,
onUpdateVariableNode,
VariableGraph,
} from '../util';
import {
buildDependenciesMock,
buildGraphMock,
checkAPIInvocationMock,
onUpdateVariableNodeMock,
} from './mock';
@@ -72,97 +70,6 @@ describe('dashboardVariables - utilities and processors', () => {
});
});
describe('checkAPIInvocation', () => {
const {
variablesToGetUpdated,
variableData,
parentDependencyGraph,
} = checkAPIInvocationMock;
const mockRootElement = {
name: 'deployment_environment',
key: '036a47cd-9ffc-47de-9f27-0329198964a8',
id: '036a47cd-9ffc-47de-9f27-0329198964a8',
modificationUUID: '5f71b591-f583-497c-839d-6a1590c3f60f',
selectedValue: 'production',
type: 'QUERY',
// ... other properties omitted for brevity
} as any;
describe('edge cases', () => {
it('should return false when variableData is empty', () => {
expect(
checkAPIInvocation(
variablesToGetUpdated,
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return true when parentDependencyGraph is empty', () => {
expect(
checkAPIInvocation(variablesToGetUpdated, variableData, {}),
).toBeFalsy();
});
});
describe('variable sequences', () => {
it('should return true for valid sequence', () => {
expect(
checkAPIInvocation(
['k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return false for invalid sequence', () => {
expect(
checkAPIInvocation(
['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return false when variableData is not in sequence', () => {
expect(
checkAPIInvocation(
['deployment_environment', 'service_name', 'endpoint'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
});
describe('root element behavior', () => {
it('should return true for valid root element sequence', () => {
expect(
checkAPIInvocation(
[
'deployment_environment',
'service_name',
'endpoint',
'http_status_code',
],
mockRootElement,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return true for empty variablesToGetUpdated array', () => {
expect(
checkAPIInvocation([], mockRootElement, parentDependencyGraph),
).toBeTruthy();
});
});
});
describe('Graph Building Utilities', () => {
const { graph } = buildGraphMock;
const { variables } = buildDependenciesMock;
@@ -237,6 +144,72 @@ describe('dashboardVariables - utilities and processors', () => {
expect(buildDependencyGraph(graph)).toEqual(expected);
});
it('should return empty transitiveDescendants for an empty graph', () => {
const result = buildDependencyGraph({});
expect(result.transitiveDescendants).toEqual({});
expect(result.order).toEqual([]);
expect(result.hasCycle).toBe(false);
});
it('should compute transitiveDescendants for a linear chain (a -> b -> c)', () => {
const linearGraph: VariableGraph = {
a: ['b'],
b: ['c'],
c: [],
};
const result = buildDependencyGraph(linearGraph);
expect(result.transitiveDescendants).toEqual({
a: ['b', 'c'],
b: ['c'],
c: [],
});
});
it('should compute transitiveDescendants for a diamond dependency (a -> b, a -> c, b -> d, c -> d)', () => {
const diamondGraph: VariableGraph = {
a: ['b', 'c'],
b: ['d'],
c: ['d'],
d: [],
};
const result = buildDependencyGraph(diamondGraph);
expect(result.transitiveDescendants.a).toEqual(
expect.arrayContaining(['b', 'c', 'd']),
);
expect(result.transitiveDescendants.a).toHaveLength(3);
expect(result.transitiveDescendants.b).toEqual(['d']);
expect(result.transitiveDescendants.c).toEqual(['d']);
expect(result.transitiveDescendants.d).toEqual([]);
});
it('should handle disconnected components in transitiveDescendants', () => {
const disconnectedGraph: VariableGraph = {
a: ['b'],
b: [],
x: ['y'],
y: [],
};
const result = buildDependencyGraph(disconnectedGraph);
expect(result.transitiveDescendants.a).toEqual(['b']);
expect(result.transitiveDescendants.b).toEqual([]);
expect(result.transitiveDescendants.x).toEqual(['y']);
expect(result.transitiveDescendants.y).toEqual([]);
});
it('should return empty transitiveDescendants for all leaf nodes', () => {
const leafOnlyGraph: VariableGraph = {
a: [],
b: [],
c: [],
};
const result = buildDependencyGraph(leafOnlyGraph);
expect(result.transitiveDescendants).toEqual({
a: [],
b: [],
c: [],
});
});
});
describe('buildDependencies', () => {
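
The buildDependencyGraph implementation itself is not part of this diff. A sketch of a transitiveDescendants computation consistent with the tests above (memoized DFS over child edges, node itself excluded, results deduplicated):

```typescript
type VariableGraph = Record<string, string[]>;

// Sketch: collect every node reachable via child edges, excluding the node itself.
function computeTransitiveDescendants(
	graph: VariableGraph,
): Record<string, string[]> {
	const memo: Record<string, Set<string>> = {};

	function visit(node: string): Set<string> {
		if (memo[node]) return memo[node];
		const out = new Set<string>();
		memo[node] = out; // seeded before recursion so cycles terminate
		(graph[node] ?? []).forEach((child) => {
			out.add(child);
			visit(child).forEach((d) => out.add(d));
		});
		return out;
	}

	Object.keys(graph).forEach(visit);
	return Object.fromEntries(Object.keys(graph).map((k) => [k, [...memo[k]]]));
}
```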

View File

@@ -1,36 +1,3 @@
/* eslint-disable sonarjs/no-duplicate-string */
export const checkAPIInvocationMock = {
variablesToGetUpdated: [],
variableData: {
name: 'k8s_node_name',
key: '4d71d385-beaf-4434-8dbf-c62be68049fc',
allSelected: false,
customValue: '',
description: '',
id: '4d71d385-beaf-4434-8dbf-c62be68049fc',
modificationUUID: '77233d3c-96d7-4ccb-aa9d-11b04d563068',
multiSelect: false,
order: 6,
queryValue:
"SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}\nGROUP BY k8s_node_name",
selectedValue: 'gke-signoz-saas-si-consumer-bsc-e2sd4-a6d430fa-gvm2',
showALLOption: false,
sort: 'DISABLED',
textboxValue: '',
type: 'QUERY',
},
parentDependencyGraph: {
deployment_environment: [],
service_name: ['deployment_environment'],
endpoint: ['deployment_environment', 'service_name'],
http_status_code: ['endpoint'],
k8s_cluster_name: [],
environment: [],
k8s_node_name: ['k8s_cluster_name'],
k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
},
} as any;
export const onUpdateVariableNodeMock = {
nodeToUpdate: 'deployment_environment',
graph: {

View File

@@ -1,9 +1,16 @@
import { OptionData } from 'components/NewSelect/types';
import { isEmpty, isNull } from 'lodash-es';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isEmpty } from 'lodash-es';
import {
IDashboardVariables,
IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export function areArraysEqual(
@@ -45,30 +52,16 @@ const getDependentVariablesBasedOnVariableName = (
}
return variables
?.map((variable: any) => {
.map((variable) => {
if (variable.type === 'QUERY') {
// Combined pattern for all formats
// {{.variable_name}} - original format
// $variable_name - dollar prefix format
// [[variable_name]] - square bracket format
// {{variable_name}} - without dot format
const patterns = [
`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
`\\$${variableName}\\b`, // $var
`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
];
const combinedRegex = new RegExp(patterns.join('|'));
const queryValue = variable.queryValue || '';
const dependVarReMatch = queryValue.match(combinedRegex);
if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
if (textContainsVariableReference(queryValue, variableName)) {
return variable.name;
}
}
return null;
})
.filter((val: string | null) => !isNull(val));
.filter((val): val is string => val !== null);
};
export type VariableGraph = Record<string, string[]>;
@@ -300,33 +293,6 @@ export const onUpdateVariableNode = (
});
};
export const checkAPIInvocation = (
variablesToGetUpdated: string[],
variableData: IDashboardVariable,
parentDependencyGraph?: VariableGraph,
): boolean => {
if (isEmpty(variableData.name)) {
return false;
}
if (isEmpty(parentDependencyGraph)) {
return false;
}
// if no dependency then true
const haveDependency =
parentDependencyGraph?.[variableData.name || '']?.length > 0;
if (!haveDependency) {
return true;
}
// if variable is in the list and has dependency then check if its the top element in the queue then true else false
return (
variablesToGetUpdated.length > 0 &&
variablesToGetUpdated[0] === variableData.name
);
};
export const getOptionsForDynamicVariable = (
normalizedValues: (string | number | boolean)[],
relatedValues: string[],
@@ -391,3 +357,130 @@ export const getSelectValue = (
}
return selectedValue?.toString();
};
/**
* Merges multiple arrays of values into a single deduplicated string array.
*/
export function mergeUniqueStrings(
...arrays: (string | number | boolean)[][]
): string[] {
return [...new Set(arrays.flatMap((arr) => arr.map((v) => v.toString())))];
}
function isEligibleFilterVariable(
variable: IDashboardVariable,
currentVariableId: string,
): boolean {
if (variable.id === currentVariableId) {
return false;
}
if (variable.type !== 'DYNAMIC') {
return false;
}
if (!variable.dynamicVariablesAttribute) {
return false;
}
if (!variable.selectedValue || isEmpty(variable.selectedValue)) {
return false;
}
return !(variable.showALLOption && variable.allSelected);
}
function formatQueryValue(val: string): string {
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val;
}
return `'${val.replace(/'/g, "\\'")}'`;
}
function buildQueryPart(attribute: string, values: string[]): string {
const formatted = values.map(formatQueryValue);
if (formatted.length === 1) {
return `${attribute} = ${formatted[0]}`;
}
return `${attribute} IN [${formatted.join(', ')}]`;
}
/**
* Builds a filter query string from sibling dynamic variables' selected values.
* e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
*/
export function buildExistingDynamicVariableQuery(
existingVariables: IDashboardVariables | null,
currentVariableId: string,
hasDynamicAttribute: boolean,
): string {
if (!existingVariables || !hasDynamicAttribute) {
return '';
}
const queryParts: string[] = [];
for (const variable of Object.values(existingVariables)) {
// Skip the current variable being processed
if (!isEligibleFilterVariable(variable, currentVariableId)) {
continue;
}
const rawValues = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = rawValues
.filter(
(val): val is string | number | boolean =>
val !== null && val !== undefined && val !== '',
)
.map((val) => val.toString());
if (validValues.length > 0 && variable.dynamicVariablesAttribute) {
queryParts.push(
buildQueryPart(variable.dynamicVariablesAttribute, validValues),
);
}
}
return queryParts.join(' AND ');
}
function isVariableInActiveFetchState(state: string | undefined): boolean {
return state === 'loading' || state === 'revalidating';
}
/**
* Completes or fails a variable's fetch state machine transition.
* No-ops if the variable is not currently in an active fetch state.
*/
export function settleVariableFetch(
name: string | undefined,
outcome: 'complete' | 'failure',
): void {
if (!name) {
return;
}
const currentState = variableFetchStore.getSnapshot().states[name];
if (!isVariableInActiveFetchState(currentState)) {
return;
}
if (outcome === 'complete') {
onVariableFetchComplete(name);
} else {
onVariableFetchFailure(name);
}
}
export function extractErrorMessage(
error: { message?: string } | null,
): string {
if (!error) {
return SOMETHING_WENT_WRONG;
}
return (
error.message ||
'Please make sure configuration is valid and you have required setup and permissions'
);
}
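
The inline regex removed above matched four variable-reference syntaxes; textContainsVariableReference now owns that check. A sketch consistent with the removed patterns (the real helper in lib/dashboardVariables/variableReference may differ in detail, e.g. it might escape the variable name):

```typescript
// Sketch reproducing the four removed patterns; assumes variableName is regex-safe.
export function textContainsVariableReference(
	text: string,
	variableName: string,
): boolean {
	const patterns = [
		`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
		`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
		`\\$${variableName}\\b`, // $var
		`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
	];
	return new RegExp(patterns.join('|')).test(text);
}
```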

View File

@@ -1,4 +1,31 @@
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
variableFetchStore: {
getSnapshot: jest.fn(),
},
onVariableFetchComplete: jest.fn(),
onVariableFetchFailure: jest.fn(),
}));
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
areArraysEqual,
buildExistingDynamicVariableQuery,
extractErrorMessage,
mergeUniqueStrings,
onUpdateVariableNode,
settleVariableFetch,
VariableGraph,
} from './util';
// ────────────────────────────────────────────────────────────────
// Existing tests
// ────────────────────────────────────────────────────────────────
describe('areArraysEqual', () => {
it('should return true for equal arrays with same order', () => {
@@ -149,3 +176,348 @@ describe('onUpdateVariableNode', () => {
expect(visited).toEqual(['namespace', 'service', 'pod']);
});
});
// ────────────────────────────────────────────────────────────────
// New tests for functions added in recent commits
// ────────────────────────────────────────────────────────────────
function makeDynamicVar(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'DYNAMIC',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
allSelected: false,
dynamicVariablesAttribute: 'attr',
selectedValue: 'some-value',
...overrides,
} as IDashboardVariable;
}
describe('mergeUniqueStrings', () => {
it('should merge two arrays and deduplicate', () => {
expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toEqual(['a', 'b', 'c']);
});
it('should convert numbers and booleans to strings', () => {
expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toEqual([
'1',
'true',
'hello',
'2',
'false',
]);
});
it('should deduplicate when number and its string form both appear', () => {
expect(mergeUniqueStrings([42], ['42'])).toEqual(['42']);
});
it('should handle a single array', () => {
expect(mergeUniqueStrings(['x', 'y', 'x'])).toEqual(['x', 'y']);
});
it('should handle three or more arrays', () => {
expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toEqual([
'a',
'b',
'c',
]);
});
it('should return empty array when no arrays are provided', () => {
expect(mergeUniqueStrings()).toEqual([]);
});
it('should return empty array when all input arrays are empty', () => {
expect(mergeUniqueStrings([], [], [])).toEqual([]);
});
it('should preserve order of first occurrence', () => {
expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toEqual(['c', 'a', 'b']);
});
});
describe('buildExistingDynamicVariableQuery', () => {
// --- Guard clauses ---
it('should return empty string when existingVariables is null', () => {
expect(buildExistingDynamicVariableQuery(null, 'v1', true)).toBe('');
});
it('should return empty string when hasDynamicAttribute is false', () => {
const variables = { v2: makeDynamicVar({ id: 'v2' }) };
expect(buildExistingDynamicVariableQuery(variables, 'v1', false)).toBe('');
});
// --- Eligibility filtering ---
it('should skip the current variable (same id)', () => {
const variables = {
v1: makeDynamicVar({
id: 'v1',
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip non-DYNAMIC variables', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables without dynamicVariablesAttribute', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: undefined,
selectedValue: 'val',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with null selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: null }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with empty string selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: '' }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with empty array selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: [] }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables where showALLOption and allSelected are both true', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: true,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should include variable with showALLOption true but allSelected false', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: false,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"ns = 'prod'",
);
});
// --- Value formatting ---
it('should quote string values in the query', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: 'zeus',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name = 'zeus'",
);
});
it('should leave numeric values unquoted', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: '200',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
'http.status_code = 200',
);
});
it('should escape single quotes in string values', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'user.name',
selectedValue: "O'Brien",
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"user.name = 'O\\'Brien'",
);
});
it('should build an IN clause for array selectedValue with multiple items', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene']",
);
});
it('should handle mix of numeric and string values in IN clause', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: ['200', 'unknown'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"http.status_code IN [200, 'unknown']",
);
});
it('should filter out empty string values from array', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'region',
selectedValue: ['us-east', '', 'eu-west'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"region IN ['us-east', 'eu-west']",
);
});
// --- Multiple siblings ---
it('should join multiple sibling variables with AND', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
v3: makeDynamicVar({
id: 'v3',
dynamicVariablesAttribute: 'doc_op_type',
selectedValue: 'test',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'",
);
});
it('should return empty string when no variables are eligible', () => {
const variables = {
v1: makeDynamicVar({ id: 'v1' }), // same as current — skipped
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }), // not DYNAMIC
v3: makeDynamicVar({ id: 'v3', selectedValue: null }), // no value
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
});
describe('settleVariableFetch', () => {
const mockGetSnapshot = variableFetchStore.getSnapshot as jest.Mock;
const mockComplete = onVariableFetchComplete as jest.Mock;
const mockFailure = onVariableFetchFailure as jest.Mock;
beforeEach(() => {
jest.clearAllMocks();
});
it('should no-op when name is undefined', () => {
settleVariableFetch(undefined, 'complete');
expect(mockGetSnapshot).not.toHaveBeenCalled();
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
});
it.each(['idle', 'waiting', 'error', undefined] as const)(
'should no-op when variable state is %s',
(state) => {
mockGetSnapshot.mockReturnValue({ states: { myVar: state } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
},
);
it('should call onVariableFetchComplete when state is loading and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});
it('should call onVariableFetchComplete when state is revalidating and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});
it('should call onVariableFetchFailure when state is loading and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
it('should call onVariableFetchFailure when state is revalidating and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
});
describe('extractErrorMessage', () => {
const FALLBACK_MESSAGE =
'Please make sure configuration is valid and you have required setup and permissions';
it('should return SOMETHING_WENT_WRONG when error is null', () => {
expect(extractErrorMessage(null)).toBe('Something went wrong');
});
it('should return the error message when it exists', () => {
expect(extractErrorMessage({ message: 'Query timeout' })).toBe(
'Query timeout',
);
});
it('should return fallback when error object has no message property', () => {
expect(extractErrorMessage({})).toBe(FALLBACK_MESSAGE);
});
it('should return fallback when error.message is empty string', () => {
expect(extractErrorMessage({ message: '' })).toBe(FALLBACK_MESSAGE);
});
it('should return fallback when error.message is undefined', () => {
expect(extractErrorMessage({ message: undefined })).toBe(FALLBACK_MESSAGE);
});
});

View File

@@ -1,17 +1,11 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableItemProps } from '../VariableItem';
export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: IDashboardVariable;
variableData: VariableItemProps['variableData'];
onValueUpdate: VariableItemProps['onValueUpdate'];
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}

View File

@@ -17,6 +17,19 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';
// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));
// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
getFieldValues: jest.fn(),
@@ -95,7 +108,7 @@ describe('Dynamic Variable Default Behavior', () => {
}
}
if (queryFn) {
queryFn();
queryFn({ signal: undefined });
}
}
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
@@ -234,6 +247,7 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});
@@ -487,6 +501,7 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});

View File

@@ -49,15 +49,11 @@ const mockDashboard = {
// Mock the dashboard provider with stable functions to prevent infinite loops
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): any => ({
selectedDashboard: mockDashboard,
setSelectedDashboard: mockSetSelectedDashboard,
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
variablesToGetUpdated: ['env'], // Stable initial value
setVariablesToGetUpdated: mockSetVariablesToGetUpdated,
}),
}));

View File

@@ -11,6 +11,7 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { get } from 'lodash-es';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -77,6 +78,12 @@ export function prepareBarPanelConfig({
builder.setBands(getInitialStackedBands(seriesCount));
}
const stepIntervals: Record<string, number> = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const seriesList: QueryData[] = apiResponse?.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(
@@ -89,6 +96,8 @@ export function prepareBarPanelConfig({
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
const currentStepInterval = get(stepIntervals, series.queryName, undefined);
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
@@ -101,6 +110,7 @@ export function prepareBarPanelConfig({
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
stepInterval: currentStepInterval,
});
});
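The lookup above assumes the query-range response carries a per-query map of step sizes in seconds under data.newResult.meta.stepIntervals. A hedged sketch of the shape being read (the values are made up):

import { get } from 'lodash-es';

// Hypothetical payload fragment; real values come from the query-range API.
const apiResponse = {
  data: { newResult: { meta: { stepIntervals: { A: 60, B: 300 } } } },
};
const stepIntervals: Record<string, number> = get(
  apiResponse,
  'data.newResult.meta.stepIntervals',
  {},
);
get(stepIntervals, 'A', undefined); // 60, forwarded to addSeries as stepInterval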

View File

@@ -6,10 +6,12 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
@@ -53,7 +55,6 @@ function GridCardGraph({
customOnRowClick,
customTimeRangeWindowForCoRelation,
enableDrillDown,
widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
@@ -64,8 +65,8 @@ function GridCardGraph({
toScrollWidgetId,
setToScrollWidgetId,
setDashboardQueryRangeCalled,
variablesToGetUpdated,
} = useDashboard();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -117,10 +118,25 @@ function GridCardGraph({
const updatedQuery = widget?.query;
const referencedVariableNames = useMemo(() => {
if (!variables || !updatedQuery) {
return [];
}
const allNames = Object.values(variables)
.map((v) => v.name)
.filter((name): name is string => !!name);
return getVariableReferencesInQuery(updatedQuery, allNames);
}, [updatedQuery, variables]);
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;
const isPanelWaitingOnAnyVariable = useIsPanelWaitingOnVariable(
referencedVariableNames,
);
const queryEnabledCondition =
isVisible && !isEmptyWidget && isQueryEnabled && !isPanelWaitingOnAnyVariable;
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (widget.panelTypes !== PANEL_TYPES.LIST) {
@@ -177,27 +193,6 @@ function GridCardGraph({
[requestData.query],
);
// Bring back dependency on variable chaining for panels to refetch,
// but only for non-dynamic variables. We derive a stable token from
// the head of the variablesToGetUpdated queue when it's non-dynamic.
const nonDynamicVariableChainToken = useMemo(() => {
if (!variablesToGetUpdated || variablesToGetUpdated.length === 0) {
return undefined;
}
if (!variables) {
return undefined;
}
const headName = variablesToGetUpdated[0];
const variableObj = Object.values(variables).find(
(variable) => variable?.name === headName,
);
if (variableObj && variableObj.type !== 'DYNAMIC') {
return headName;
}
return undefined;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variablesToGetUpdated, variables]);
const queryResponse = useGetQueryRange(
{
...requestData,
@@ -224,11 +219,7 @@ function GridCardGraph({
requestData,
variables
? Object.entries(variables).reduce((acc, [id, variable]) => {
if (
variable.type !== 'DYNAMIC' ||
(widgetsByDynamicVariableId?.[variable.id] &&
widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
) {
if (variable.name && referencedVariableNames.includes(variable.name)) {
return { ...acc, [id]: variable.selectedValue };
}
return acc;
@@ -237,9 +228,6 @@ function GridCardGraph({
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
? [customTimeRange.startTime, customTimeRange.endTime]
: []),
// Include non-dynamic variable chaining token to drive refetches
// only when a non-dynamic variable is at the head of the queue
...(nonDynamicVariableChainToken ? [nonDynamicVariableChainToken] : []),
],
retry(failureCount, error): boolean {
if (
@@ -252,7 +240,7 @@ function GridCardGraph({
return failureCount < 2;
},
keepPreviousData: true,
enabled: queryEnabledCondition && !nonDynamicVariableChainToken,
enabled: queryEnabledCondition,
refetchOnMount: false,
onError: (error) => {
const errorMessage =
@@ -319,7 +307,7 @@ function GridCardGraph({
threshold={threshold}
headerMenuList={menuList}
isFetchingResponse={
queryResponse.isFetching || variablesToGetUpdated.length > 0
queryResponse.isFetching || isPanelWaitingOnAnyVariable
}
setRequestData={setRequestData}
onClickHandler={onClickHandler}
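Net effect of this file: a panel's query is keyed and gated only on the variables its query actually references. A hedged illustration of the extraction step (the reference syntaxes matched by getVariableReferencesInQuery are assumed here, not verified):

import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import { Query } from 'types/api/queryBuilder/queryBuilderData';

declare const query: Query; // the widget's builder query

// Hypothetical: suppose the serialized query text mentions {{env}} and $service.
const allNames = ['env', 'service', 'pod'];
const referenced = getVariableReferencesInQuery(query, allNames);
// -> ['env', 'service']; 'pod' is unreferenced, so its fetch state neither
// blocks this panel nor appears in the panel's query key.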

View File

@@ -72,7 +72,6 @@ export interface GridCardGraphProps {
customOnRowClick?: (record: RowData) => void;
customTimeRangeWindowForCoRelation?: string | undefined;
enableDrillDown?: boolean;
widgetsByDynamicVariableId?: Record<string, string[]>;
}
export interface GetGraphVisibilityStateOnLegendClickProps {

View File

@@ -16,7 +16,6 @@ import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -102,8 +101,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();
useEffect(() => {
setCurrentPanelMap(panelMap);
}, [panelMap]);
@@ -617,7 +614,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
onDragSelect={onDragSelect}
dataAvailable={checkIfDataExists}
enableDrillDown={enableDrillDown}
widgetsByDynamicVariableId={widgetsByDynamicVariableId}
/>
</Card>
</CardContainer>

View File

@@ -5,16 +5,12 @@ import { PanelMode } from 'container/DashboardContainer/visualization/panels/typ
import { WidgetGraphComponentProps } from 'container/GridCardLayout/GridCard/types';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';
export type PanelWrapperProps = {
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
queryResponse: UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
widget: Widgets;
setRequestData?: WidgetGraphComponentProps['setRequestData'];
isFullViewMode?: boolean;

View File

@@ -0,0 +1,316 @@
import { act, renderHook } from '@testing-library/react';
import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { useIsPanelWaitingOnVariable } from '../useVariableFetchState';
function makeVariable(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'QUERY',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
...overrides,
};
}
function resetStores(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
function setFetchStates(states: Record<string, VariableFetchState>): void {
variableFetchStore.set(() => ({
states,
lastUpdated: {},
cycleIds: {},
}));
}
function setDashboardVariables(
overrides: Partial<IDashboardVariablesStoreState>,
): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
...overrides,
}));
}
describe('useIsPanelWaitingOnVariable', () => {
beforeEach(() => {
resetStores();
});
it('should return false when variableNames is empty', () => {
const { result } = renderHook(() => useIsPanelWaitingOnVariable([]));
expect(result.current).toBe(false);
});
it('should return false when all referenced variables are idle', () => {
setFetchStates({ a: 'idle', b: 'idle' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val1' }),
b: makeVariable({ id: 'b', selectedValue: 'val2' }),
},
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
expect(result.current).toBe(false);
});
it('should return true when a variable is loading with empty selectedValue', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return true when a variable is waiting with empty selectedValue', () => {
setFetchStates({ a: 'waiting' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: '' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return true when a variable is revalidating with empty selectedValue', () => {
setFetchStates({ a: 'revalidating' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return false when a variable is loading but has a selectedValue', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'some-value' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return true for DYNAMIC variable with allSelected=true that is loading', () => {
setFetchStates({ dyn: 'loading' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'some-val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should return true for DYNAMIC variable with allSelected=true that is waiting', () => {
setFetchStates({ dyn: 'waiting' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should return false for DYNAMIC variable with allSelected=true that is idle', () => {
setFetchStates({ dyn: 'idle' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(false);
});
it('should return false for non-DYNAMIC variable with allSelected=false and non-empty value that is loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({
id: 'a',
selectedValue: 'val',
allSelected: false,
}),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return true if any one of multiple variables is blocking', () => {
setFetchStates({ a: 'idle', b: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val' }),
b: makeVariable({ id: 'b', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
expect(result.current).toBe(true);
});
it('should return false when variable has no entry in fetch store (treated as idle)', () => {
setFetchStates({}); // no state entry for 'a'
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return false when variable is in error state with empty selectedValue', () => {
setFetchStates({ a: 'error' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should react to store updates', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
// Simulate variable fetch completing
act(() => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
});
expect(result.current).toBe(false);
});
it('should handle DYNAMIC variable with allSelected=false and empty selectedValue as blocking', () => {
setFetchStates({ dyn: 'loading' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: undefined,
allSelected: false,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should handle variable with array selectedValue as non-blocking when loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: ['val1', 'val2'] }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should handle variable with empty array selectedValue as blocking when loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: [] }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
});
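Read together, the cases above amount to a per-variable blocking predicate that the hook OR-reduces across the referenced names. A minimal sketch consistent with the tests (a reconstruction, not the hook's source):

type FetchState = 'idle' | 'waiting' | 'loading' | 'revalidating' | 'error';

function isBlocking(
  state: FetchState | undefined,
  variable: { selectedValue?: unknown; type?: string; allSelected?: boolean },
): boolean {
  const inFlight =
    state === 'loading' || state === 'waiting' || state === 'revalidating';
  // idle, error, or a missing store entry never blocks.
  if (!inFlight) return false;
  const { selectedValue } = variable;
  const hasNoValue =
    selectedValue === undefined ||
    selectedValue === '' ||
    (Array.isArray(selectedValue) && selectedValue.length === 0);
  // A DYNAMIC variable with ALL selected blocks even while it still holds a
  // value, presumably because the resolved option set may change mid-fetch.
  return hasNoValue || (variable.type === 'DYNAMIC' && variable.allSelected === true);
}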

View File

@@ -10,13 +10,12 @@ import {
GetQueryResultsProps,
} from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { SuccessResponse, Warning } from 'types/api';
import APIError from 'types/api/error';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
type UseGetQueryRangeOptions = UseQueryOptions<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
MetricQueryRangeSuccessResponse,
APIError | Error
>;
@@ -30,10 +29,7 @@ type UseGetQueryRange = (
widgetIndex: number;
publicDashboardId: string;
},
) => UseQueryResult<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
Error
>;
) => UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
export const useGetQueryRange: UseGetQueryRange = (
requestData,
@@ -145,10 +141,7 @@ export const useGetQueryRange: UseGetQueryRange = (
};
}, [options?.retry]);
return useQuery<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
APIError | Error
>({
return useQuery<MetricQueryRangeSuccessResponse, APIError | Error>({
queryFn: async ({ signal }) =>
GetMetricQueryRange(
modifiedRequestData,

View File

@@ -19,7 +19,11 @@ import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { SuccessResponse, SuccessResponseV2, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import {
MetricQueryRangeSuccessResponse,
MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';
import { ExecStats, MetricRangePayloadV5 } from 'types/api/v5/queryRange';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -205,13 +209,13 @@ export async function GetMetricQueryRange(
widgetIndex: number;
publicDashboardId: string;
},
): Promise<SuccessResponse<MetricRangePayloadProps> & { warning?: Warning }> {
): Promise<MetricQueryRangeSuccessResponse> {
let legendMap: Record<string, string>;
let response:
| SuccessResponse<MetricRangePayloadProps>
| SuccessResponseV2<MetricRangePayloadV5>
| (SuccessResponse<MetricRangePayloadProps> & { warning?: Warning });
| MetricQueryRangeSuccessResponse
| SuccessResponseV2<MetricRangePayloadV5>;
let warning: Warning | undefined;
let meta: ExecStats | undefined;
const panelType = props.originalGraphType || props.graphType;
@@ -299,6 +303,7 @@ export async function GetMetricQueryRange(
);
warning = response.payload.warning || undefined;
meta = response.payload.meta || undefined;
} else {
const v5Response = await getQueryRangeV5(
v5Result.queryPayload,
@@ -318,6 +323,7 @@ export async function GetMetricQueryRange(
);
warning = response.payload.warning || undefined;
meta = response.payload.meta || undefined;
}
} else {
const legacyResult = prepareQueryRangePayload(props);
@@ -384,6 +390,7 @@ export async function GetMetricQueryRange(
return {
...response,
warning,
meta,
};
}

View File

@@ -81,6 +81,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
barAlignment,
barMaxWidth,
barWidthFactor,
stepInterval,
} = this.props;
if (pathBuilder) {
return { paths: pathBuilder };
@@ -104,6 +105,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
barAlignment,
barMaxWidth,
barWidthFactor,
stepInterval,
});
return pathsBuilder(self, seriesIdx, idx0, idx1);
@@ -209,12 +211,14 @@ function getPathBuilder({
barAlignment = BarAlignment.Center,
barWidthFactor = 0.6,
barMaxWidth = 200,
stepInterval,
}: {
drawStyle: DrawStyle;
lineInterpolation?: LineInterpolation;
barAlignment?: BarAlignment;
barMaxWidth?: number;
barWidthFactor?: number;
stepInterval?: number;
}): Series.PathBuilder {
if (!builders) {
throw new Error('Required uPlot path builders are not available');
@@ -222,14 +226,13 @@ function getPathBuilder({
if (drawStyle === DrawStyle.Bar) {
const pathBuilders = uPlot.paths;
const barsConfigKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
if (!builders[barsConfigKey] && pathBuilders.bars) {
builders[barsConfigKey] = pathBuilders.bars({
size: [barWidthFactor, barMaxWidth],
align: barAlignment,
});
}
return builders[barsConfigKey];
return getBarPathBuilder({
pathBuilders,
barAlignment,
barWidthFactor,
barMaxWidth,
stepInterval,
});
}
if (drawStyle === DrawStyle.Line) {
@@ -247,4 +250,81 @@ function getPathBuilder({
return builders.spline;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
function getBarPathBuilder({
pathBuilders,
barAlignment,
barWidthFactor,
barMaxWidth,
stepInterval,
}: {
pathBuilders: typeof uPlot.paths;
barAlignment: BarAlignment;
barWidthFactor: number;
barMaxWidth: number;
stepInterval?: number;
}): Series.PathBuilder {
if (!builders) {
throw new Error('Required uPlot path builders are not available');
}
const barsPathBuilderFactory = pathBuilders.bars;
// When a stepInterval is provided (in seconds), cap the maximum bar width
// so that a single bar never visually spans more than stepInterval worth
// of time on the x-scale.
if (
typeof stepInterval === 'number' &&
stepInterval > 0 &&
barsPathBuilderFactory
) {
return (
self: uPlot,
seriesIdx: number,
idx0: number,
idx1: number,
): Series.Paths | null => {
let effectiveBarMaxWidth = barMaxWidth;
const xScale = self.scales.x as uPlot.Scale | undefined;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = self.valToPos(start, 'x');
const endPx = self.valToPos(end, 'x');
const intervalPx = Math.abs(endPx - startPx);
if (intervalPx > 0) {
effectiveBarMaxWidth =
typeof barMaxWidth === 'number'
? Math.min(barMaxWidth, intervalPx)
: intervalPx;
}
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
if (builders && !builders[barsCfgKey]) {
builders[barsCfgKey] = barsPathBuilderFactory({
size: [barWidthFactor, effectiveBarMaxWidth],
align: barAlignment,
});
}
return builders && builders[barsCfgKey]
? builders[barsCfgKey](self, seriesIdx, idx0, idx1)
: null;
};
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
if (!builders[barsCfgKey] && barsPathBuilderFactory) {
builders[barsCfgKey] = barsPathBuilderFactory({
size: [barWidthFactor, barMaxWidth],
align: barAlignment,
});
}
return builders[barsCfgKey];
}
export type { SeriesProps };
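A worked example of the capping rule, with assumed numbers: if the x-scale renders 3600 s across 720 px, one step of 60 s occupies 12 px, so the cap wins over the configured 200 px maximum.

const pxPerSecond = 720 / 3600; // 0.2 px per second (assumed viewport)
const intervalPx = 60 * pxPerSecond; // 12 px for one stepInterval
const effectiveBarMaxWidth = Math.min(200, intervalPx); // 12, not 200
// Each bar is therefore capped at the on-screen width of one step interval.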

View File

@@ -176,6 +176,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
show?: boolean;
spanGaps?: boolean;
isDarkMode?: boolean;
stepInterval?: number;
}
export interface LegendItem {

View File

@@ -84,8 +84,6 @@ const DashboardContext = createContext<IDashboardContext>({
toScrollWidgetId: '',
setToScrollWidgetId: () => {},
updateLocalStorageDashboardVariables: () => {},
variablesToGetUpdated: [],
setVariablesToGetUpdated: () => {},
dashboardQueryRangeCalled: false,
setDashboardQueryRangeCalled: () => {},
selectedRowWidgetId: '',
@@ -183,10 +181,6 @@ export function DashboardProvider({
exact: true,
});
const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
[],
);
const [layouts, setLayouts] = useState<Layout[]>([]);
const [panelMap, setPanelMap] = useState<
@@ -517,8 +511,6 @@ export function DashboardProvider({
updatedTimeRef,
setToScrollWidgetId,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,
@@ -541,8 +533,6 @@ export function DashboardProvider({
toScrollWidgetId,
updateLocalStorageDashboardVariables,
currentDashboard,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,

View File

@@ -47,8 +47,6 @@ export interface IDashboardContext {
allSelected: boolean,
isDynamic?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dashboardQueryRangeCalled: boolean;
setDashboardQueryRangeCalled: (value: boolean) => void;
selectedRowWidgetId: string | null;

View File

@@ -1,13 +1,14 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { EQueryType } from 'types/common/dashboard';
import { Warning } from '..';
import { SuccessResponse, Warning } from '..';
import {
IBuilderFormula,
IBuilderQuery,
IClickHouseQuery,
IPromQLQuery,
} from '../queryBuilder/queryBuilderData';
import { ExecStats } from '../v5/queryRange';
import { QueryData, QueryDataV3 } from '../widgets/getQuery';
export type QueryRangePayload = {
@@ -35,8 +36,15 @@ export interface MetricRangePayloadProps {
newResult: MetricRangePayloadV3;
warnings?: string[];
};
meta?: ExecStats;
}
/** Query range success response including optional warning and meta */
export type MetricQueryRangeSuccessResponse = SuccessResponse<
MetricRangePayloadProps,
unknown
> & { warning?: Warning; meta?: ExecStats };
export interface MetricRangePayloadV3 {
data: {
result: QueryDataV3[];
@@ -44,4 +52,5 @@ export interface MetricRangePayloadV3 {
warnings?: string[];
};
warning?: Warning;
meta?: ExecStats;
}
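A hedged usage sketch of the widened alias (the function name and logging are illustrative only):

import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';

function reportQueryRange(res: MetricQueryRangeSuccessResponse): void {
  if (res.warning) console.warn(res.warning);
  // meta is the ExecStats type extended below with per-query step sizes.
  console.log(res.meta?.stepIntervals); // e.g. { A: 60 }
}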

View File

@@ -334,6 +334,7 @@ export interface ExecStats {
rowsScanned: number;
bytesScanned: number;
durationMs: number;
stepIntervals: Record<string, number>;
}
export interface Label {

View File

@@ -3,9 +3,8 @@ package flagger
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
var (
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
)
func MustNewRegistry() featuretypes.Registry {
@@ -18,14 +17,6 @@ func MustNewRegistry() featuretypes.Registry {
DefaultVariant: featuretypes.MustNewName("disabled"),
Variants: featuretypes.NewBooleanVariants(),
},
&featuretypes.Feature{
Name: FeatureInterpolationEnabled,
Kind: featuretypes.KindBoolean,
Stage: featuretypes.StageExperimental,
Description: "Controls whether to enable interpolation",
DefaultVariant: featuretypes.MustNewName("disabled"),
Variants: featuretypes.NewBooleanVariants(),
},
&featuretypes.Feature{
Name: FeatureKafkaSpanEval,
Kind: featuretypes.KindBoolean,

View File

@@ -205,7 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
key.Indexes = matchingKey.Indexes
key.Materialized = matchingKey.Materialized
key.JSONPlan = matchingKey.JSONPlan
return actions
} else {
// multiple matching keys, set materialized only if all the keys are materialized

View File

@@ -483,6 +483,22 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
value1 := v.Visit(values[0])
value2 := v.Visit(values[1])
switch value1.(type) {
case float64:
if _, ok := value2.(float64); !ok {
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
return ""
}
case string:
if _, ok := value2.(string); !ok {
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
return ""
}
default:
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
return ""
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
@@ -855,7 +871,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// 1. either user meant key ( this is already handled above in fieldKeysForName )
// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'
// Note:
// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
// If user only wants to search `key`, then they have to use `key`
// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity

View File

@@ -375,13 +375,6 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
}
if os.Getenv("INTERPOLATION_ENABLED") != "" {
logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
if config.Flagger.Config.Boolean == nil {
config.Flagger.Config.Boolean = make(map[string]bool)
}
config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
}
}
func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {

View File

@@ -5,6 +5,7 @@ import (
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -73,7 +74,7 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
cteArgs [][]any
)
if b.metricsStatementBuilder.CanShortCircuitDelta(query) {
if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
// spatial_aggregation_cte directly for certain delta queries
if frag, args, err := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables); err != nil {
return nil, err
@@ -91,8 +92,9 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -122,13 +124,16 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", []any{}, err
return "", nil, err
}
sb.SelectMore(col)
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -150,7 +155,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
Variables: variables,
}, start, end)
if err != nil {
return "", []any{}, err
return "", nil, err
}
}
if filterWhere != nil {
@@ -208,8 +213,11 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -278,7 +286,10 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -315,25 +326,23 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -348,7 +357,15 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any) {
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
)
}
sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
@@ -365,5 +382,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}

View File

@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
},
expectedErr: nil,
@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
},
expectedErr: nil,
@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
},
expectedErr: nil,
@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
},
expectedErr: nil,

View File

@@ -3,6 +3,7 @@ package telemetrymeter
import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)
@@ -63,7 +64,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) string {
) (string, error) {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -190,5 +191,13 @@ func AggregationColumnForSamplesTable(
}
}
return aggregationColumn
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
}

View File

@@ -29,13 +29,7 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
if operator.IsStringSearchOperator() {
value = querybuilder.FormatValueForContains(value)
}
@@ -44,6 +38,18 @@ func (c *conditionBuilder) conditionFor(
return "", err
}
// TODO(srikanthccv): use the same data type collision handling when metrics schemas are updated
switch v := value.(type) {
case float64:
tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
case []any:
if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
if _, ok := v[0].(float64); ok {
tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
}
}
}
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(tblFieldName, value), nil

View File

@@ -5,67 +5,27 @@ import (
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/slices"
)
const (
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
RateTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))`
RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s, ((%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
IncreaseTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value, per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)`
RateWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Assume linear growth to next point
(leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected
per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
ELSE
-- Normal case: calculate rate
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
(ts - lagInFrame(ts, 1) OVER rate_window)
END`
RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1) OVER rate_window), (%s - lagInFrame(%s, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))) AS per_series_value`
IncreaseWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Calculate the interpolated increase for this interval
((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)) *
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected: the increase is the current value
per_series_value
ELSE
-- Normal case: calculate increase
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
END`
IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
)
type MetricQueryStatementBuilder struct {
@@ -147,54 +107,6 @@ func (b *MetricQueryStatementBuilder) Build(
return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
}
// Fastpath (no fingerprint grouping)
// canShortCircuitDelta returns true if we can use the optimized query
// for the given query
// This is used to avoid the group by fingerprint thus improving the performance
// for certain queries
// cases where we can short circuit:
// 1. time aggregation = (rate|increase) and space aggregation = sum
// - rate = sum(value)/step, increase = sum(value) - sum of sums is same as sum of all values
//
// 2. time aggregation = sum and space aggregation = sum
// - sum of sums is same as sum of all values
//
// 3. time aggregation = min and space aggregation = min
// - min of mins is same as min of all values
//
// 4. time aggregation = max and space aggregation = max
// - max of maxs is same as max of all values
//
// 5. special case exphist, there is no need for per series/fingerprint aggregation
// we can directly use the quantilesDDMerge function
//
// all of this is true only for delta metrics
func (b *MetricQueryStatementBuilder) CanShortCircuitDelta(q qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) bool {
if q.Aggregations[0].Temporality != metrictypes.Delta {
return false
}
ta := q.Aggregations[0].TimeAggregation
sa := q.Aggregations[0].SpaceAggregation
if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
return true
}
if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
return true
}
if q.Aggregations[0].Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
return true
}
return false
}
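The justification in the removed comment rests on standard aggregation identities over a partition of samples into series; for delta metrics the per-series pass can therefore be skipped:

$$\sum_{s}\sum_{i \in s} v_i = \sum_{i} v_i, \qquad \min_{s}\,\min_{i \in s} v_i = \min_{i} v_i, \qquad \max_{s}\,\max_{i \in s} v_i = \max_{i} v_i$$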
func (b *MetricQueryStatementBuilder) buildPipelineStatement(
ctx context.Context,
start, end uint64,
@@ -256,10 +168,11 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
return nil, err
}
if b.CanShortCircuitDelta(query) {
if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
// spatial_aggregation_cte directly for certain delta queries
frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
if frag != "" {
if frag, args, err := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -273,8 +186,9 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -294,7 +208,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
timeSeriesCTEArgs []any,
) (string, []any) {
) (string, []any, error) {
stepSec := int64(query.StepInterval.Seconds())
sb := sqlbuilder.NewSelectBuilder()
@@ -307,11 +221,15 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol := AggregationColumnForSamplesTable(
aggCol, err := AggregationColumnForSamplesTable(
start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -334,7 +252,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}
func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
@@ -437,8 +355,12 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -461,7 +383,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
}
func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
ctx context.Context,
_ context.Context,
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
@@ -479,7 +401,10 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
baseSb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
@@ -496,36 +421,25 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
// ! TODO (balanikaran) Get OrgID via function parameter instead of valuer.GenerateUUID()
interpolationEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureInterpolationEnabled, featuretypes.NewFlaggerEvaluationContext(valuer.GenerateUUID()))
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
if interpolationEnabled {
rateExpr = RateWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", RateTmpl))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
if interpolationEnabled {
incExpr = IncreaseWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", IncreaseTmpl))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -534,7 +448,6 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
}
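For reference, the RateTmpl expression substituted above (it appears verbatim in the updated test expectations later in this diff) marks the first row of each series window as NaN instead of diffing against a synthetic zero/start-time baseline. Reformatted here for readability:
multiIf(
    row_number() OVER rate_window = 1, nan,                                    -- first sample of a series: no previous point to diff against
    (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0,
        per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window),          -- counter reset: treat the current value as the increase
    (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)
)
IncreaseTmpl presumably follows the same shape without normalizing by the time delta.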
// because rate interpolation is not enabled anywhere, due to gaps in the logic w.r.t. cache handling, it hasn't been considered for the multi-temporality path
func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
_ context.Context,
start, end uint64,
@@ -553,18 +466,32 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggForDeltaTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggForCumulativeTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggForDeltaTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggForCumulativeTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggForDeltaTemporality = fmt.Sprintf("%s/%d", aggForDeltaTemporality, stepSec)
}
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, aggForCumulativeTemporality, aggForCumulativeTemporality, start)
rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
sb.SelectMore(rateExpr)
case metrictypes.TimeAggregationIncrease:
increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, start)
increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
sb.SelectMore(increaseExpr)
default:
expr := fmt.Sprintf(OthersMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality)
@@ -592,7 +519,14 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any) {
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
@@ -609,7 +543,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}
func (b *MetricQueryStatementBuilder) BuildFinalSelect(
@@ -641,9 +575,7 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
quantile,
))
sb.From("__spatial_aggregation_cte")
for _, g := range query.GroupBy {
sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
@@ -659,6 +591,8 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.Where(rewrittenExpr)
}
}
sb.OrderBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("ts")
q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return &qbtypes.Statement{Query: combined + q, Args: append(args, a...)}, nil

View File

@@ -50,7 +50,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -83,7 +83,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -116,7 +116,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
@@ -148,7 +148,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
@@ -181,7 +181,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -210,7 +210,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,

View File

@@ -3,6 +3,7 @@ package telemetrymetrics
import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)
@@ -168,7 +169,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) string {
) (string, error) {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -298,5 +299,12 @@ func AggregationColumnForSamplesTable(
}
}
}
return aggregationColumn
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
}
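A minimal caller sketch under the new two-value signature (the call sites updated earlier in this diff follow the same shape):
aggCol, err := AggregationColumnForSamplesTable(
	start, end,
	query.Aggregations[0].Type, query.Aggregations[0].Temporality,
	query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
)
if err != nil {
	// an unrecognized time aggregation now surfaces the typed error above
	// instead of silently producing an empty column expression
	return "", nil, err
}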

View File

@@ -35,13 +35,7 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
if operator.IsStringSearchOperator() {
value = querybuilder.FormatValueForContains(value)
}

View File

@@ -152,7 +152,9 @@ func (f FilterOperator) IsStringSearchOperator() bool {
FilterOperatorILike,
FilterOperatorNotILike,
FilterOperatorLike,
FilterOperatorNotLike:
FilterOperatorNotLike,
FilterOperatorRegexp,
FilterOperatorNotRegexp:
return true
default:
return false
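A small caller-side sketch of the predicate after this change; regexp operators now take the same value-formatting path as contains/like in conditionFor above:
qbtypes.FilterOperatorContains.IsStringSearchOperator()  // true
qbtypes.FilterOperatorRegexp.IsStringSearchOperator()    // true (newly included)
qbtypes.FilterOperatorNotRegexp.IsStringSearchOperator() // true (newly included)
// operators outside the listed set fall through to the default branch: false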

View File

@@ -3,6 +3,7 @@ package querybuildertypesv5
import (
"fmt"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -174,3 +175,54 @@ func (q *QueryBuilderQuery[T]) Normalize() {
}
}
// Fastpath (no fingerprint grouping)
// CanShortCircuitDelta reports whether we can use the optimized query
// for the given aggregation.
// This is used to skip the group by fingerprint, which improves performance
// for certain queries.
// cases where we can short circuit:
// 1. time aggregation = (rate|increase) and space aggregation = sum
// - rate = sum(value)/step, increase = sum(value); the sum of sums is the same as the sum of all values
//
// 2. time aggregation = sum and space aggregation = sum
// - the sum of sums is the same as the sum of all values
//
// 3. time aggregation = min and space aggregation = min
// - the min of mins is the same as the min of all values
//
// 4. time aggregation = max and space aggregation = max
// - the max of maxes is the same as the max of all values
//
// 5. special case for exponential histograms: there is no need for per-series/fingerprint aggregation;
// we can use the quantilesDDMerge function directly
//
// all of this holds only for delta metrics
func CanShortCircuitDelta(metricAgg MetricAggregation) bool {
if metricAgg.Temporality != metrictypes.Delta {
return false
}
ta := metricAgg.TimeAggregation
sa := metricAgg.SpaceAggregation
if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) &&
sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
return true
}
if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
return true
}
if metricAgg.Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
return true
}
return false
}
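A hedged usage sketch of the matrix above, caller-side as in buildPipelineStatement earlier in this diff:
agg := qbtypes.MetricAggregation{
	Temporality:      metrictypes.Delta,
	TimeAggregation:  metrictypes.TimeAggregationRate,
	SpaceAggregation: metrictypes.SpaceAggregationSum,
}
qbtypes.CanShortCircuitDelta(agg) // true: the sum of per-series rates equals the rate of the overall sum

agg.TimeAggregation = metrictypes.TimeAggregationAvg
qbtypes.CanShortCircuitDelta(agg) // false: an avg of per-series avgs is not the avg of all points

agg.Temporality = metrictypes.Cumulative
qbtypes.CanShortCircuitDelta(agg) // false: only delta metrics qualify, whatever the aggregations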

View File

@@ -12,6 +12,7 @@ import (
var (
ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
ErrBetweenValuesType = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values of the number type")
ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
)

View File

@@ -75,7 +75,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypeFormula:
var spec QueryBuilderFormula
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "formula spec"); err != nil {
return wrapUnmarshalError(err, "invalid formula spec: %v", err)
}
@@ -83,7 +83,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypeJoin:
var spec QueryBuilderJoin
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "join spec"); err != nil {
return wrapUnmarshalError(err, "invalid join spec: %v", err)
}
@@ -98,7 +98,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypePromQL:
var spec PromQuery
// TODO: use json.Unmarshal here after implementing custom unmarshaler for PromQuery
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for PromQuery
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "PromQL spec"); err != nil {
return wrapUnmarshalError(err, "invalid PromQL spec: %v", err)
}
@@ -106,7 +106,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypeClickHouseSQL:
var spec ClickHouseQuery
// TODO: use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "ClickHouse SQL spec"); err != nil {
return wrapUnmarshalError(err, "invalid ClickHouse SQL spec: %v", err)
}
@@ -439,7 +439,7 @@ func (r *QueryRangeRequest) GetQueriesSupportingZeroDefault() map[string]bool {
expr = strings.ToLower(expr)
// only pure additive/counting operations should default to zero,
// while statistical/analytical operations should show gaps when there's no data to analyze.
// TODO: use newExprVisitor for getting the function used in the expression
// TODO(srikanthccv): use newExprVisitor for getting the function used in the expression
if strings.HasPrefix(expr, "count(") ||
strings.HasPrefix(expr, "count_distinct(") ||
strings.HasPrefix(expr, "sum(") ||

View File

@@ -21,3 +21,12 @@ var (
// []Bucket (struct{Lower,Upper,Count float64}), example: histogram
RequestTypeDistribution = RequestType{valuer.NewString("distribution")}
)
// IsAggregation returns true for request types that produce aggregated results
// (time_series, scalar, distribution). For these types, fields like groupBy,
// having, aggregations, and orderBy (with aggregation key validation) are meaningful.
// For non-aggregation types (raw, raw_stream, trace), those fields are ignored
// and don't need to be validated.
func (r RequestType) IsAggregation() bool {
return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}
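Concretely, for the request types defined above:
RequestTypeTimeSeries.IsAggregation()   // true
RequestTypeScalar.IsAggregation()       // true
RequestTypeDistribution.IsAggregation() // true
RequestTypeRaw.IsAggregation()          // false: selectFields are validated instead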

View File

@@ -10,54 +10,78 @@ import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// queryName returns the name from any query envelope spec type.
func (e QueryEnvelope) queryName() string {
switch spec := e.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.Name
case QueryBuilderQuery[LogAggregation]:
return spec.Name
case QueryBuilderQuery[MetricAggregation]:
return spec.Name
case QueryBuilderFormula:
return spec.Name
case QueryBuilderTraceOperator:
return spec.Name
case QueryBuilderJoin:
return spec.Name
case PromQuery:
return spec.Name
case ClickHouseQuery:
return spec.Name
}
return ""
}
// isDisabled returns the disabled status from any query envelope spec type.
func (e QueryEnvelope) isDisabled() bool {
switch spec := e.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
return spec.Disabled
case QueryBuilderQuery[LogAggregation]:
return spec.Disabled
case QueryBuilderQuery[MetricAggregation]:
return spec.Disabled
case QueryBuilderFormula:
return spec.Disabled
case QueryBuilderTraceOperator:
return spec.Disabled
case QueryBuilderJoin:
return spec.Disabled
case PromQuery:
return spec.Disabled
case ClickHouseQuery:
return spec.Disabled
}
return false
}
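A quick sketch of the two helpers, mirroring the table-driven tests added later in this diff:
QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Name: "P1"}}.queryName()                 // "P1"
QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Disabled: true}}.isDisabled() // true
QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{}}.queryName()  // ""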
// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
name := envelope.queryName()
var typeLabel string
switch envelope.Type {
case QueryTypeBuilder, QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
if spec.Name != "" {
return fmt.Sprintf("query '%s'", spec.Name)
}
return fmt.Sprintf("trace query at position %d", index+1)
case QueryBuilderQuery[LogAggregation]:
if spec.Name != "" {
return fmt.Sprintf("query '%s'", spec.Name)
}
return fmt.Sprintf("log query at position %d", index+1)
case QueryBuilderQuery[MetricAggregation]:
if spec.Name != "" {
return fmt.Sprintf("query '%s'", spec.Name)
}
return fmt.Sprintf("metric query at position %d", index+1)
}
typeLabel = "query"
case QueryTypeFormula:
if spec, ok := envelope.Spec.(QueryBuilderFormula); ok && spec.Name != "" {
return fmt.Sprintf("formula '%s'", spec.Name)
}
return fmt.Sprintf("formula at position %d", index+1)
typeLabel = "formula"
case QueryTypeTraceOperator:
if spec, ok := envelope.Spec.(QueryBuilderTraceOperator); ok && spec.Name != "" {
return fmt.Sprintf("trace operator '%s'", spec.Name)
}
return fmt.Sprintf("trace operator at position %d", index+1)
typeLabel = "trace operator"
case QueryTypeJoin:
if spec, ok := envelope.Spec.(QueryBuilderJoin); ok && spec.Name != "" {
return fmt.Sprintf("join '%s'", spec.Name)
}
return fmt.Sprintf("join at position %d", index+1)
typeLabel = "join"
case QueryTypePromQL:
if spec, ok := envelope.Spec.(PromQuery); ok && spec.Name != "" {
return fmt.Sprintf("PromQL query '%s'", spec.Name)
}
return fmt.Sprintf("PromQL query at position %d", index+1)
typeLabel = "PromQL query"
case QueryTypeClickHouseSQL:
if spec, ok := envelope.Spec.(ClickHouseQuery); ok && spec.Name != "" {
return fmt.Sprintf("ClickHouse query '%s'", spec.Name)
}
return fmt.Sprintf("ClickHouse query at position %d", index+1)
typeLabel = "ClickHouse query"
default:
typeLabel = "query"
}
return fmt.Sprintf("query at position %d", index+1)
if name != "" {
return fmt.Sprintf("%s '%s'", typeLabel, name)
}
return fmt.Sprintf("%s at position %d", typeLabel, index+1)
}
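For example, matching the format strings above:
getQueryIdentifier(QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Name: "F1"}}, 2) // "formula 'F1'"
getQueryIdentifier(QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{}}, 2)                      // "PromQL query at position 3"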
const (
@@ -72,11 +96,12 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
return err
}
// Validate aggregations only for non-raw request types
if requestType != RequestTypeRaw && requestType != RequestTypeRawStream && requestType != RequestTypeTrace {
if err := q.validateAggregations(); err != nil {
return err
}
if err := q.validateAggregations(requestType); err != nil {
return err
}
if err := q.validateGroupBy(requestType); err != nil {
return err
}
// Validate limit and pagination
@@ -94,32 +119,23 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
return err
}
if requestType != RequestTypeRaw && requestType != RequestTypeTrace && len(q.Aggregations) > 0 {
if err := q.validateOrderByForAggregation(); err != nil {
return err
}
} else {
if err := q.validateOrderBy(); err != nil {
return err
}
if err := q.validateOrderBy(requestType); err != nil {
return err
}
if requestType != RequestTypeRaw && requestType != RequestTypeTrace {
if err := q.validateHaving(); err != nil {
return err
}
}
if requestType == RequestTypeRaw {
if err := q.validateSelectFields(); err != nil {
return err
}
if err := q.validateSelectFields(requestType); err != nil {
return err
}
return nil
}
func (q *QueryBuilderQuery[T]) validateSelectFields() error {
func (q *QueryBuilderQuery[T]) validateSelectFields(requestType RequestType) error {
// selectFields don't apply to aggregation queries, skip validation
if requestType.IsAggregation() {
return nil
}
// isRoot and isEntryPoint are returned by the Metadata API, so if someone sends them, we have to reject the request.
for _, v := range q.SelectFields {
if v.Name == "isRoot" || v.Name == "isEntryPoint" {
@@ -132,6 +148,21 @@ func (q *QueryBuilderQuery[T]) validateSelectFields() error {
return nil
}
func (q *QueryBuilderQuery[T]) validateGroupBy(requestType RequestType) error {
// groupBy doesn't apply to non-aggregation queries, skip validation
if !requestType.IsAggregation() {
return nil
}
for idx, item := range q.GroupBy {
if item.TelemetryFieldKey.Name == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput, "invalid empty key name for group by at index %d", idx,
)
}
}
return nil
}
func (q *QueryBuilderQuery[T]) validateSignal() error {
// Signal validation is handled during unmarshaling in req.go
// Valid signals are: metrics, traces, logs
@@ -152,7 +183,12 @@ func (q *QueryBuilderQuery[T]) validateSignal() error {
}
}
func (q *QueryBuilderQuery[T]) validateAggregations() error {
func (q *QueryBuilderQuery[T]) validateAggregations(requestType RequestType) error {
// aggregations don't apply to non-aggregation queries, skip validation
if !requestType.IsAggregation() {
return nil
}
// At least one aggregation required for non-disabled queries
if len(q.Aggregations) == 0 && !q.Disabled {
return errors.NewInvalidInputf(
@@ -179,14 +215,6 @@ func (q *QueryBuilderQuery[T]) validateAggregations() error {
aggId,
)
}
// Validate metric-specific aggregations
if err := validateMetricAggregation(v); err != nil {
aggId := fmt.Sprintf("aggregation #%d", i+1)
if q.Name != "" {
aggId = fmt.Sprintf("aggregation #%d in query '%s'", i+1, q.Name)
}
return wrapValidationError(err, aggId, "invalid metric %s: %s")
}
case TraceAggregation:
if v.Expression == "" {
aggId := fmt.Sprintf("aggregation #%d", i+1)
@@ -301,7 +329,7 @@ func (q *QueryBuilderQuery[T]) validateSecondaryAggregations() error {
return nil
}
func (q *QueryBuilderQuery[T]) validateOrderBy() error {
func (q *QueryBuilderQuery[T]) validateOrderBy(requestType RequestType) error {
for i, order := range q.Order {
// Direction validation is handled by the OrderDirection type
if order.Direction != OrderDirectionAsc && order.Direction != OrderDirectionDesc {
@@ -319,6 +347,12 @@ func (q *QueryBuilderQuery[T]) validateOrderBy() error {
)
}
}
// aggregation-specific order key validation only applies to aggregation queries
if requestType.IsAggregation() {
return q.validateOrderByForAggregation()
}
return nil
}
@@ -328,10 +362,6 @@ func (q *QueryBuilderQuery[T]) validateOrderBy() error {
// 2. Aggregation expressions or aliases
// 3. Aggregation index (0, 1, 2, etc.)
func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
// First validate basic order by constraints
if err := q.validateOrderBy(); err != nil {
return err
}
validOrderKeys := make(map[string]bool)
@@ -401,22 +431,6 @@ func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
return nil
}
func (q *QueryBuilderQuery[T]) validateHaving() error {
if q.Having == nil || q.Having.Expression == "" {
return nil
}
// ensure that having is only used with aggregations
if len(q.Aggregations) == 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"having clause can only be used with aggregation queries. Use `filter.expression` instead",
)
}
return nil
}
// ValidateQueryRangeRequest validates the entire query range request
func (r *QueryRangeRequest) Validate() error {
// Validate time range
@@ -456,236 +470,20 @@ func (r *QueryRangeRequest) Validate() error {
// validateAllQueriesNotDisabled validates that at least one query in the composite query is enabled
func (r *QueryRangeRequest) validateAllQueriesNotDisabled() error {
allDisabled := true
for _, envelope := range r.CompositeQuery.Queries {
switch envelope.Type {
case QueryTypeBuilder, QueryTypeSubQuery:
switch spec := envelope.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
if !spec.Disabled {
allDisabled = false
}
case QueryBuilderQuery[LogAggregation]:
if !spec.Disabled {
allDisabled = false
}
case QueryBuilderQuery[MetricAggregation]:
if !spec.Disabled {
allDisabled = false
}
}
case QueryTypeFormula:
if spec, ok := envelope.Spec.(QueryBuilderFormula); ok && !spec.Disabled {
allDisabled = false
}
case QueryTypeTraceOperator:
if spec, ok := envelope.Spec.(QueryBuilderTraceOperator); ok && !spec.Disabled {
allDisabled = false
}
case QueryTypeJoin:
if spec, ok := envelope.Spec.(QueryBuilderJoin); ok && !spec.Disabled {
allDisabled = false
}
case QueryTypePromQL:
if spec, ok := envelope.Spec.(PromQuery); ok && !spec.Disabled {
allDisabled = false
}
case QueryTypeClickHouseSQL:
if spec, ok := envelope.Spec.(ClickHouseQuery); ok && !spec.Disabled {
allDisabled = false
}
}
// Early exit if we find at least one enabled query
if !allDisabled {
break
if !envelope.isDisabled() {
return nil
}
}
if allDisabled {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"all queries are disabled - at least one query must be enabled",
)
}
return nil
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"all queries are disabled - at least one query must be enabled",
)
}
func (r *QueryRangeRequest) validateCompositeQuery() error {
// Validate queries in composite query
if len(r.CompositeQuery.Queries) == 0 {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"at least one query is required",
)
}
// Track query names for uniqueness (only for non-formula queries)
queryNames := make(map[string]bool)
// Validate each query based on its type
for i, envelope := range r.CompositeQuery.Queries {
switch envelope.Type {
case QueryTypeBuilder, QueryTypeSubQuery:
// Validate based on the concrete type
switch spec := envelope.Spec.(type) {
case QueryBuilderQuery[TraceAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
if spec.Name != "" {
if queryNames[spec.Name] {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"duplicate query name '%s'",
spec.Name,
)
}
queryNames[spec.Name] = true
}
case QueryBuilderQuery[LogAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
if spec.Name != "" {
if queryNames[spec.Name] {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"duplicate query name '%s'",
spec.Name,
)
}
queryNames[spec.Name] = true
}
case QueryBuilderQuery[MetricAggregation]:
if err := spec.Validate(r.RequestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for non-formula context
if spec.Name != "" {
if queryNames[spec.Name] {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"duplicate query name '%s'",
spec.Name,
)
}
queryNames[spec.Name] = true
}
default:
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown spec type for %s",
queryId,
)
}
case QueryTypeFormula:
// Formula validation is handled separately
spec, ok := envelope.Spec.(QueryBuilderFormula)
if !ok {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Expression == "" {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"expression is required for %s",
queryId,
)
}
case QueryTypeJoin:
// Join validation is handled separately
_, ok := envelope.Spec.(QueryBuilderJoin)
if !ok {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
case QueryTypeTraceOperator:
spec, ok := envelope.Spec.(QueryBuilderTraceOperator)
if !ok {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Expression == "" {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"expression is required for %s",
queryId,
)
}
case QueryTypePromQL:
// PromQL validation is handled separately
spec, ok := envelope.Spec.(PromQuery)
if !ok {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Query == "" {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
queryId,
)
}
case QueryTypeClickHouseSQL:
// ClickHouse SQL validation is handled separately
spec, ok := envelope.Spec.(ClickHouseQuery)
if !ok {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid spec for %s",
queryId,
)
}
if spec.Query == "" {
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"query expression is required for %s",
queryId,
)
}
default:
queryId := getQueryIdentifier(envelope, i)
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"unknown query type '%s' for %s",
envelope.Type,
queryId,
).WithAdditional(
"Valid query types are: builder_query, builder_formula, builder_join, promql, clickhouse_sql, trace_operator",
)
}
}
return nil
return r.CompositeQuery.Validate(r.RequestType)
}
// Validate performs validation on CompositeQuery
@@ -697,12 +495,29 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
)
}
// Validate each query
// Track query names for uniqueness (only for builder queries)
queryNames := make(map[string]bool)
for i, envelope := range c.Queries {
if err := validateQueryEnvelope(envelope, requestType); err != nil {
queryId := getQueryIdentifier(envelope, i)
return wrapValidationError(err, queryId, "invalid %s: %s")
}
// Check name uniqueness for builder queries
if envelope.Type == QueryTypeBuilder || envelope.Type == QueryTypeSubQuery {
name := envelope.queryName()
if name != "" {
if queryNames[name] {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"duplicate query name '%s'",
name,
)
}
queryNames[name] = true
}
}
}
return nil
@@ -803,85 +618,3 @@ func validateQueryEnvelope(envelope QueryEnvelope, requestType RequestType) erro
)
}
}
// validateMetricAggregation validates metric-specific aggregation parameters
func validateMetricAggregation(agg MetricAggregation) error {
// we can't decide anything here without known temporality
if agg.Temporality == metrictypes.Unknown {
return nil
}
// Validate that rate/increase are only used with appropriate temporalities
if agg.TimeAggregation == metrictypes.TimeAggregationRate || agg.TimeAggregation == metrictypes.TimeAggregationIncrease {
// For gauge metrics (Unspecified temporality), rate/increase doesn't make sense
if agg.Temporality == metrictypes.Unspecified {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"rate/increase aggregation cannot be used with gauge metrics (unspecified temporality)",
)
}
}
// Validate percentile aggregations are only used with histogram types
if agg.SpaceAggregation.IsPercentile() {
if agg.Type != metrictypes.HistogramType && agg.Type != metrictypes.ExpHistogramType && agg.Type != metrictypes.SummaryType {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"percentile aggregation can only be used with histogram or summary metric types",
)
}
}
// Validate time aggregation values
validTimeAggregations := []metrictypes.TimeAggregation{
metrictypes.TimeAggregationUnspecified,
metrictypes.TimeAggregationLatest,
metrictypes.TimeAggregationSum,
metrictypes.TimeAggregationAvg,
metrictypes.TimeAggregationMin,
metrictypes.TimeAggregationMax,
metrictypes.TimeAggregationCount,
metrictypes.TimeAggregationCountDistinct,
metrictypes.TimeAggregationRate,
metrictypes.TimeAggregationIncrease,
}
validTimeAgg := slices.Contains(validTimeAggregations, agg.TimeAggregation)
if !validTimeAgg {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid time aggregation: %s",
agg.TimeAggregation.StringValue(),
).WithAdditional(
"Valid time aggregations: latest, sum, avg, min, max, count, count_distinct, rate, increase",
)
}
// Validate space aggregation values
validSpaceAggregations := []metrictypes.SpaceAggregation{
metrictypes.SpaceAggregationUnspecified,
metrictypes.SpaceAggregationSum,
metrictypes.SpaceAggregationAvg,
metrictypes.SpaceAggregationMin,
metrictypes.SpaceAggregationMax,
metrictypes.SpaceAggregationCount,
metrictypes.SpaceAggregationPercentile50,
metrictypes.SpaceAggregationPercentile75,
metrictypes.SpaceAggregationPercentile90,
metrictypes.SpaceAggregationPercentile95,
metrictypes.SpaceAggregationPercentile99,
}
validSpaceAgg := slices.Contains(validSpaceAggregations, agg.SpaceAggregation)
if !validSpaceAgg {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"invalid space aggregation: %s",
agg.SpaceAggregation.StringValue(),
).WithAdditional(
"Valid space aggregations: sum, avg, min, max, count, p50, p75, p90, p95, p99",
)
}
return nil
}

View File

@@ -333,6 +333,617 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
}
}
func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
tests := []struct {
name string
request QueryRangeRequest
wantErr bool
errMsg string
}{
{
name: "empty composite query should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{},
},
},
wantErr: true,
errMsg: "at least one query is required",
},
{
name: "duplicate builder query names should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalLogs,
},
},
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalTraces,
},
},
},
},
},
wantErr: true,
errMsg: "duplicate query name 'A'",
},
{
name: "duplicate names across log and metric builder queries should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "X",
Disabled: true,
Signal: telemetrytypes.SignalLogs,
},
},
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[MetricAggregation]{
Name: "X",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
},
},
},
},
},
wantErr: true,
errMsg: "duplicate query name 'X'",
},
{
name: "same name on formula and builder should not conflict",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
},
},
},
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "A",
Expression: "A + 1",
},
},
},
},
},
wantErr: false,
},
{
name: "formula with empty expression should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "F1",
Expression: "",
},
},
},
},
},
wantErr: true,
errMsg: "expression is required",
},
{
name: "promql with empty query should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypePromQL,
Spec: PromQuery{
Name: "P1",
Query: "",
},
},
},
},
},
wantErr: true,
errMsg: "PromQL query is required",
},
{
name: "clickhouse with empty query should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeClickHouseSQL,
Spec: ClickHouseQuery{
Name: "CH1",
Query: "",
},
},
},
},
},
wantErr: true,
errMsg: "ClickHouse SQL query is required",
},
{
name: "trace operator with empty expression should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeTraceOperator,
Spec: QueryBuilderTraceOperator{
Name: "TO1",
Expression: "",
},
},
},
},
},
wantErr: true,
errMsg: "expression is required",
},
{
name: "valid promql query should pass",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypePromQL,
Spec: PromQuery{
Name: "P1",
Query: "up",
},
},
},
},
},
wantErr: false,
},
{
name: "valid clickhouse query should pass",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeClickHouseSQL,
Spec: ClickHouseQuery{
Name: "CH1",
Query: "SELECT count() FROM logs",
},
},
},
},
},
wantErr: false,
},
{
name: "valid mixed queries with unique builder names should pass",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeTimeSeries,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
},
},
},
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "B",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{Expression: "count()"},
},
},
},
{
Type: QueryTypePromQL,
Spec: PromQuery{
Name: "C",
Query: "up",
},
},
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := tt.request.Validate()
if tt.wantErr {
if err == nil {
t.Errorf("Validate() expected error but got none")
return
}
if tt.errMsg != "" && !contains(err.Error(), tt.errMsg) {
t.Errorf("Validate() error = %v, want to contain %v", err.Error(), tt.errMsg)
}
} else {
if err != nil {
t.Errorf("Validate() unexpected error = %v", err)
}
}
})
}
}
func TestValidateQueryEnvelope(t *testing.T) {
tests := []struct {
name string
envelope QueryEnvelope
requestType RequestType
wantErr bool
errMsg string
}{
{
name: "valid builder query with trace aggregation",
envelope: QueryEnvelope{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{Expression: "count()"},
},
},
},
requestType: RequestTypeTimeSeries,
wantErr: false,
},
{
name: "valid formula with expression",
envelope: QueryEnvelope{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "F1",
Expression: "A + B",
},
},
requestType: RequestTypeTimeSeries,
wantErr: false,
},
{
name: "formula with empty expression should fail",
envelope: QueryEnvelope{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "F1",
Expression: "",
},
},
requestType: RequestTypeTimeSeries,
wantErr: true,
errMsg: "expression is required",
},
{
name: "valid join spec",
envelope: QueryEnvelope{
Type: QueryTypeJoin,
Spec: QueryBuilderJoin{
Name: "J1",
},
},
requestType: RequestTypeTimeSeries,
wantErr: false,
},
{
name: "valid trace operator",
envelope: QueryEnvelope{
Type: QueryTypeTraceOperator,
Spec: QueryBuilderTraceOperator{
Name: "TO1",
Expression: "count()",
},
},
requestType: RequestTypeTimeSeries,
wantErr: false,
},
{
name: "trace operator with empty expression should fail",
envelope: QueryEnvelope{
Type: QueryTypeTraceOperator,
Spec: QueryBuilderTraceOperator{
Name: "TO1",
Expression: "",
},
},
requestType: RequestTypeTimeSeries,
wantErr: true,
errMsg: "expression is required",
},
{
name: "promql with empty query should fail",
envelope: QueryEnvelope{
Type: QueryTypePromQL,
Spec: PromQuery{
Name: "P1",
Query: "",
},
},
requestType: RequestTypeTimeSeries,
wantErr: true,
errMsg: "PromQL query is required",
},
{
name: "clickhouse with empty query should fail",
envelope: QueryEnvelope{
Type: QueryTypeClickHouseSQL,
Spec: ClickHouseQuery{
Name: "CH1",
Query: "",
},
},
requestType: RequestTypeTimeSeries,
wantErr: true,
errMsg: "ClickHouse SQL query is required",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
err := validateQueryEnvelope(tt.envelope, tt.requestType)
if tt.wantErr {
if err == nil {
t.Errorf("validateQueryEnvelope() expected error but got none")
return
}
if tt.errMsg != "" && !contains(err.Error(), tt.errMsg) {
t.Errorf("validateQueryEnvelope() error = %v, want to contain %v", err.Error(), tt.errMsg)
}
} else {
if err != nil {
t.Errorf("validateQueryEnvelope() unexpected error = %v", err)
}
}
})
}
}
func TestQueryEnvelope_Helpers(t *testing.T) {
t.Run("queryName", func(t *testing.T) {
tests := []struct {
name string
envelope QueryEnvelope
want string
}{
{
name: "trace builder query",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[TraceAggregation]{Name: "A"}},
want: "A",
},
{
name: "log builder query",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Name: "B"}},
want: "B",
},
{
name: "metric builder query",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[MetricAggregation]{Name: "C"}},
want: "C",
},
{
name: "formula",
envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Name: "F1"}},
want: "F1",
},
{
name: "promql",
envelope: QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Name: "P1"}},
want: "P1",
},
{
name: "clickhouse",
envelope: QueryEnvelope{Type: QueryTypeClickHouseSQL, Spec: ClickHouseQuery{Name: "CH1"}},
want: "CH1",
},
{
name: "trace operator",
envelope: QueryEnvelope{Type: QueryTypeTraceOperator, Spec: QueryBuilderTraceOperator{Name: "TO1"}},
want: "TO1",
},
{
name: "join",
envelope: QueryEnvelope{Type: QueryTypeJoin, Spec: QueryBuilderJoin{Name: "J1"}},
want: "J1",
},
{
name: "empty name",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{}},
want: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.envelope.queryName()
if got != tt.want {
t.Errorf("queryName() = %q, want %q", got, tt.want)
}
})
}
})
t.Run("isDisabled", func(t *testing.T) {
tests := []struct {
name string
envelope QueryEnvelope
want bool
}{
{
name: "enabled builder query",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Disabled: false}},
want: false,
},
{
name: "disabled builder query",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Disabled: true}},
want: true,
},
{
name: "disabled formula",
envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Disabled: true}},
want: true,
},
{
name: "enabled promql",
envelope: QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Disabled: false}},
want: false,
},
{
name: "disabled clickhouse",
envelope: QueryEnvelope{Type: QueryTypeClickHouseSQL, Spec: ClickHouseQuery{Disabled: true}},
want: true,
},
{
name: "disabled trace operator",
envelope: QueryEnvelope{Type: QueryTypeTraceOperator, Spec: QueryBuilderTraceOperator{Disabled: true}},
want: true,
},
{
name: "disabled join",
envelope: QueryEnvelope{Type: QueryTypeJoin, Spec: QueryBuilderJoin{Disabled: true}},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.envelope.isDisabled()
if got != tt.want {
t.Errorf("isDisabled() = %v, want %v", got, tt.want)
}
})
}
})
}
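The helper expectations above suggest both accessors dispatch on the concrete Spec type. As a hedged sketch only (assuming Spec is held as an `any` and each spec struct exposes a Name field), queryName plausibly reduces to a type switch; isDisabled would follow the same shape, reading Disabled instead:

// Sketch, not the actual implementation: dispatch on the concrete spec type.
func (q QueryEnvelope) queryNameSketch() string {
	switch spec := q.Spec.(type) {
	case QueryBuilderQuery[TraceAggregation]:
		return spec.Name
	case QueryBuilderQuery[LogAggregation]:
		return spec.Name
	case QueryBuilderQuery[MetricAggregation]:
		return spec.Name
	case QueryBuilderFormula:
		return spec.Name
	case PromQuery:
		return spec.Name
	case ClickHouseQuery:
		return spec.Name
	case QueryBuilderTraceOperator:
		return spec.Name
	case QueryBuilderJoin:
		return spec.Name
	default:
		return "" // unrecognized spec types yield an empty name
	}
}

The "empty name" test case above is covered either way: a zero-value spec returns its zero-value Name.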
func TestGetQueryIdentifier(t *testing.T) {
tests := []struct {
name string
envelope QueryEnvelope
index int
want string
}{
{
name: "builder query with name",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Name: "A"}},
index: 0,
want: "query 'A'",
},
{
name: "builder query without name",
envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{}},
index: 2,
want: "query at position 3",
},
{
name: "formula with name",
envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Name: "F1"}},
index: 0,
want: "formula 'F1'",
},
{
name: "formula without name",
envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{}},
index: 1,
want: "formula at position 2",
},
{
name: "promql with name",
envelope: QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Name: "P1"}},
index: 0,
want: "PromQL query 'P1'",
},
{
name: "clickhouse with name",
envelope: QueryEnvelope{Type: QueryTypeClickHouseSQL, Spec: ClickHouseQuery{Name: "CH1"}},
index: 0,
want: "ClickHouse query 'CH1'",
},
{
name: "trace operator with name",
envelope: QueryEnvelope{Type: QueryTypeTraceOperator, Spec: QueryBuilderTraceOperator{Name: "TO1"}},
index: 0,
want: "trace operator 'TO1'",
},
{
name: "join without name",
envelope: QueryEnvelope{Type: QueryTypeJoin, Spec: QueryBuilderJoin{}},
index: 0,
want: "join at position 1",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := getQueryIdentifier(tt.envelope, tt.index)
if got != tt.want {
t.Errorf("getQueryIdentifier() = %q, want %q", got, tt.want)
}
})
}
}
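From the want strings, getQueryIdentifier prefers a quoted name and otherwise falls back to a 1-based position, with a per-type label. A minimal sketch consistent with these cases (the label table is inferred from the expectations, and fmt is assumed to be imported in this file):

// Sketch inferred from the test expectations; the label map is an assumption.
func getQueryIdentifierSketch(envelope QueryEnvelope, index int) string {
	labels := map[QueryType]string{
		QueryTypeBuilder:       "query",
		QueryTypeFormula:       "formula",
		QueryTypePromQL:        "PromQL query",
		QueryTypeClickHouseSQL: "ClickHouse query",
		QueryTypeTraceOperator: "trace operator",
		QueryTypeJoin:          "join",
	}
	if name := envelope.queryName(); name != "" {
		return fmt.Sprintf("%s '%s'", labels[envelope.Type], name)
	}
	// index is zero-based internally; positions are reported one-based.
	return fmt.Sprintf("%s at position %d", labels[envelope.Type], index+1)
}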
func TestQueryRangeRequest_ValidateOrderByForAggregation(t *testing.T) {
tests := []struct {
name string
@@ -512,4 +1123,139 @@ func TestQueryRangeRequest_ValidateOrderByForAggregation(t *testing.T) {
}
})
}
}
}
func TestRequestType_IsAggregation(t *testing.T) {
tests := []struct {
name string
requestType RequestType
want bool
}{
{"time_series is aggregation", RequestTypeTimeSeries, true},
{"scalar is aggregation", RequestTypeScalar, true},
{"distribution is aggregation", RequestTypeDistribution, true},
{"raw is not aggregation", RequestTypeRaw, false},
{"raw_stream is not aggregation", RequestTypeRawStream, false},
{"trace is not aggregation", RequestTypeTrace, false},
{"unknown is not aggregation", RequestTypeUnknown, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := tt.requestType.IsAggregation()
if got != tt.want {
t.Errorf("IsAggregation() = %v, want %v", got, tt.want)
}
})
}
}
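The table pins IsAggregation to exactly three request types; a sketch consistent with it:

// Sketch: time series, scalar, and distribution aggregate; everything else does not.
func (r RequestType) isAggregationSketch() bool {
	switch r {
	case RequestTypeTimeSeries, RequestTypeScalar, RequestTypeDistribution:
		return true
	default: // raw, raw_stream, trace, unknown
		return false
	}
}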
func TestNonAggregationFieldsSkipped(t *testing.T) {
// Fields that only apply to aggregation queries (groupBy, having, aggregations)
// should be silently skipped for non-aggregation request types.
t.Run("groupBy ignored for raw request type", func(t *testing.T) {
query := QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
GroupBy: []GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
},
}
err := query.Validate(RequestTypeRaw)
if err != nil {
t.Errorf("expected no error for groupBy with raw request type, got: %v", err)
}
})
t.Run("groupBy validated for timeseries request type", func(t *testing.T) {
query := QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
},
GroupBy: []GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: ""}},
},
}
err := query.Validate(RequestTypeTimeSeries)
if err == nil {
t.Errorf("expected error for empty groupBy key with timeseries request type")
}
})
t.Run("having ignored for raw request type", func(t *testing.T) {
query := QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Having: &Having{Expression: "count() > 10"},
}
err := query.Validate(RequestTypeRaw)
if err != nil {
t.Errorf("expected no error for having with raw request type, got: %v", err)
}
})
t.Run("having ignored for trace request type", func(t *testing.T) {
query := QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Having: &Having{Expression: "count() > 10"},
}
err := query.Validate(RequestTypeTrace)
if err != nil {
t.Errorf("expected no error for having with trace request type, got: %v", err)
}
})
t.Run("aggregations ignored for raw request type", func(t *testing.T) {
query := QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
},
}
err := query.Validate(RequestTypeRaw)
if err != nil {
t.Errorf("expected no error for aggregations with raw request type, got: %v", err)
}
})
t.Run("aggregations ignored for raw_stream request type", func(t *testing.T) {
query := QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{
{Expression: "count()"},
},
}
err := query.Validate(RequestTypeRawStream)
if err != nil {
t.Errorf("expected no error for aggregations with raw_stream request type, got: %v", err)
}
})
t.Run("selectFields validated for raw but not timeseries", func(t *testing.T) {
query := QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
Aggregations: []TraceAggregation{
{Expression: "count()"},
},
SelectFields: []telemetrytypes.TelemetryFieldKey{
{Name: "isRoot"},
},
}
// Should error for raw (selectFields are validated)
err := query.Validate(RequestTypeRaw)
if err == nil {
t.Errorf("expected error for isRoot in selectFields with raw request type")
}
// Should pass for timeseries (selectFields skipped)
err = query.Validate(RequestTypeTimeSeries)
if err != nil {
t.Errorf("expected no error for isRoot in selectFields with timeseries request type, got: %v", err)
}
})
}
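Taken together, these subtests describe a gate inside Validate: aggregation-only fields (groupBy, having, aggregations) are checked only when the request type aggregates, while raw-oriented fields such as selectFields are checked only when it does not. A hedged sketch of that shape (anything beyond the names shown in the tests is an assumption):

// Sketch of the gating only, not the full validator.
func (q QueryBuilderQuery[T]) validateSketch(requestType RequestType) error {
	if requestType.IsAggregation() {
		// Aggregation-only fields are validated here...
		for _, g := range q.GroupBy {
			if g.TelemetryFieldKey.Name == "" {
				return fmt.Errorf("groupBy key name is required")
			}
		}
		// ...having and aggregations likewise; selectFields are skipped.
		return nil
	}
	// ...and silently skipped for raw/raw_stream/trace, where selectFields
	// are validated instead (e.g. rejecting isRoot for a raw request).
	return nil
}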

View File

@@ -78,18 +78,18 @@ func (f *TelemetryFieldKey) ArrayParentSelectors() []*FieldKeySelector {
func (f TelemetryFieldKey) String() string {
var sb strings.Builder
sb.WriteString(fmt.Sprintf("name=%s", f.Name))
fmt.Fprintf(&sb, "name=%s", f.Name)
if f.FieldContext != FieldContextUnspecified {
sb.WriteString(fmt.Sprintf(",context=%s", f.FieldContext.String))
fmt.Fprintf(&sb, ",context=%s", f.FieldContext.String)
}
if f.FieldDataType != FieldDataTypeUnspecified {
sb.WriteString(fmt.Sprintf(",datatype=%s", f.FieldDataType.StringValue()))
fmt.Fprintf(&sb, ",datatype=%s", f.FieldDataType.StringValue())
}
if f.Materialized {
sb.WriteString(",materialized=true")
}
if f.JSONDataType != nil {
sb.WriteString(fmt.Sprintf(",jsondatatype=%s", f.JSONDataType.StringValue()))
fmt.Fprintf(&sb, ",jsondatatype=%s", f.JSONDataType.StringValue())
}
if len(f.Indexes) > 0 {
sb.WriteString(",indexes=[")
@@ -97,7 +97,7 @@ func (f TelemetryFieldKey) String() string {
if i > 0 {
sb.WriteString("; ")
}
sb.WriteString(fmt.Sprintf("{type=%s, columnExpr=%s, indexExpr=%s}", index.Type.StringValue(), index.ColumnExpression, index.IndexExpression))
fmt.Fprintf(&sb, "{type=%s, columnExpr=%s, indexExpr=%s}", index.Type.StringValue(), index.ColumnExpression, index.IndexExpression)
}
sb.WriteString("]")
}
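The rewrite above swaps sb.WriteString(fmt.Sprintf(...)) for fmt.Fprintf(&sb, ...): *strings.Builder satisfies io.Writer, so Fprintf formats directly into the builder and skips the intermediate string allocation. A standalone illustration:

package main

import (
	"fmt"
	"strings"
)

func main() {
	var sb strings.Builder
	// Formats straight into the builder; no temporary string is built first.
	fmt.Fprintf(&sb, "name=%s", "service.name")
	fmt.Println(sb.String()) // name=service.name
}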
@@ -108,6 +108,17 @@ func (f TelemetryFieldKey) Text() string {
return TelemetryFieldKeyToText(&f)
}
// OverrideMetadataFrom copies the resolved metadata fields from src into f.
// This is used when adjusting user-provided keys to match known field definitions.
func (f *TelemetryFieldKey) OverrideMetadataFrom(src *TelemetryFieldKey) {
f.FieldContext = src.FieldContext
f.FieldDataType = src.FieldDataType
f.JSONDataType = src.JSONDataType
f.Indexes = src.Indexes
f.Materialized = src.Materialized
f.JSONPlan = src.JSONPlan
}
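A hypothetical call site for OverrideMetadataFrom, sketching the "adjust user-provided keys" flow the doc comment describes (lookupKnownField is an invented helper for illustration and does not exist in this package):

// Hypothetical usage sketch; lookupKnownField is not a real function here.
userKey := &TelemetryFieldKey{Name: "service.name"} // only a name from the user
if known := lookupKnownField(userKey.Name); known != nil {
	userKey.OverrideMetadataFrom(known) // context, data type, indexes, plan now match
}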
func (f *TelemetryFieldKey) Equal(key *TelemetryFieldKey) bool {
return f.Name == key.Name &&
f.FieldContext == key.FieldContext &&
@@ -225,11 +236,19 @@ func TelemetryFieldKeyToText(key *TelemetryFieldKey) string {
}
func FieldKeyToMaterializedColumnName(key *TelemetryFieldKey) string {
return fmt.Sprintf("`%s_%s_%s`", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
return fmt.Sprintf("`%s_%s_%s`",
key.FieldContext.String,
fieldDataTypes[key.FieldDataType.StringValue()].StringValue(),
strings.ReplaceAll(key.Name, ".", "$$"),
)
}
func FieldKeyToMaterializedColumnNameForExists(key *TelemetryFieldKey) string {
return fmt.Sprintf("`%s_%s_%s_exists`", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
return fmt.Sprintf("`%s_%s_%s_exists`",
key.FieldContext.String,
fieldDataTypes[key.FieldDataType.StringValue()].StringValue(),
strings.ReplaceAll(key.Name, ".", "$$"),
)
}
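For concreteness, a key with a resource context, string data type, and the name service.name would produce column names shaped like the comments below; the exact context and datatype segments depend on the enum StringValue()s, so treat this as illustrative only:

// Illustrative output only; segment spellings depend on the enum string values.
// FieldKeyToMaterializedColumnName(key)          => `resource_string_service$$name`
// FieldKeyToMaterializedColumnNameForExists(key) => `resource_string_service$$name_exists`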
type TelemetryFieldValues struct {

View File

@@ -14,10 +14,10 @@ from fixtures.alertutils import (
from fixtures.logger import setup_logger
from fixtures.utils import get_testdata_file_path
# test cases for match type and compare operators have a wait time of 30 seconds to verify the alert expectation.
# we've positioned the alert data to fire the alert on the first eval of the rule manager; the eval frequency
# for most alert rules is set to 15s, so considering this delay plus some delay from alert manager's
# group_wait and group_interval, even in the worst case most alerts should be triggered in about 30 seconds
# Alert test cases use a 30-second wait time to verify expected alert firing.
# Alert data is set up to trigger on the first rule manager evaluation.
# With a 15-second eval frequency for most rules, plus alertmanager's
# group_wait and group_interval delays, alerts should fire well within 30 seconds.
TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
types.AlertTestCase(
name="test_threshold_above_at_least_once",
@@ -25,6 +25,7 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
alert_data=[
types.AlertData(
type="metrics",
# active requests dummy data
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
@@ -115,30 +116,28 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last, pylint: disable=W0511
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed
# types.AlertTestCase(
# name="test_threshold_above_last",
# rule_path="alerts/test_scenarios/threshold_above_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_above_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_above_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_above_last",
rule_path="alerts/test_scenarios/threshold_above_last/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_last/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_above_last",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_below_at_least_once",
rule_path="alerts/test_scenarios/threshold_below_at_least_once/rule.json",
@@ -189,6 +188,7 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
alert_data=[
types.AlertData(
type="metrics",
# one series at rate ~5 + the rest at 0.01, so the total stays below 10
data_path="alerts/test_scenarios/threshold_below_in_total/alert_data.jsonl",
),
],
@@ -227,30 +227,28 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last,
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
# types.AlertTestCase(
# name="test_threshold_below_last",
# rule_path="alerts/test_scenarios/threshold_below_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_below_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_below_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_below_last",
rule_path="alerts/test_scenarios/threshold_below_last/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_below_last/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_below_last",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_equal_to_at_least_once",
rule_path="alerts/test_scenarios/threshold_equal_to_at_least_once/rule.json",
@@ -339,30 +337,28 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last,
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
# types.AlertTestCase(
# name="test_threshold_equal_to_last",
# rule_path="alerts/test_scenarios/threshold_equal_to_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_equal_to_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_equal_to_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_equal_to_last",
rule_path="alerts/test_scenarios/threshold_equal_to_last/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_equal_to_last/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_equal_to_last",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_not_equal_to_at_least_once",
rule_path="alerts/test_scenarios/threshold_not_equal_to_at_least_once/rule.json",
@@ -451,30 +447,28 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last,
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
# types.AlertTestCase(
# name="test_threshold_not_equal_to_last",
# rule_path="alerts/test_scenarios/threshold_not_equal_to_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_not_equal_to_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_not_equal_to_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_not_equal_to_last",
rule_path="alerts/test_scenarios/threshold_not_equal_to_last/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_not_equal_to_last/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_not_equal_to_last",
"threshold.name": "critical",
}
),
],
),
),
]
# test cases unit conversion

File diff suppressed because it is too large

View File

@@ -54,17 +54,17 @@ def test_rate_with_steady_values_and_reset(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 59
assert len(result_values) >= 58
# the counter reset happened at the 31st minute
assert (
result_values[30]["value"] == 0.0167
result_values[29]["value"] == 0.0167
) # i.e. 2/120, the change from the 29th to the 31st minute
assert (
result_values[31]["value"] == 0.133
result_values[30]["value"] == 0.133
) # i.e. 10/60, the change from the 31st to the 32nd minute
count_of_steady_rate = sum(1 for v in result_values if v["value"] == 0.0833)
assert (
count_of_steady_rate >= 56
count_of_steady_rate >= 55
) # 59 - (1 reset + 1 high rate + 1 at the beginning)
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
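These assertions encode counter-reset semantics: when a cumulative value drops, the post-reset value itself is taken as the increase, and the very first sample, having no predecessor, produces no rate, which is why every expected index shifted down by one. A hedged Go sketch of that per-interval rate, assuming evenly spaced samples:

// Sketch: per-interval rate with counter-reset handling; the first point emits nothing.
func rates(values []float64, stepSeconds float64) []float64 {
	out := make([]float64, 0, len(values))
	for i := 1; i < len(values); i++ {
		delta := values[i] - values[i-1]
		if delta < 0 {
			delta = values[i] // reset: the new value is the whole increase
		}
		out = append(out, delta/stepSeconds)
	}
	return out
}

With a 60s step, a counter climbing +5 per minute yields 5/60 ≈ 0.0833, the steady rate asserted above.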

View File

@@ -72,16 +72,17 @@ def test_with_steady_values_and_reset(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 59
assert len(result_values) >= 58
# the counter reset happened at the 31st minute
assert result_values[30]["value"] == expected_value_at_31st_minute
assert result_values[31]["value"] == expected_value_at_32nd_minute
# we skip the rate value for the first data point, which has no previous value
assert result_values[29]["value"] == expected_value_at_31st_minute
assert result_values[30]["value"] == expected_value_at_32nd_minute
assert (
result_values[39]["value"] == steady_value
) # 39th minute is when cumulative shifts to delta
result_values[38]["value"] == steady_value
) # 38th minute is when cumulative shifts to delta
count_of_steady_rate = sum(1 for v in result_values if v["value"] == steady_value)
assert (
count_of_steady_rate >= 56
count_of_steady_rate >= 55
) # 59 - (1 reset + 1 high rate + 1 at the beginning)
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
@@ -316,12 +317,12 @@ def test_for_service_with_switch(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 60
assert result_values[30]["value"] == expected_value_at_30th_minute # 0.183
assert result_values[31]["value"] == expected_value_at_31st_minute # 0.183
assert result_values[38]["value"] == value_at_switch # 0.25
assert len(result_values) >= 59
assert result_values[29]["value"] == expected_value_at_30th_minute # 0.183
assert result_values[30]["value"] == expected_value_at_31st_minute # 0.183
assert result_values[37]["value"] == value_at_switch # 0.25
assert (
result_values[39]["value"] == value_at_switch # 0.25
result_values[38]["value"] == value_at_switch # 0.25
) # 39th minute is when cumulative shifts to delta
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
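The switch scenario exercises a series whose temporality flips from cumulative to delta mid-stream. A sketch of how the two sample kinds can reduce to the same per-second rate (this handling is my assumption, not the queried implementation):

// Sketch: delta samples are already increases; cumulative ones need a predecessor.
func pointRate(prev, cur float64, cumulative bool, stepSeconds float64) float64 {
	if !cumulative {
		return cur / stepSeconds // delta: the value is the increase over the step
	}
	inc := cur - prev
	if inc < 0 {
		inc = cur // counter reset
	}
	return inc / stepSeconds
}

For example, a 15-unit increase over a 60s step gives 15/60 = 0.25, consistent with the value_at_switch asserted above.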

View File

@@ -1,12 +1,12 @@
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:01:00+00:00","value":1,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:02:00+00:00","value":2,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:03:00+00:00","value":3,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:04:00+00:00","value":4,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:05:00+00:00","value":19,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:06:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:07:00+00:00","value":35,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:08:00+00:00","value":36,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:09:00+00:00","value":37,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:10:00+00:00","value":38,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:11:00+00:00","value":39,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:12:00+00:00","value":40,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:01:00+00:00","value":1,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:02:00+00:00","value":2,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:03:00+00:00","value":4,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:04:00+00:00","value":4,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:05:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:06:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:07:00+00:00","value":36,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:08:00+00:00","value":25,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:09:00+00:00","value":37,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:10:00+00:00","value":35,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:11:00+00:00","value":39,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:12:00+00:00","value":25,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

View File

@@ -25,7 +25,7 @@
"type": "clickhouse_sql",
"spec": {
"name": "A",
"query": "WITH __temporal_aggregation_cte AS (\n SELECT \n fingerprint, \n toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60)) AS ts, \n avg(value) AS per_series_value \n FROM signoz_metrics.distributed_samples_v4 AS points \n INNER JOIN (\n SELECT fingerprint \n FROM signoz_metrics.time_series_v4 \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND LOWER(temporality) LIKE LOWER('cumulative') \n AND __normalized = false \n GROUP BY fingerprint\n ) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND unix_milli >= {{.start_timestamp_ms}} \n AND unix_milli < {{.end_timestamp_ms}} \n GROUP BY fingerprint, ts \n ORDER BY fingerprint, ts\n), \n__spatial_aggregation_cte AS (\n SELECT \n ts, \n avg(per_series_value) AS value \n FROM __temporal_aggregation_cte \n WHERE isNaN(per_series_value) = 0 \n GROUP BY ts\n) \nSELECT * FROM __spatial_aggregation_cte \nORDER BY ts"
"query": "WITH __temporal_aggregation_cte AS (\n SELECT \n fingerprint, \n toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60)) AS ts, \n avg(value) AS per_series_value \n FROM signoz_metrics.distributed_samples_v4 AS points \n INNER JOIN (\n SELECT fingerprint \n FROM signoz_metrics.time_series_v4 \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND LOWER(temporality) LIKE LOWER('cumulative') \n AND __normalized = false \n GROUP BY fingerprint\n ) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND unix_milli >= $start_timestamp_ms \n AND unix_milli < $end_timestamp_ms \n GROUP BY fingerprint, ts \n ORDER BY fingerprint, ts\n), \n__spatial_aggregation_cte AS (\n SELECT \n ts, \n sum(per_series_value) AS value \n FROM __temporal_aggregation_cte \n WHERE isNaN(per_series_value) = 0 \n GROUP BY ts\n) \nSELECT * FROM __spatial_aggregation_cte \nORDER BY ts"
}
}
]

View File

@@ -1,12 +1,12 @@
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":31,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":46,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":58,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":71,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":76,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":81,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":86,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":91,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":12,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":31,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":23,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":58,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":71,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":45,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":81,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":86,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":91,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

View File

@@ -1,12 +1,12 @@
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":30,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":40,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":50,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":55,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":60,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":65,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":70,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":75,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":45,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":60,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":65,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":34,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":75,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

View File

@@ -1,12 +1,12 @@
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":524288,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":1048576,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":1572864,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":2097152,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":3770016,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":5642880,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":10515744,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":11038632,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":11561520,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":12084408,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":12607296,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":13130184,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":524288,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":1048576,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":1572864,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":2097152,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":3770016,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":5642880,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":10515744,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":11038632,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":11561520,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":12084408,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":12607296,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":13130184,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}