Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-16 22:22:14 +00:00)

Compare commits: fix-issues...nv/10282 (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | da7f62a5d3 |  |
|  | 052cb01e00 |  |
@@ -176,6 +176,25 @@ We have published benchmarks comparing Loki with SigNoz. Check
We ❤️ contributions to the project, big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) first before you start contributing to SigNoz.

Not sure how to get started? Just message us on the #contributing channel in our [slack community](https://signoz.io/slack)

### Our project maintainers

#### Backend

- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)

#### Frontend

- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)

#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)

<br /><br />

## Documentation
README.md (28 changes)
@@ -221,6 +221,34 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING

Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)

### Project maintainers

#### Backend

- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
- [Ekansh Gupta](https://github.com/eKuG)
- [Aniket Agarwal](https://github.com/aniketio-ctrl)

#### Frontend

- [Yunus M](https://github.com/YounixM)
- [Vikrant Gupta](https://github.com/vikrantgupta25)
- [Sagar Rajput](https://github.com/SagarRajput-7)
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)

#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/therealpandey)

<br /><br />
@@ -187,6 +187,25 @@ Jaeger is only a distributed tracing system, whereas SigNoz can provide metric

Not sure how to get started? Just reach out to us through the `#contributing` channel in our [slack community](https://signoz.io/slack).

### Project maintainers

#### Backend

- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)

#### Frontend

- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)

#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)

<br /><br />

## Documentation
@@ -294,6 +294,7 @@ flagger:
  config:
    boolean:
      use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:
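For context, entries such as `kafka_span_eval` sit under the `boolean` section of this flag config. A minimal sketch of how a consumer might look up such a flag, assuming the YAML has already been parsed into a plain object; `FlagConfig` and `isBooleanFlagEnabled` are illustrative names, not SigNoz APIs:

```ts
// Illustrative only: assumes the flagger YAML above has been parsed into
// this shape; these names are not part of SigNoz itself.
interface FlagConfig {
  boolean?: Record<string, boolean>;
  string?: Record<string, string>;
  float?: Record<string, number>;
}

function isBooleanFlagEnabled(config: FlagConfig, flag: string): boolean {
  // Missing sections or unknown flags default to "off".
  return config.boolean?.[flag] ?? false;
}

const config: FlagConfig = {
  boolean: {
    use_span_metrics: true,
    interpolation_enabled: false,
    kafka_span_eval: false,
  },
  string: {},
  float: {},
};

console.log(isBooleanFlagEnabled(config, 'kafka_span_eval')); // false
console.log(isBooleanFlagEnabled(config, 'use_span_metrics')); // true
```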
@@ -73,7 +73,7 @@ describe('convertV5ResponseToLegacy', () => {
  const v5Data: QueryRangeResponseV5 = {
    type: 'time_series',
    data: { results: [timeSeries] },
    meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
    meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
  };

  const params = makeBaseParams('time_series', [
@@ -156,7 +156,7 @@ describe('convertV5ResponseToLegacy', () => {
  const v5Data: QueryRangeResponseV5 = {
    type: 'scalar',
    data: { results: [scalar] },
    meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
    meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
  };

  const params = makeBaseParams('scalar', [
@@ -239,7 +239,7 @@ describe('convertV5ResponseToLegacy', () => {
  const v5Data: QueryRangeResponseV5 = {
    type: 'scalar',
    data: { results: [scalar] },
    meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
    meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
  };

  const params = makeBaseParams('scalar', [
@@ -388,7 +388,6 @@ export function convertV5ResponseToLegacy(
        warnings: v5Data?.data?.warning || [],
      },
      warning: v5Data?.warning || undefined,
      meta: v5Data?.meta,
    },
    warning: v5Data?.warning || undefined,
  };
@@ -407,7 +406,6 @@ export function convertV5ResponseToLegacy(
    payload: {
      data: convertedData,
      warning: v5Response.payload?.data?.warning || undefined,
      meta: v5Data?.meta,
    },
  };
@@ -78,10 +78,12 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
  let user: ReturnType<typeof userEvent.setup>;
  let mockOnValueUpdate: jest.Mock;
  let mockSetVariablesToGetUpdated: jest.Mock;

  beforeEach(() => {
    user = userEvent.setup();
    mockOnValueUpdate = jest.fn();
    mockSetVariablesToGetUpdated = jest.fn();
    jest.clearAllMocks();
  });

@@ -100,6 +102,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -145,6 +150,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -187,6 +195,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -236,6 +247,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -258,6 +272,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -291,6 +308,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -324,6 +344,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -346,6 +369,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -379,6 +405,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -432,6 +461,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -476,6 +508,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -513,6 +548,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -544,6 +582,9 @@ describe('VariableItem Integration Tests', () => {
          variableData={variable}
          existingVariables={{}}
          onValueUpdate={mockOnValueUpdate}
          variablesToGetUpdated={[]}
          setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
          dependencyData={null}
        />
      </TestWrapper>,
    );
@@ -9,15 +9,11 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import {
  enqueueDescendantsOfVariable,
  enqueueFetchOfAllVariables,
  initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';

import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';

import './DashboardVariableSelection.styles.scss';
@@ -26,6 +22,8 @@ function DashboardVariableSelection(): JSX.Element | null {
  const {
    setSelectedDashboard,
    updateLocalStorageDashboardVariables,
    variablesToGetUpdated,
    setVariablesToGetUpdated,
  } = useDashboard();

  const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -57,14 +55,11 @@ function DashboardVariableSelection(): JSX.Element | null {
    [dependencyData?.order],
  );

  // Initialize fetch store then start a new fetch cycle.
  // Runs on dependency order changes, and time range changes.
  // Trigger refetch when dependency order changes or global time changes
  useEffect(() => {
    const allVariableNames = sortedVariablesArray
      .map((v) => v.name)
      .filter((name): name is string => !!name);
    initializeVariableFetchStore(allVariableNames);
    enqueueFetchOfAllVariables();
    if (dependencyData?.order && dependencyData.order.length > 0) {
      setVariablesToGetUpdated(dependencyData?.order || []);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [dependencyOrderKey, minTime, maxTime]);

@@ -126,14 +121,29 @@ function DashboardVariableSelection(): JSX.Element | null {
        return prev;
      });

      // Cascade: enqueue query-type descendants for refetching
      enqueueDescendantsOfVariable(name);
      if (dependencyData) {
        const updatedVariables: string[] = [];
        onUpdateVariableNode(
          name,
          dependencyData.graph,
          dependencyData.order,
          (node) => updatedVariables.push(node),
        );
        setVariablesToGetUpdated((prev) => [
          ...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
        ]);
      } else {
        setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
      }
    },
    [
      // This can be removed
      dashboardVariables,
      updateLocalStorageDashboardVariables,
      dependencyData,
      updateUrlVariable,
      setSelectedDashboard,
      setVariablesToGetUpdated,
    ],
  );

@@ -148,6 +158,9 @@ function DashboardVariableSelection(): JSX.Element | null {
          existingVariables={dashboardVariables}
          variableData={variable}
          onValueUpdate={onValueUpdate}
          variablesToGetUpdated={variablesToGetUpdated}
          setVariablesToGetUpdated={setVariablesToGetUpdated}
          dependencyData={dependencyData}
        />
      );
    })}
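The `onUpdateVariableNode` call above walks the variable dependency graph and collects every descendant of the changed variable so it can be queued for refetching. A minimal sketch of that cascade idea over a simple adjacency map; this is an illustration of the mechanism, not the repo's implementation:

```ts
// Illustrative sketch: walk a variable dependency graph (parent -> children)
// and collect every transitive descendant of the changed variable.
type VariableGraph = Record<string, string[]>;

function collectDescendants(graph: VariableGraph, start: string): string[] {
  const visited = new Set<string>();
  const queue = [...(graph[start] ?? [])];
  while (queue.length > 0) {
    const node = queue.shift() as string;
    if (visited.has(node)) continue;
    visited.add(node);
    queue.push(...(graph[node] ?? []));
  }
  return [...visited];
}

const graph: VariableGraph = {
  k8s_cluster_name: ['k8s_node_name'],
  k8s_node_name: ['k8s_namespace_name'],
  k8s_namespace_name: [],
};

// Changing the cluster should cascade to the node and namespace variables.
console.log(collectDescendants(graph, 'k8s_cluster_name'));
// ['k8s_node_name', 'k8s_namespace_name']
```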
@@ -2,25 +2,18 @@ import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { SuccessResponseV2 } from 'types/api';
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';

import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import {
  buildExistingDynamicVariableQuery,
  extractErrorMessage,
  getOptionsForDynamicVariable,
  mergeUniqueStrings,
  settleVariableFetch,
} from './util';
import { getOptionsForDynamicVariable } from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';

@@ -31,6 +24,7 @@ type DynamicVariableInputProps = Pick<
  'variableData' | 'onValueUpdate' | 'existingVariables'
>;

// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
  variableData,
  onValueUpdate,
@@ -61,8 +55,14 @@ function DynamicVariableInput({

  const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);

  // Build a memoized list of all currently available option strings (normalized + related)
  const allAvailableOptionStrings = useMemo(
    () => mergeUniqueStrings(optionsData, relatedValues),
    () => [
      ...new Set([
        ...optionsData.map((v) => v.toString()),
        ...relatedValues.map((v) => v.toString()),
      ]),
    ],
    [optionsData, relatedValues],
  );

@@ -104,24 +104,67 @@ function DynamicVariableInput({
    (state) => state.globalTime,
  );

  const {
    variableFetchCycleId,
    isVariableSettled,
    isVariableFetching,
    hasVariableFetchedOnce,
    isVariableWaitingForDependencies,
    variableDependencyWaitMessage,
  } = useVariableFetchState(variableData.name || '');
  // existing query is the query made from the other dynamic variables around this one with their current values
  // e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
  // eslint-disable-next-line sonarjs/cognitive-complexity
  const existingQuery = useMemo(() => {
    if (!existingVariables || !variableData.dynamicVariablesAttribute) {
      return '';
    }

  const existingQuery = useMemo(
    () =>
      buildExistingDynamicVariableQuery(
        existingVariables,
        variableData.id,
        !!variableData.dynamicVariablesAttribute,
      ),
    [existingVariables, variableData.id, variableData.dynamicVariablesAttribute],
  );
    const queryParts: string[] = [];

    Object.entries(existingVariables).forEach(([, variable]) => {
      // Skip the current variable being processed
      if (variable.id === variableData.id) {
        return;
      }

      // Only include dynamic variables that have selected values and are not selected as ALL
      if (
        variable.type === 'DYNAMIC' &&
        variable.dynamicVariablesAttribute &&
        variable.selectedValue &&
        !isEmpty(variable.selectedValue) &&
        (variable.showALLOption ? !variable.allSelected : true)
      ) {
        const attribute = variable.dynamicVariablesAttribute;
        const values = Array.isArray(variable.selectedValue)
          ? variable.selectedValue
          : [variable.selectedValue];

        // Filter out empty values and convert to strings
        const validValues = values
          .filter((val) => val !== null && val !== undefined && val !== '')
          .map((val) => val.toString());

        if (validValues.length > 0) {
          // Format values for query - wrap strings in quotes, keep numbers as is
          const formattedValues = validValues.map((val) => {
            // Check if value is a number
            const numValue = Number(val);
            if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
              return val; // Keep as number
            }
            // Escape single quotes and wrap in quotes
            return `'${val.replace(/'/g, "\\'")}'`;
          });

          if (formattedValues.length === 1) {
            queryParts.push(`${attribute} = ${formattedValues[0]}`);
          } else {
            queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
          }
        }
      }
    });

    return queryParts.join(' AND ');
  }, [
    existingVariables,
    variableData.id,
    variableData.dynamicVariablesAttribute,
  ]);

  // Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
  const handleSelectDropdownVisibilityChange = useCallback(
@@ -139,73 +182,6 @@ function DynamicVariableInput({
    [onDropdownVisibleChange, optionsData, originalRelatedValues],
  );

  const handleQuerySuccess = useCallback(
    (data: SuccessResponseV2<FieldValueResponse>): void => {
      const newNormalizedValues = data.data?.normalizedValues || [];
      const newRelatedValues = data.data?.relatedValues || [];

      if (!debouncedApiSearchText) {
        setOptionsData(newNormalizedValues);
        setIsComplete(data.data?.complete || false);
      }
      setFilteredOptionsData(newNormalizedValues);
      setRelatedValues(newRelatedValues);
      setOriginalRelatedValues(newRelatedValues);

      // Sync temp selection with latest API values when ALL is active and dropdown is open
      if (variableData.allSelected && isDropdownOpen) {
        const latestValues = mergeUniqueStrings(
          newNormalizedValues,
          newRelatedValues,
        );

        const currentStrings = Array.isArray(tempSelection)
          ? tempSelection.map((v) => v.toString())
          : tempSelection
          ? [tempSelection.toString()]
          : [];

        const areSame =
          currentStrings.length === latestValues.length &&
          latestValues.every((v) => currentStrings.includes(v));

        if (!areSame) {
          setTempSelection(latestValues);
        }
      }

      // Apply default if no value is selected (e.g., new variable, first load)
      if (!debouncedApiSearchText) {
        applyDefaultIfNeeded(
          mergeUniqueStrings(newNormalizedValues, newRelatedValues),
        );
      }

      settleVariableFetch(variableData.name, 'complete');
    },
    [
      debouncedApiSearchText,
      variableData.allSelected,
      variableData.name,
      isDropdownOpen,
      tempSelection,
      setTempSelection,
      applyDefaultIfNeeded,
    ],
  );

  const handleQueryError = useCallback(
    (error: { message?: string } | null): void => {
      if (error) {
        setErrorMessage(extractErrorMessage(error));
        setIsRetryableError(checkIfRetryableError(error));
      }

      settleVariableFetch(variableData.name, 'failure');
    },
    [variableData.name],
  );

  const { isLoading, refetch } = useQuery(
    [
      REACT_QUERY_KEY.DASHBOARD_BY_ID,
@@ -216,22 +192,13 @@ function DynamicVariableInput({
      debouncedApiSearchText,
      variableData.dynamicVariablesSource,
      variableData.dynamicVariablesAttribute,
      variableFetchCycleId,
    ],
    {
      /*
       * enabled if
       * - we have dynamic variable source and attribute defined (ALWAYS)
       * - AND
       * - we're either still fetching variable options
       * - OR
       * - if variable is in idle state and we have already fetched options for it
       **/
      enabled:
        variableData.type === 'DYNAMIC' &&
        !!variableData.dynamicVariablesSource &&
        !!variableData.dynamicVariablesAttribute &&
        (isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
      queryFn: ({ signal }) =>
        !!variableData.dynamicVariablesAttribute,
      queryFn: () =>
        getFieldValues(
          variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
            ? undefined
@@ -244,10 +211,70 @@ function DynamicVariableInput({
          minTime,
          maxTime,
          existingQuery,
          signal,
        ),
      onSuccess: handleQuerySuccess,
      onError: handleQueryError,
      onSuccess: (data) => {
        const newNormalizedValues = data.data?.normalizedValues || [];
        const newRelatedValues = data.data?.relatedValues || [];

        if (!debouncedApiSearchText) {
          setOptionsData(newNormalizedValues);
          setIsComplete(data.data?.complete || false);
        }
        setFilteredOptionsData(newNormalizedValues);
        setRelatedValues(newRelatedValues);
        setOriginalRelatedValues(newRelatedValues);

        // Only run auto-check logic when necessary to avoid performance issues
        if (variableData.allSelected && isDropdownOpen) {
          // Build the latest full list from API (normalized + related)
          const latestValues = [
            ...new Set([
              ...newNormalizedValues.map((v) => v.toString()),
              ...newRelatedValues.map((v) => v.toString()),
            ]),
          ];

          // Update temp selection to exactly reflect latest API values when ALL is active
          const currentStrings = Array.isArray(tempSelection)
            ? tempSelection.map((v) => v.toString())
            : tempSelection
            ? [tempSelection.toString()]
            : [];
          const areSame =
            currentStrings.length === latestValues.length &&
            latestValues.every((v) => currentStrings.includes(v));
          if (!areSame) {
            setTempSelection(latestValues);
          }
        }

        // Apply default if no value is selected (e.g., new variable, first load)
        if (!debouncedApiSearchText) {
          const allNewOptions = [
            ...new Set([
              ...newNormalizedValues.map((v) => v.toString()),
              ...newRelatedValues.map((v) => v.toString()),
            ]),
          ];
          applyDefaultIfNeeded(allNewOptions);
        }
      },
      onError: (error: any) => {
        if (error) {
          let message = SOMETHING_WENT_WRONG;
          if (error?.message) {
            message = error?.message;
          } else {
            message =
              'Please make sure configuration is valid and you have required setup and permissions';
          }
          setErrorMessage(message);

          // Check if error is retryable (5xx) or not (4xx)
          const isRetryable = checkIfRetryableError(error);
          setIsRetryableError(isRetryable);
        }
      },
    },
  );

@@ -309,8 +336,6 @@ function DynamicVariableInput({
      showRetryButton={isRetryableError}
      showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
      onSearch={handleSearch}
      waiting={isVariableWaitingForDependencies}
      waitingMessage={variableDependencyWaitMessage}
    />
  );
}
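The `existingQuery` memo above turns each sibling dynamic variable's selection into a filter clause: numeric values stay unquoted, strings are single-quoted with embedded quotes escaped, and multi-value selections become `IN [...]`. A standalone sketch of just that formatting rule, mirroring the logic shown in the diff:

```ts
// Mirrors the formatting rules in the existingQuery memo above:
// numbers stay bare, strings are quoted with single quotes escaped.
function formatValue(val: string): string {
  const num = Number(val);
  if (!Number.isNaN(num) && Number.isFinite(num)) {
    return val; // keep as number
  }
  return `'${val.replace(/'/g, "\\'")}'`;
}

function toClause(attribute: string, values: string[]): string {
  const formatted = values.map(formatValue);
  return formatted.length === 1
    ? `${attribute} = ${formatted[0]}`
    : `${attribute} IN [${formatted.join(', ')}]`;
}

console.log(toClause('k8s.namespace.name', ['zeus', 'gene']));
// k8s.namespace.name IN ['zeus', 'gene']
console.log(toClause('http.status_code', ['404']));
// http.status_code = 404
```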
@@ -3,9 +3,8 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isEmpty, isString } from 'lodash-es';
import { isArray, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -13,18 +12,26 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, settleVariableFetch } from './util';
import { areArraysEqual, checkAPIInvocation } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';

type QueryVariableInputProps = Pick<
  VariableItemProps,
  'variableData' | 'existingVariables' | 'onValueUpdate'
  | 'variableData'
  | 'existingVariables'
  | 'onValueUpdate'
  | 'variablesToGetUpdated'
  | 'setVariablesToGetUpdated'
  | 'dependencyData'
>;

function QueryVariableInput({
  variableData,
  existingVariables,
  variablesToGetUpdated,
  setVariablesToGetUpdated,
  dependencyData,
  onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
  const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
@@ -36,15 +43,6 @@ function QueryVariableInput({
    (state) => state.globalTime,
  );

  const {
    variableFetchCycleId,
    isVariableSettled,
    isVariableFetching,
    hasVariableFetchedOnce,
    isVariableWaitingForDependencies,
    variableDependencyWaitMessage,
  } = useVariableFetchState(variableData.name || '');

  const {
    tempSelection,
    setTempSelection,
@@ -62,6 +60,16 @@ function QueryVariableInput({
    strategy: queryVariableSelectStrategy,
  });

  const validVariableUpdate = useCallback((): boolean => {
    if (!variableData.name) {
      return false;
    }
    return Boolean(
      variablesToGetUpdated.length &&
        variablesToGetUpdated[0] === variableData.name,
    );
  }, [variableData.name, variablesToGetUpdated]);

  const getOptions = useCallback(
    // eslint-disable-next-line sonarjs/cognitive-complexity
    (variablesRes: VariableResponseProps | null): void => {
@@ -95,24 +103,18 @@ function QueryVariableInput({
        valueNotInList = true;
      }

      if (variableData.name && (valueNotInList || variableData.allSelected)) {
      // variableData.allSelected is added for the case where on change of options we need to update the
      // local storage
      if (
        variableData.name &&
        (validVariableUpdate() || valueNotInList || variableData.allSelected)
      ) {
        if (
          variableData.allSelected &&
          variableData.multiSelect &&
          variableData.showALLOption
        ) {
          if (
            variableData.name &&
            variableData.id &&
            !isEmpty(variableData.selectedValue)
          ) {
            onValueUpdate(
              variableData.name,
              variableData.id,
              newOptionsData,
              true,
            );
          }
          onValueUpdate(variableData.name, variableData.id, newOptionsData, true);

          // Update tempSelection to maintain ALL state when dropdown is open
          if (tempSelection !== undefined) {
@@ -130,11 +132,7 @@ function QueryVariableInput({
            newOptionsData.every((option) => selectedValue.includes(option));
        }

        if (
          variableData.name &&
          variableData.id &&
          !isEmpty(variableData.selectedValue)
        ) {
        if (variableData.name && variableData.id) {
          onValueUpdate(variableData.name, variableData.id, value, allSelected);
        }
      }
@@ -143,6 +141,10 @@ function QueryVariableInput({
        setOptionsData(newOptionsData);
        // Apply default if no value is selected (e.g., new variable, first load)
        applyDefaultIfNeeded(newOptionsData);
      } else {
        setVariablesToGetUpdated((prev) =>
          prev.filter((name) => name !== variableData.name),
        );
      }
    }
  } catch (e) {
@@ -155,6 +157,8 @@ function QueryVariableInput({
      onValueUpdate,
      tempSelection,
      setTempSelection,
      validVariableUpdate,
      setVariablesToGetUpdated,
      applyDefaultIfNeeded,
    ],
  );
@@ -165,28 +169,27 @@ function QueryVariableInput({
      variableData.name || '',
      `${minTime}`,
      `${maxTime}`,
      variableFetchCycleId,
      JSON.stringify(dependencyData?.order),
    ],
    {
      /*
       * enabled if
       * - we're either still fetching variable options
       * - OR
       * - if variable is in idle state and we have already fetched options for it
       **/
      enabled: isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
      queryFn: ({ signal }) =>
        dashboardVariablesQuery(
          {
            query: variableData.queryValue || '',
            variables: variablePropsToPayloadVariables(existingVariables),
          },
          signal,
      enabled:
        variableData &&
        checkAPIInvocation(
          variablesToGetUpdated,
          variableData,
          dependencyData?.parentDependencyGraph,
        ),
      queryFn: () =>
        dashboardVariablesQuery({
          query: variableData.queryValue || '',
          variables: variablePropsToPayloadVariables(existingVariables),
        }),
      refetchOnWindowFocus: false,
      onSuccess: (response) => {
        getOptions(response.payload);
        settleVariableFetch(variableData.name, 'complete');
        setVariablesToGetUpdated((prev) =>
          prev.filter((v) => v !== variableData.name),
        );
      },
      onError: (error: {
        details: {
@@ -203,7 +206,9 @@ function QueryVariableInput({
        }
        setErrorMessage(message);
      }
      settleVariableFetch(variableData.name, 'failure');
      setVariablesToGetUpdated((prev) =>
        prev.filter((v) => v !== variableData.name),
      );
    },
  },
);
@@ -237,8 +242,6 @@ function QueryVariableInput({
      loading={isLoading}
      errorMessage={errorMessage}
      onRetry={handleRetry}
      waiting={isVariableWaitingForDependencies}
      waitingMessage={variableDependencyWaitMessage}
    />
  );
}
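Both the `enabled` gate (`checkAPIInvocation`) and the `onSuccess`/`onError` handlers above revolve around `variablesToGetUpdated` acting as a queue: a dependent variable fetches only while it is at the head, then removes itself when it settles so the next one can proceed. A toy sketch of that sequencing, independent of react-query:

```ts
// Toy model of the variablesToGetUpdated queue used above: a variable with
// parents may fetch only when it is at the head of the queue, and it is
// removed from the queue once its fetch settles.
let variablesToGetUpdated = ['k8s_node_name', 'k8s_namespace_name'];

function canFetch(name: string, hasParents: boolean): boolean {
  if (!hasParents) return true; // root variables fetch immediately
  return variablesToGetUpdated[0] === name;
}

function settle(name: string): void {
  variablesToGetUpdated = variablesToGetUpdated.filter((v) => v !== name);
}

console.log(canFetch('k8s_namespace_name', true)); // false, not at head
console.log(canFetch('k8s_node_name', true)); // true
settle('k8s_node_name');
console.log(canFetch('k8s_namespace_name', true)); // true, now at head
```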
@@ -28,8 +28,6 @@ interface SelectVariableInputProps {
  showRetryButton?: boolean;
  showIncompleteDataMessage?: boolean;
  onSearch?: (searchTerm: string) => void;
  waiting?: boolean;
  waitingMessage?: string;
}

const MAX_TAG_DISPLAY_VALUES = 10;
@@ -67,7 +65,6 @@ function SelectVariableInput({
  showRetryButton,
  showIncompleteDataMessage,
  onSearch,
  waitingMessage,
}: SelectVariableInputProps): JSX.Element {
  const commonProps = useMemo(
    () => ({
@@ -81,6 +78,7 @@ function SelectVariableInput({
      className: 'variable-select',
      popupClassName: 'dropdown-styles',
      getPopupContainer: popupContainer,
      style: SelectItemStyle,
      showSearch: true,
      bordered: false,

@@ -88,8 +86,6 @@ function SelectVariableInput({
      'data-testid': 'variable-select',
      onChange,
      loading,
      waitingMessage,
      style: SelectItemStyle,
      options,
      errorMessage,
      onRetry,
@@ -105,7 +101,6 @@ function SelectVariableInput({
      defaultValue,
      onChange,
      loading,
      waitingMessage,
      options,
      value,
      errorMessage,
@@ -47,6 +47,15 @@ describe('VariableItem', () => {
        variableData={mockVariableData}
        existingVariables={{}}
        onValueUpdate={mockOnValueUpdate}
        variablesToGetUpdated={[]}
        setVariablesToGetUpdated={(): void => {}}
        dependencyData={{
          order: [],
          graph: {},
          parentDependencyGraph: {},
          transitiveDescendants: {},
          hasCycle: false,
        }}
      />
    </MockQueryClientProvider>,
  );
@@ -61,6 +70,15 @@ describe('VariableItem', () => {
        variableData={mockVariableData}
        existingVariables={{}}
        onValueUpdate={mockOnValueUpdate}
        variablesToGetUpdated={[]}
        setVariablesToGetUpdated={(): void => {}}
        dependencyData={{
          order: [],
          graph: {},
          parentDependencyGraph: {},
          transitiveDescendants: {},
          hasCycle: false,
        }}
      />
    </MockQueryClientProvider>,
  );
@@ -76,6 +94,15 @@ describe('VariableItem', () => {
        variableData={mockVariableData}
        existingVariables={{}}
        onValueUpdate={mockOnValueUpdate}
        variablesToGetUpdated={[]}
        setVariablesToGetUpdated={(): void => {}}
        dependencyData={{
          order: [],
          graph: {},
          parentDependencyGraph: {},
          transitiveDescendants: {},
          hasCycle: false,
        }}
      />
    </MockQueryClientProvider>,
  );
@@ -109,6 +136,15 @@ describe('VariableItem', () => {
        variableData={mockCustomVariableData}
        existingVariables={{}}
        onValueUpdate={mockOnValueUpdate}
        variablesToGetUpdated={[]}
        setVariablesToGetUpdated={(): void => {}}
        dependencyData={{
          order: [],
          graph: {},
          parentDependencyGraph: {},
          transitiveDescendants: {},
          hasCycle: false,
        }}
      />
    </MockQueryClientProvider>,
  );
@@ -131,6 +167,15 @@ describe('VariableItem', () => {
        variableData={customVariableData}
        existingVariables={{}}
        onValueUpdate={mockOnValueUpdate}
        variablesToGetUpdated={[]}
        setVariablesToGetUpdated={(): void => {}}
        dependencyData={{
          order: [],
          graph: {},
          parentDependencyGraph: {},
          transitiveDescendants: {},
          hasCycle: false,
        }}
      />
    </MockQueryClientProvider>,
  );
@@ -145,6 +190,15 @@ describe('VariableItem', () => {
        variableData={mockCustomVariableData}
        existingVariables={{}}
        onValueUpdate={mockOnValueUpdate}
        variablesToGetUpdated={[]}
        setVariablesToGetUpdated={(): void => {}}
        dependencyData={{
          order: [],
          graph: {},
          parentDependencyGraph: {},
          transitiveDescendants: {},
          hasCycle: false,
        }}
      />
    </MockQueryClientProvider>,
  );
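Every test above now passes the same `dependencyData` literal. For reference, here is its shape written out as a type sketch inferred from those literals; the repo's actual `IDependencyData` may carry more fields:

```ts
// Shape inferred from the dependencyData literals in the tests above;
// the real IDependencyData type in the repo may be richer.
type VariableGraph = Record<string, string[]>;

interface DependencyDataSketch {
  order: string[]; // topological fetch order of variables
  graph: VariableGraph; // parent -> children edges
  parentDependencyGraph: VariableGraph; // child -> parents edges
  transitiveDescendants: VariableGraph; // node -> all downstream nodes
  hasCycle: boolean; // true if the graph cannot be ordered
}

const emptyDependencyData: DependencyDataSketch = {
  order: [],
  graph: {},
  parentDependencyGraph: {},
  transitiveDescendants: {},
  hasCycle: false,
};
```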
@@ -1,6 +1,7 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import CustomVariableInput from './CustomVariableInput';
@@ -20,12 +21,18 @@ export interface VariableItemProps {
    allSelected: boolean,
    haveCustomValuesSelected?: boolean,
  ) => void;
  variablesToGetUpdated: string[];
  setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
  dependencyData: IDependencyData | null;
}

function VariableItem({
  variableData,
  onValueUpdate,
  existingVariables,
  variablesToGetUpdated,
  setVariablesToGetUpdated,
  dependencyData,
}: VariableItemProps): JSX.Element {
  const { name, description, type: variableType } = variableData;

@@ -58,6 +65,9 @@ function VariableItem({
          variableData={variableData}
          onValueUpdate={onValueUpdate}
          existingVariables={existingVariables}
          variablesToGetUpdated={variablesToGetUpdated}
          setVariablesToGetUpdated={setVariablesToGetUpdated}
          dependencyData={dependencyData}
        />
      )}
      {variableType === 'DYNAMIC' && (
@@ -7,19 +7,6 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';

import DynamicVariableInput from '../DynamicVariableInput';

// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
  useVariableFetchState: (): Record<string, unknown> => ({
    variableFetchCycleId: 0,
    variableFetchState: 'loading',
    isVariableSettled: false,
    isVariableFetching: true,
    hasVariableFetchedOnce: false,
    isVariableWaitingForDependencies: false,
    variableDependencyWaitMessage: '',
  }),
}));

// Don't mock the components - use real ones

// Mock for useQuery
@@ -230,10 +217,9 @@ describe('DynamicVariableInput Component', () => {
        '',
        'Traces',
        'service.name',
        0, // variableFetchCycleId
      ],
      expect.objectContaining({
        enabled: true, // isVariableFetching is true from mock
        enabled: true, // Type is 'DYNAMIC'
        queryFn: expect.any(Function),
        onSuccess: expect.any(Function),
        onError: expect.any(Function),
@@ -8,6 +8,15 @@ import '@testing-library/jest-dom/extend-expect';
import VariableItem from '../VariableItem';

const mockOnValueUpdate = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();

const baseDependencyData = {
  order: [],
  graph: {},
  parentDependencyGraph: {},
  transitiveDescendants: {},
  hasCycle: false,
};

const TEST_VARIABLE_ID = 'test_variable';
const VARIABLE_SELECT_TESTID = 'variable-select';
@@ -23,6 +32,9 @@ const renderVariableItem = (
      variableData={variableData}
      existingVariables={{}}
      onValueUpdate={mockOnValueUpdate}
      variablesToGetUpdated={[]}
      setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
      dependencyData={baseDependencyData}
    />
  </MockQueryClientProvider>,
);
@@ -2,12 +2,14 @@ import {
  buildDependencies,
  buildDependencyGraph,
  buildParentDependencyGraph,
  checkAPIInvocation,
  onUpdateVariableNode,
  VariableGraph,
} from '../util';
import {
  buildDependenciesMock,
  buildGraphMock,
  checkAPIInvocationMock,
  onUpdateVariableNodeMock,
} from './mock';

@@ -70,6 +72,97 @@ describe('dashboardVariables - utilities and processors', () => {
    });
  });

  describe('checkAPIInvocation', () => {
    const {
      variablesToGetUpdated,
      variableData,
      parentDependencyGraph,
    } = checkAPIInvocationMock;

    const mockRootElement = {
      name: 'deployment_environment',
      key: '036a47cd-9ffc-47de-9f27-0329198964a8',
      id: '036a47cd-9ffc-47de-9f27-0329198964a8',
      modificationUUID: '5f71b591-f583-497c-839d-6a1590c3f60f',
      selectedValue: 'production',
      type: 'QUERY',
      // ... other properties omitted for brevity
    } as any;

    describe('edge cases', () => {
      it('should return false when variableData is empty', () => {
        expect(
          checkAPIInvocation(
            variablesToGetUpdated,
            variableData,
            parentDependencyGraph,
          ),
        ).toBeFalsy();
      });

      it('should return false when parentDependencyGraph is empty', () => {
        expect(
          checkAPIInvocation(variablesToGetUpdated, variableData, {}),
        ).toBeFalsy();
      });
    });

    describe('variable sequences', () => {
      it('should return true for valid sequence', () => {
        expect(
          checkAPIInvocation(
            ['k8s_node_name', 'k8s_namespace_name'],
            variableData,
            parentDependencyGraph,
          ),
        ).toBeTruthy();
      });

      it('should return false for invalid sequence', () => {
        expect(
          checkAPIInvocation(
            ['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'],
            variableData,
            parentDependencyGraph,
          ),
        ).toBeFalsy();
      });

      it('should return false when variableData is not in sequence', () => {
        expect(
          checkAPIInvocation(
            ['deployment_environment', 'service_name', 'endpoint'],
            variableData,
            parentDependencyGraph,
          ),
        ).toBeFalsy();
      });
    });

    describe('root element behavior', () => {
      it('should return true for valid root element sequence', () => {
        expect(
          checkAPIInvocation(
            [
              'deployment_environment',
              'service_name',
              'endpoint',
              'http_status_code',
            ],
            mockRootElement,
            parentDependencyGraph,
          ),
        ).toBeTruthy();
      });

      it('should return true for empty variablesToGetUpdated array', () => {
        expect(
          checkAPIInvocation([], mockRootElement, parentDependencyGraph),
        ).toBeTruthy();
      });
    });
  });

  describe('Graph Building Utilities', () => {
    const { graph } = buildGraphMock;
    const { variables } = buildDependenciesMock;
@@ -144,72 +237,6 @@ describe('dashboardVariables - utilities and processors', () => {

      expect(buildDependencyGraph(graph)).toEqual(expected);
    });

    it('should return empty transitiveDescendants for an empty graph', () => {
      const result = buildDependencyGraph({});
      expect(result.transitiveDescendants).toEqual({});
      expect(result.order).toEqual([]);
      expect(result.hasCycle).toBe(false);
    });

    it('should compute transitiveDescendants for a linear chain (a -> b -> c)', () => {
      const linearGraph: VariableGraph = {
        a: ['b'],
        b: ['c'],
        c: [],
      };
      const result = buildDependencyGraph(linearGraph);
      expect(result.transitiveDescendants).toEqual({
        a: ['b', 'c'],
        b: ['c'],
        c: [],
      });
    });

    it('should compute transitiveDescendants for a diamond dependency (a -> b, a -> c, b -> d, c -> d)', () => {
      const diamondGraph: VariableGraph = {
        a: ['b', 'c'],
        b: ['d'],
        c: ['d'],
        d: [],
      };
      const result = buildDependencyGraph(diamondGraph);
      expect(result.transitiveDescendants.a).toEqual(
        expect.arrayContaining(['b', 'c', 'd']),
      );
      expect(result.transitiveDescendants.a).toHaveLength(3);
      expect(result.transitiveDescendants.b).toEqual(['d']);
      expect(result.transitiveDescendants.c).toEqual(['d']);
      expect(result.transitiveDescendants.d).toEqual([]);
    });

    it('should handle disconnected components in transitiveDescendants', () => {
      const disconnectedGraph: VariableGraph = {
        a: ['b'],
        b: [],
        x: ['y'],
        y: [],
      };
      const result = buildDependencyGraph(disconnectedGraph);
      expect(result.transitiveDescendants.a).toEqual(['b']);
      expect(result.transitiveDescendants.b).toEqual([]);
      expect(result.transitiveDescendants.x).toEqual(['y']);
      expect(result.transitiveDescendants.y).toEqual([]);
    });

    it('should return empty transitiveDescendants for all leaf nodes', () => {
      const leafOnlyGraph: VariableGraph = {
        a: [],
        b: [],
        c: [],
      };
      const result = buildDependencyGraph(leafOnlyGraph);
      expect(result.transitiveDescendants).toEqual({
        a: [],
        b: [],
        c: [],
      });
    });
  });

  describe('buildDependencies', () => {
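One way `transitiveDescendants` could be computed so that it satisfies the chain, diamond, and disconnected-graph tests above is a DFS with memoization. This is an illustration consistent with those tests, not necessarily the repo's implementation:

```ts
// Illustrative DFS with memoization; consistent with the tests above
// (linear chain, diamond, disconnected components, leaf-only graphs).
type VariableGraph = Record<string, string[]>;

function computeTransitiveDescendants(
  graph: VariableGraph,
): Record<string, string[]> {
  const memo: Record<string, string[]> = {};

  function visit(node: string, seen: Set<string>): string[] {
    if (memo[node]) return memo[node];
    if (seen.has(node)) return []; // cycle guard
    seen.add(node);
    const out = new Set<string>();
    for (const child of graph[node] ?? []) {
      out.add(child);
      visit(child, seen).forEach((d) => out.add(d));
    }
    memo[node] = [...out];
    return memo[node];
  }

  Object.keys(graph).forEach((node) => visit(node, new Set()));
  return memo;
}

// Linear chain from the tests: a -> b -> c
console.log(computeTransitiveDescendants({ a: ['b'], b: ['c'], c: [] }));
// a: ['b', 'c'], b: ['c'], c: []
```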
@@ -1,3 +1,36 @@
/* eslint-disable sonarjs/no-duplicate-string */
export const checkAPIInvocationMock = {
  variablesToGetUpdated: [],
  variableData: {
    name: 'k8s_node_name',
    key: '4d71d385-beaf-4434-8dbf-c62be68049fc',
    allSelected: false,
    customValue: '',
    description: '',
    id: '4d71d385-beaf-4434-8dbf-c62be68049fc',
    modificationUUID: '77233d3c-96d7-4ccb-aa9d-11b04d563068',
    multiSelect: false,
    order: 6,
    queryValue:
      "SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}\nGROUP BY k8s_node_name",
    selectedValue: 'gke-signoz-saas-si-consumer-bsc-e2sd4-a6d430fa-gvm2',
    showALLOption: false,
    sort: 'DISABLED',
    textboxValue: '',
    type: 'QUERY',
  },
  parentDependencyGraph: {
    deployment_environment: [],
    service_name: ['deployment_environment'],
    endpoint: ['deployment_environment', 'service_name'],
    http_status_code: ['endpoint'],
    k8s_cluster_name: [],
    environment: [],
    k8s_node_name: ['k8s_cluster_name'],
    k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
  },
} as any;

export const onUpdateVariableNodeMock = {
  nodeToUpdate: 'deployment_environment',
  graph: {
@@ -1,16 +1,9 @@
import { OptionData } from 'components/NewSelect/types';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isEmpty } from 'lodash-es';
import { isEmpty, isNull } from 'lodash-es';
import {
  IDashboardVariables,
  IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
  onVariableFetchComplete,
  onVariableFetchFailure,
  variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

export function areArraysEqual(
@@ -52,16 +45,30 @@ const getDependentVariablesBasedOnVariableName = (
  }

  return variables
    .map((variable) => {
    ?.map((variable: any) => {
      if (variable.type === 'QUERY') {
        // Combined pattern for all formats
        // {{.variable_name}} - original format
        // $variable_name - dollar prefix format
        // [[variable_name]] - square bracket format
        // {{variable_name}} - without dot format
        const patterns = [
          `\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
          `\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
          `\\$${variableName}\\b`, // $var
          `\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
        ];
        const combinedRegex = new RegExp(patterns.join('|'));

        const queryValue = variable.queryValue || '';
        if (textContainsVariableReference(queryValue, variableName)) {
        const dependVarReMatch = queryValue.match(combinedRegex);
        if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
          return variable.name;
        }
      }
      return null;
    })
    .filter((val): val is string => val !== null);
    .filter((val: string | null) => !isNull(val));
};
export type VariableGraph = Record<string, string[]>;
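The combined pattern above recognizes all four reference syntaxes for a variable. A quick standalone check of those exact regexes against sample query strings (same escapes as in the diff):

```ts
// Standalone check of the four variable-reference formats matched above.
const variableName = 'env';
const patterns = [
  `\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.env}}
  `\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{env}}
  `\\$${variableName}\\b`, // $env
  `\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[env]]
];
const combinedRegex = new RegExp(patterns.join('|'));

const queries = [
  'SELECT * FROM spans WHERE env = {{.env}}',
  'SELECT * FROM spans WHERE env = $env',
  'SELECT * FROM spans WHERE env = [[env]]',
  'SELECT * FROM spans WHERE env = {{env}}',
  "SELECT * FROM spans WHERE env = 'prod'", // no variable reference
];
queries.forEach((q) => console.log(combinedRegex.test(q)));
// true, true, true, true, false
```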
@@ -293,6 +300,33 @@ export const onUpdateVariableNode = (
  });
};

export const checkAPIInvocation = (
  variablesToGetUpdated: string[],
  variableData: IDashboardVariable,
  parentDependencyGraph?: VariableGraph,
): boolean => {
  if (isEmpty(variableData.name)) {
    return false;
  }

  if (isEmpty(parentDependencyGraph)) {
    return false;
  }

  // if no dependency then true
  const haveDependency =
    parentDependencyGraph?.[variableData.name || '']?.length > 0;
  if (!haveDependency) {
    return true;
  }

  // if the variable has a dependency, it may fetch only when it is the top element in the queue
  return (
    variablesToGetUpdated.length > 0 &&
    variablesToGetUpdated[0] === variableData.name
  );
};

export const getOptionsForDynamicVariable = (
  normalizedValues: (string | number | boolean)[],
  relatedValues: string[],
@@ -357,130 +391,3 @@ export const getSelectValue = (
  }
  return selectedValue?.toString();
};

/**
 * Merges multiple arrays of values into a single deduplicated string array.
 */
export function mergeUniqueStrings(
  ...arrays: (string | number | boolean)[][]
): string[] {
  return [...new Set(arrays.flatMap((arr) => arr.map((v) => v.toString())))];
}

function isEligibleFilterVariable(
  variable: IDashboardVariable,
  currentVariableId: string,
): boolean {
  if (variable.id === currentVariableId) {
    return false;
  }
  if (variable.type !== 'DYNAMIC') {
    return false;
  }
  if (!variable.dynamicVariablesAttribute) {
    return false;
  }
  if (!variable.selectedValue || isEmpty(variable.selectedValue)) {
    return false;
  }
  return !(variable.showALLOption && variable.allSelected);
}

function formatQueryValue(val: string): string {
  const numValue = Number(val);
  if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
    return val;
  }
  return `'${val.replace(/'/g, "\\'")}'`;
}

function buildQueryPart(attribute: string, values: string[]): string {
  const formatted = values.map(formatQueryValue);
  if (formatted.length === 1) {
    return `${attribute} = ${formatted[0]}`;
  }
  return `${attribute} IN [${formatted.join(', ')}]`;
}

/**
 * Builds a filter query string from sibling dynamic variables' selected values.
 * e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
 */
export function buildExistingDynamicVariableQuery(
  existingVariables: IDashboardVariables | null,
  currentVariableId: string,
  hasDynamicAttribute: boolean,
): string {
  if (!existingVariables || !hasDynamicAttribute) {
    return '';
  }

  const queryParts: string[] = [];

  for (const variable of Object.values(existingVariables)) {
    // Skip the current variable being processed
    if (!isEligibleFilterVariable(variable, currentVariableId)) {
      continue;
    }

    const rawValues = Array.isArray(variable.selectedValue)
      ? variable.selectedValue
      : [variable.selectedValue];

    // Filter out empty values and convert to strings
    const validValues = rawValues
      .filter(
        (val): val is string | number | boolean =>
          val !== null && val !== undefined && val !== '',
      )
      .map((val) => val.toString());

    if (validValues.length > 0 && variable.dynamicVariablesAttribute) {
      queryParts.push(
        buildQueryPart(variable.dynamicVariablesAttribute, validValues),
      );
    }
  }

  return queryParts.join(' AND ');
}

function isVariableInActiveFetchState(state: string | undefined): boolean {
  return state === 'loading' || state === 'revalidating';
}

/**
 * Completes or fails a variable's fetch state machine transition.
 * No-ops if the variable is not currently in an active fetch state.
 */
export function settleVariableFetch(
  name: string | undefined,
  outcome: 'complete' | 'failure',
): void {
  if (!name) {
    return;
  }

  const currentState = variableFetchStore.getSnapshot().states[name];
  if (!isVariableInActiveFetchState(currentState)) {
    return;
  }

  if (outcome === 'complete') {
    onVariableFetchComplete(name);
  } else {
    onVariableFetchFailure(name);
  }
}

export function extractErrorMessage(
  error: { message?: string } | null,
): string {
  if (!error) {
    return SOMETHING_WENT_WRONG;
  }
  return (
    error.message ||
    'Please make sure configuration is valid and you have required setup and permissions'
  );
}
@@ -1,31 +1,4 @@
|
||||
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
|
||||
variableFetchStore: {
|
||||
getSnapshot: jest.fn(),
|
||||
},
|
||||
onVariableFetchComplete: jest.fn(),
|
||||
onVariableFetchFailure: jest.fn(),
|
||||
}));
|
||||
|
||||
import {
|
||||
onVariableFetchComplete,
|
||||
onVariableFetchFailure,
|
||||
variableFetchStore,
|
||||
} from 'providers/Dashboard/store/variableFetchStore';
|
||||
import { IDashboardVariable } from 'types/api/dashboard/getAll';
|
||||
|
||||
import {
|
||||
areArraysEqual,
|
||||
buildExistingDynamicVariableQuery,
|
||||
extractErrorMessage,
|
||||
mergeUniqueStrings,
|
||||
onUpdateVariableNode,
|
||||
settleVariableFetch,
|
||||
VariableGraph,
|
||||
} from './util';
|
||||
|
||||
// ────────────────────────────────────────────────────────────────
|
||||
// Existing tests
|
||||
// ────────────────────────────────────────────────────────────────
|
||||
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';

describe('areArraysEqual', () => {
  it('should return true for equal arrays with same order', () => {
@@ -176,348 +149,3 @@ describe('onUpdateVariableNode', () => {
    expect(visited).toEqual(['namespace', 'service', 'pod']);
  });
});

// ────────────────────────────────────────────────────────────────
// New tests for functions added in recent commits
// ────────────────────────────────────────────────────────────────

function makeDynamicVar(
  overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
  return {
    name: overrides.id,
    description: '',
    type: 'DYNAMIC',
    sort: 'DISABLED',
    multiSelect: false,
    showALLOption: false,
    allSelected: false,
    dynamicVariablesAttribute: 'attr',
    selectedValue: 'some-value',
    ...overrides,
  } as IDashboardVariable;
}

describe('mergeUniqueStrings', () => {
  it('should merge two arrays and deduplicate', () => {
    expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toEqual(['a', 'b', 'c']);
  });

  it('should convert numbers and booleans to strings', () => {
    expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toEqual([
      '1',
      'true',
      'hello',
      '2',
      'false',
    ]);
  });

  it('should deduplicate when number and its string form both appear', () => {
    expect(mergeUniqueStrings([42], ['42'])).toEqual(['42']);
  });

  it('should handle a single array', () => {
    expect(mergeUniqueStrings(['x', 'y', 'x'])).toEqual(['x', 'y']);
  });

  it('should handle three or more arrays', () => {
    expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toEqual([
      'a',
      'b',
      'c',
    ]);
  });

  it('should return empty array when no arrays are provided', () => {
    expect(mergeUniqueStrings()).toEqual([]);
  });

  it('should return empty array when all input arrays are empty', () => {
    expect(mergeUniqueStrings([], [], [])).toEqual([]);
  });

  it('should preserve order of first occurrence', () => {
    expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toEqual(['c', 'a', 'b']);
  });
});

describe('buildExistingDynamicVariableQuery', () => {
  // --- Guard clauses ---
  it('should return empty string when existingVariables is null', () => {
    expect(buildExistingDynamicVariableQuery(null, 'v1', true)).toBe('');
  });

  it('should return empty string when hasDynamicAttribute is false', () => {
    const variables = { v2: makeDynamicVar({ id: 'v2' }) };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', false)).toBe('');
  });

  // --- Eligibility filtering ---
  it('should skip the current variable (same id)', () => {
    const variables = {
      v1: makeDynamicVar({
        id: 'v1',
        dynamicVariablesAttribute: 'ns',
        selectedValue: 'prod',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should skip non-DYNAMIC variables', () => {
    const variables = {
      v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should skip variables without dynamicVariablesAttribute', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: undefined,
        selectedValue: 'val',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should skip variables with null selectedValue', () => {
    const variables = {
      v2: makeDynamicVar({ id: 'v2', selectedValue: null }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should skip variables with empty string selectedValue', () => {
    const variables = {
      v2: makeDynamicVar({ id: 'v2', selectedValue: '' }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should skip variables with empty array selectedValue', () => {
    const variables = {
      v2: makeDynamicVar({ id: 'v2', selectedValue: [] }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should skip variables where showALLOption and allSelected are both true', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        showALLOption: true,
        allSelected: true,
        dynamicVariablesAttribute: 'ns',
        selectedValue: 'prod',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });

  it('should include variable with showALLOption true but allSelected false', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        showALLOption: true,
        allSelected: false,
        dynamicVariablesAttribute: 'ns',
        selectedValue: 'prod',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "ns = 'prod'",
    );
  });

  // --- Value formatting ---
  it('should quote string values in the query', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'k8s.namespace.name',
        selectedValue: 'zeus',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "k8s.namespace.name = 'zeus'",
    );
  });

  it('should leave numeric values unquoted', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'http.status_code',
        selectedValue: '200',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      'http.status_code = 200',
    );
  });

  it('should escape single quotes in string values', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'user.name',
        selectedValue: "O'Brien",
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "user.name = 'O\\'Brien'",
    );
  });

  it('should build an IN clause for array selectedValue with multiple items', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'k8s.namespace.name',
        selectedValue: ['zeus', 'gene'],
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "k8s.namespace.name IN ['zeus', 'gene']",
    );
  });

  it('should handle mix of numeric and string values in IN clause', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'http.status_code',
        selectedValue: ['200', 'unknown'],
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "http.status_code IN [200, 'unknown']",
    );
  });

  it('should filter out empty string values from array', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'region',
        selectedValue: ['us-east', '', 'eu-west'],
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "region IN ['us-east', 'eu-west']",
    );
  });

  // --- Multiple siblings ---
  it('should join multiple sibling variables with AND', () => {
    const variables = {
      v2: makeDynamicVar({
        id: 'v2',
        dynamicVariablesAttribute: 'k8s.namespace.name',
        selectedValue: ['zeus', 'gene'],
      }),
      v3: makeDynamicVar({
        id: 'v3',
        dynamicVariablesAttribute: 'doc_op_type',
        selectedValue: 'test',
      }),
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
      "k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'",
    );
  });

  it('should return empty string when no variables are eligible', () => {
    const variables = {
      v1: makeDynamicVar({ id: 'v1' }), // same as current — skipped
      v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }), // not DYNAMIC
      v3: makeDynamicVar({ id: 'v3', selectedValue: null }), // no value
    };
    expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
  });
});

describe('settleVariableFetch', () => {
  const mockGetSnapshot = variableFetchStore.getSnapshot as jest.Mock;
  const mockComplete = onVariableFetchComplete as jest.Mock;
  const mockFailure = onVariableFetchFailure as jest.Mock;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('should no-op when name is undefined', () => {
    settleVariableFetch(undefined, 'complete');
    expect(mockGetSnapshot).not.toHaveBeenCalled();
    expect(mockComplete).not.toHaveBeenCalled();
    expect(mockFailure).not.toHaveBeenCalled();
  });

  it.each(['idle', 'waiting', 'error', undefined] as const)(
    'should no-op when variable state is %s',
    (state) => {
      mockGetSnapshot.mockReturnValue({ states: { myVar: state } });
      settleVariableFetch('myVar', 'complete');
      expect(mockComplete).not.toHaveBeenCalled();
      expect(mockFailure).not.toHaveBeenCalled();
    },
  );

  it('should call onVariableFetchComplete when state is loading and outcome is complete', () => {
    mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
    settleVariableFetch('myVar', 'complete');
    expect(mockComplete).toHaveBeenCalledWith('myVar');
    expect(mockFailure).not.toHaveBeenCalled();
  });

  it('should call onVariableFetchComplete when state is revalidating and outcome is complete', () => {
    mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
    settleVariableFetch('myVar', 'complete');
    expect(mockComplete).toHaveBeenCalledWith('myVar');
    expect(mockFailure).not.toHaveBeenCalled();
  });

  it('should call onVariableFetchFailure when state is loading and outcome is failure', () => {
    mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
    settleVariableFetch('myVar', 'failure');
    expect(mockFailure).toHaveBeenCalledWith('myVar');
    expect(mockComplete).not.toHaveBeenCalled();
  });

  it('should call onVariableFetchFailure when state is revalidating and outcome is failure', () => {
    mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
    settleVariableFetch('myVar', 'failure');
    expect(mockFailure).toHaveBeenCalledWith('myVar');
    expect(mockComplete).not.toHaveBeenCalled();
  });
});

describe('extractErrorMessage', () => {
  const FALLBACK_MESSAGE =
    'Please make sure configuration is valid and you have required setup and permissions';

  it('should return SOMETHING_WENT_WRONG when error is null', () => {
    expect(extractErrorMessage(null)).toBe('Something went wrong');
  });

  it('should return the error message when it exists', () => {
    expect(extractErrorMessage({ message: 'Query timeout' })).toBe(
      'Query timeout',
    );
  });

  it('should return fallback when error object has no message property', () => {
    expect(extractErrorMessage({})).toBe(FALLBACK_MESSAGE);
  });

  it('should return fallback when error.message is empty string', () => {
    expect(extractErrorMessage({ message: '' })).toBe(FALLBACK_MESSAGE);
  });

  it('should return fallback when error.message is undefined', () => {
    expect(extractErrorMessage({ message: undefined })).toBe(FALLBACK_MESSAGE);
  });
});

@@ -1,11 +1,17 @@
import { VariableItemProps } from '../VariableItem';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

export interface VariableSelectStrategy {
  handleChange(params: {
    value: string | string[];
    variableData: VariableItemProps['variableData'];
    onValueUpdate: VariableItemProps['onValueUpdate'];
    variableData: IDashboardVariable;
    optionsData: (string | number | boolean)[];
    allAvailableOptionStrings: string[];
    onValueUpdate: (
      name: string,
      id: string,
      value: IDashboardVariable['selectedValue'],
      allSelected: boolean,
      haveCustomValuesSelected?: boolean,
    ) => void;
  }): void;
}
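// A minimal sketch (assumption, not part of the diff) of a strategy object
// conforming to the widened handleChange signature above; the name
// `multiSelectStrategy` and its selection logic are hypothetical:
// const multiSelectStrategy: VariableSelectStrategy = {
//   handleChange({ value, variableData, allAvailableOptionStrings, onValueUpdate }): void {
//     const selected = Array.isArray(value) ? value : [value];
//     const allSelected = selected.length === allAvailableOptionStrings.length;
//     onValueUpdate(variableData.name ?? '', variableData.id ?? '', selected, allSelected);
//   },
// };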

@@ -17,19 +17,6 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';

import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';

// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
  useVariableFetchState: (): Record<string, unknown> => ({
    variableFetchCycleId: 0,
    variableFetchState: 'loading',
    isVariableSettled: false,
    isVariableFetching: true,
    hasVariableFetchedOnce: false,
    isVariableWaitingForDependencies: false,
    variableDependencyWaitMessage: '',
  }),
}));

// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
  getFieldValues: jest.fn(),
@@ -108,7 +95,7 @@ describe('Dynamic Variable Default Behavior', () => {
      }
    }
    if (queryFn) {
      queryFn({ signal: undefined });
      queryFn();
    }
  }
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
@@ -247,7 +234,6 @@ describe('Dynamic Variable Default Behavior', () => {
      '2023-01-01T00:00:00Z',
      '2023-01-02T00:00:00Z',
      '',
      undefined, // signal
    );
  });

@@ -501,7 +487,6 @@ describe('Dynamic Variable Default Behavior', () => {
      '2023-01-01T00:00:00Z',
      '2023-01-02T00:00:00Z',
      '',
      undefined, // signal
    );
  });


@@ -49,11 +49,15 @@ const mockDashboard = {
// Mock the dashboard provider with stable functions to prevent infinite loops
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();

jest.mock('providers/Dashboard/Dashboard', () => ({
  useDashboard: (): any => ({
    selectedDashboard: mockDashboard,
    setSelectedDashboard: mockSetSelectedDashboard,
    updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
    variablesToGetUpdated: ['env'], // Stable initial value
    setVariablesToGetUpdated: mockSetVariablesToGetUpdated,
  }),
}));


@@ -11,7 +11,6 @@ import {
  VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { get } from 'lodash-es';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -78,12 +77,6 @@ export function prepareBarPanelConfig({
    builder.setBands(getInitialStackedBands(seriesCount));
  }

  const stepIntervals: Record<string, number> = get(
    apiResponse,
    'data.newResult.meta.stepIntervals',
    {},
  );

  const seriesList: QueryData[] = apiResponse?.data?.result || [];
  seriesList.forEach((series) => {
    const baseLabelName = getLabelName(
@@ -96,8 +89,6 @@ export function prepareBarPanelConfig({
      ? getLegend(series, currentQuery, baseLabelName)
      : baseLabelName;

    const currentStepInterval = get(stepIntervals, series.queryName, undefined);

    builder.addSeries({
      scaleKey: 'y',
      drawStyle: DrawStyle.Bar,
@@ -110,7 +101,6 @@ export function prepareBarPanelConfig({
      showPoints: VisibilityMode.Never,
      pointSize: 5,
      isDarkMode,
      stepInterval: currentStepInterval,
    });
  });


@@ -6,12 +6,10 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
@@ -55,6 +53,7 @@ function GridCardGraph({
  customOnRowClick,
  customTimeRangeWindowForCoRelation,
  enableDrillDown,
  widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
  const dispatch = useDispatch();
  const [errorMessage, setErrorMessage] = useState<string>();
@@ -65,8 +64,8 @@ function GridCardGraph({
    toScrollWidgetId,
    setToScrollWidgetId,
    setDashboardQueryRangeCalled,
    variablesToGetUpdated,
  } = useDashboard();

  const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
    AppState,
    GlobalReducer
@@ -118,25 +117,10 @@ function GridCardGraph({

  const updatedQuery = widget?.query;

  const referencedVariableNames = useMemo(() => {
    if (!variables || !updatedQuery) {
      return [];
    }
    const allNames = Object.values(variables)
      .map((v) => v.name)
      .filter((name): name is string => !!name);
    return getVariableReferencesInQuery(updatedQuery, allNames);
  }, [updatedQuery, variables]);

  const isEmptyWidget =
    widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);

  const isPanelWaitingOnAnyVariable = useIsPanelWaitingOnVariable(
    referencedVariableNames,
  );

  const queryEnabledCondition =
    isVisible && !isEmptyWidget && isQueryEnabled && !isPanelWaitingOnAnyVariable;
  const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;

  const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
    if (widget.panelTypes !== PANEL_TYPES.LIST) {
@@ -193,6 +177,27 @@ function GridCardGraph({
    [requestData.query],
  );

  // Bring back dependency on variable chaining for panels to refetch,
  // but only for non-dynamic variables. We derive a stable token from
  // the head of the variablesToGetUpdated queue when it's non-dynamic.
  const nonDynamicVariableChainToken = useMemo(() => {
    if (!variablesToGetUpdated || variablesToGetUpdated.length === 0) {
      return undefined;
    }
    if (!variables) {
      return undefined;
    }
    const headName = variablesToGetUpdated[0];
    const variableObj = Object.values(variables).find(
      (variable) => variable?.name === headName,
    );
    if (variableObj && variableObj.type !== 'DYNAMIC') {
      return headName;
    }
    return undefined;
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [variablesToGetUpdated, variables]);
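  // Worked example (hypothetical values, not part of the diff): with
  // variablesToGetUpdated = ['env', 'service'] and variables.env of type
  // 'QUERY', the token is 'env', so the panel's query stays disabled
  // (enabled && !token below) until the chain head settles; if variables.env
  // were 'DYNAMIC', the token is undefined and the queue does not gate the query.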

  const queryResponse = useGetQueryRange(
    {
      ...requestData,
@@ -219,7 +224,11 @@ function GridCardGraph({
      requestData,
      variables
        ? Object.entries(variables).reduce((acc, [id, variable]) => {
            if (variable.name && referencedVariableNames.includes(variable.name)) {
            if (
              variable.type !== 'DYNAMIC' ||
              (widgetsByDynamicVariableId?.[variable.id] &&
                widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
            ) {
              return { ...acc, [id]: variable.selectedValue };
            }
            return acc;
@@ -228,6 +237,9 @@ function GridCardGraph({
      ...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
        ? [customTimeRange.startTime, customTimeRange.endTime]
        : []),
      // Include non-dynamic variable chaining token to drive refetches
      // only when a non-dynamic variable is at the head of the queue
      ...(nonDynamicVariableChainToken ? [nonDynamicVariableChainToken] : []),
    ],
    retry(failureCount, error): boolean {
      if (
@@ -240,7 +252,7 @@ function GridCardGraph({
      return failureCount < 2;
    },
    keepPreviousData: true,
    enabled: queryEnabledCondition,
    enabled: queryEnabledCondition && !nonDynamicVariableChainToken,
    refetchOnMount: false,
    onError: (error) => {
      const errorMessage =
@@ -307,7 +319,7 @@ function GridCardGraph({
      threshold={threshold}
      headerMenuList={menuList}
      isFetchingResponse={
        queryResponse.isFetching || isPanelWaitingOnAnyVariable
        queryResponse.isFetching || variablesToGetUpdated.length > 0
      }
      setRequestData={setRequestData}
      onClickHandler={onClickHandler}

@@ -72,6 +72,7 @@ export interface GridCardGraphProps {
  customOnRowClick?: (record: RowData) => void;
  customTimeRangeWindowForCoRelation?: string | undefined;
  enableDrillDown?: boolean;
  widgetsByDynamicVariableId?: Record<string, string[]>;
}

export interface GetGraphVisibilityStateOnLegendClickProps {

@@ -16,6 +16,7 @@ import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -101,6 +102,8 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
    Record<string, { widgets: Layout[]; collapsed: boolean }>
  >({});

  const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();

  useEffect(() => {
    setCurrentPanelMap(panelMap);
  }, [panelMap]);
@@ -614,6 +617,7 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
          onDragSelect={onDragSelect}
          dataAvailable={checkIfDataExists}
          enableDrillDown={enableDrillDown}
          widgetsByDynamicVariableId={widgetsByDynamicVariableId}
        />
      </Card>
    </CardContainer>

@@ -5,12 +5,16 @@ import { PanelMode } from 'container/DashboardContainer/visualization/panels/typ
import { WidgetGraphComponentProps } from 'container/GridCardLayout/GridCard/types';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';

export type PanelWrapperProps = {
  queryResponse: UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
  queryResponse: UseQueryResult<
    SuccessResponse<MetricRangePayloadProps, unknown>,
    Error
  >;
  widget: Widgets;
  setRequestData?: WidgetGraphComponentProps['setRequestData'];
  isFullViewMode?: boolean;

@@ -1,316 +0,0 @@
import { act, renderHook } from '@testing-library/react';
import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
  VariableFetchState,
  variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

import { useIsPanelWaitingOnVariable } from '../useVariableFetchState';

function makeVariable(
  overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
  return {
    name: overrides.id,
    description: '',
    type: 'QUERY',
    sort: 'DISABLED',
    multiSelect: false,
    showALLOption: false,
    ...overrides,
  };
}

function resetStores(): void {
  variableFetchStore.set(() => ({
    states: {},
    lastUpdated: {},
    cycleIds: {},
  }));
  dashboardVariablesStore.set(() => ({
    dashboardId: '',
    variables: {},
    sortedVariablesArray: [],
    dependencyData: null,
    variableTypes: {},
    dynamicVariableOrder: [],
  }));
}

function setFetchStates(states: Record<string, VariableFetchState>): void {
  variableFetchStore.set(() => ({
    states,
    lastUpdated: {},
    cycleIds: {},
  }));
}

function setDashboardVariables(
  overrides: Partial<IDashboardVariablesStoreState>,
): void {
  dashboardVariablesStore.set(() => ({
    dashboardId: '',
    variables: {},
    sortedVariablesArray: [],
    dependencyData: null,
    variableTypes: {},
    dynamicVariableOrder: [],
    ...overrides,
  }));
}

describe('useIsPanelWaitingOnVariable', () => {
  beforeEach(() => {
    resetStores();
  });

  it('should return false when variableNames is empty', () => {
    const { result } = renderHook(() => useIsPanelWaitingOnVariable([]));
    expect(result.current).toBe(false);
  });

  it('should return false when all referenced variables are idle', () => {
    setFetchStates({ a: 'idle', b: 'idle' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'val1' }),
        b: makeVariable({ id: 'b', selectedValue: 'val2' }),
      },
      variableTypes: { a: 'QUERY', b: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
    expect(result.current).toBe(false);
  });

  it('should return true when a variable is loading with empty selectedValue', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });

  it('should return true when a variable is waiting with empty selectedValue', () => {
    setFetchStates({ a: 'waiting' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: '' }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });

  it('should return true when a variable is revalidating with empty selectedValue', () => {
    setFetchStates({ a: 'revalidating' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });

  it('should return false when a variable is loading but has a selectedValue', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'some-value' }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should return true for DYNAMIC variable with allSelected=true that is loading', () => {
    setFetchStates({ dyn: 'loading' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: 'some-val',
          allSelected: true,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(true);
  });

  it('should return true for DYNAMIC variable with allSelected=true that is waiting', () => {
    setFetchStates({ dyn: 'waiting' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: 'val',
          allSelected: true,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(true);
  });

  it('should return false for DYNAMIC variable with allSelected=true that is idle', () => {
    setFetchStates({ dyn: 'idle' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: 'val',
          allSelected: true,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(false);
  });

  it('should return false for non-DYNAMIC variable with allSelected=false and non-empty value that is loading', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({
          id: 'a',
          selectedValue: 'val',
          allSelected: false,
        }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should return true if any one of multiple variables is blocking', () => {
    setFetchStates({ a: 'idle', b: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'val' }),
        b: makeVariable({ id: 'b', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY', b: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
    expect(result.current).toBe(true);
  });

  it('should return false when variable has no entry in fetch store (treated as idle)', () => {
    setFetchStates({}); // no state entry for 'a'
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: 'val' }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should return false when variable is in error state with empty selectedValue', () => {
    setFetchStates({ a: 'error' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should react to store updates', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: undefined }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);

    // Simulate variable fetch completing
    act(() => {
      variableFetchStore.update((d) => {
        d.states.a = 'idle';
      });
    });

    expect(result.current).toBe(false);
  });

  it('should handle DYNAMIC variable with allSelected=false and empty selectedValue as blocking', () => {
    setFetchStates({ dyn: 'loading' });
    setDashboardVariables({
      variables: {
        dyn: makeVariable({
          id: 'dyn',
          type: 'DYNAMIC',
          selectedValue: undefined,
          allSelected: false,
        }),
      },
      variableTypes: { dyn: 'DYNAMIC' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
    expect(result.current).toBe(true);
  });

  it('should handle variable with array selectedValue as non-blocking when loading', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: ['val1', 'val2'] }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(false);
  });

  it('should handle variable with empty array selectedValue as blocking when loading', () => {
    setFetchStates({ a: 'loading' });
    setDashboardVariables({
      variables: {
        a: makeVariable({ id: 'a', selectedValue: [] }),
      },
      variableTypes: { a: 'QUERY' },
    });

    const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
    expect(result.current).toBe(true);
  });
});
@@ -10,12 +10,13 @@ import {
  GetQueryResultsProps,
} from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { SuccessResponse, Warning } from 'types/api';
import APIError from 'types/api/error';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';

type UseGetQueryRangeOptions = UseQueryOptions<
  MetricQueryRangeSuccessResponse,
  SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
  APIError | Error
>;

@@ -29,7 +30,10 @@ type UseGetQueryRange = (
    widgetIndex: number;
    publicDashboardId: string;
  },
) => UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
) => UseQueryResult<
  SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
  Error
>;

export const useGetQueryRange: UseGetQueryRange = (
  requestData,
@@ -141,7 +145,10 @@ export const useGetQueryRange: UseGetQueryRange = (
    };
  }, [options?.retry]);

  return useQuery<MetricQueryRangeSuccessResponse, APIError | Error>({
  return useQuery<
    SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
    APIError | Error
  >({
    queryFn: async ({ signal }) =>
      GetMetricQueryRange(
        modifiedRequestData,

@@ -19,11 +19,7 @@ import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { SuccessResponse, SuccessResponseV2, Warning } from 'types/api';
import {
  MetricQueryRangeSuccessResponse,
  MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';
import { ExecStats, MetricRangePayloadV5 } from 'types/api/v5/queryRange';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

@@ -209,13 +205,13 @@ export async function GetMetricQueryRange(
    widgetIndex: number;
    publicDashboardId: string;
  },
): Promise<MetricQueryRangeSuccessResponse> {
): Promise<SuccessResponse<MetricRangePayloadProps> & { warning?: Warning }> {
  let legendMap: Record<string, string>;
  let response:
    | MetricQueryRangeSuccessResponse
    | SuccessResponseV2<MetricRangePayloadV5>;
    | SuccessResponse<MetricRangePayloadProps>
    | SuccessResponseV2<MetricRangePayloadV5>
    | (SuccessResponse<MetricRangePayloadProps> & { warning?: Warning });
  let warning: Warning | undefined;
  let meta: ExecStats | undefined;

  const panelType = props.originalGraphType || props.graphType;

@@ -303,7 +299,6 @@ export async function GetMetricQueryRange(
      );

      warning = response.payload.warning || undefined;
      meta = response.payload.meta || undefined;
    } else {
      const v5Response = await getQueryRangeV5(
        v5Result.queryPayload,
@@ -323,7 +318,6 @@ export async function GetMetricQueryRange(
      );

      warning = response.payload.warning || undefined;
      meta = response.payload.meta || undefined;
    }
  } else {
    const legacyResult = prepareQueryRangePayload(props);
@@ -390,7 +384,6 @@ export async function GetMetricQueryRange(
  return {
    ...response,
    warning,
    meta,
  };
}


@@ -81,7 +81,6 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
      barAlignment,
      barMaxWidth,
      barWidthFactor,
      stepInterval,
    } = this.props;
    if (pathBuilder) {
      return { paths: pathBuilder };
@@ -105,7 +104,6 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
        barAlignment,
        barMaxWidth,
        barWidthFactor,
        stepInterval,
      });

      return pathsBuilder(self, seriesIdx, idx0, idx1);
@@ -211,14 +209,12 @@ function getPathBuilder({
  barAlignment = BarAlignment.Center,
  barWidthFactor = 0.6,
  barMaxWidth = 200,
  stepInterval,
}: {
  drawStyle: DrawStyle;
  lineInterpolation?: LineInterpolation;
  barAlignment?: BarAlignment;
  barMaxWidth?: number;
  barWidthFactor?: number;
  stepInterval?: number;
}): Series.PathBuilder {
  if (!builders) {
    throw new Error('Required uPlot path builders are not available');
@@ -226,13 +222,14 @@

  if (drawStyle === DrawStyle.Bar) {
    const pathBuilders = uPlot.paths;
    return getBarPathBuilder({
      pathBuilders,
      barAlignment,
      barWidthFactor,
      barMaxWidth,
      stepInterval,
    });
    const barsConfigKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
    if (!builders[barsConfigKey] && pathBuilders.bars) {
      builders[barsConfigKey] = pathBuilders.bars({
        size: [barWidthFactor, barMaxWidth],
        align: barAlignment,
      });
    }
    return builders[barsConfigKey];
  }

  if (drawStyle === DrawStyle.Line) {
@@ -250,81 +247,4 @@
  return builders.spline;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
function getBarPathBuilder({
  pathBuilders,
  barAlignment,
  barWidthFactor,
  barMaxWidth,
  stepInterval,
}: {
  pathBuilders: typeof uPlot.paths;
  barAlignment: BarAlignment;
  barWidthFactor: number;
  barMaxWidth: number;
  stepInterval?: number;
}): Series.PathBuilder {
  if (!builders) {
    throw new Error('Required uPlot path builders are not available');
  }

  const barsPathBuilderFactory = pathBuilders.bars;

  // When a stepInterval is provided (in seconds), cap the maximum bar width
  // so that a single bar never visually spans more than stepInterval worth
  // of time on the x-scale.
  if (
    typeof stepInterval === 'number' &&
    stepInterval > 0 &&
    barsPathBuilderFactory
  ) {
    return (
      self: uPlot,
      seriesIdx: number,
      idx0: number,
      idx1: number,
    ): Series.Paths | null => {
      let effectiveBarMaxWidth = barMaxWidth;

      const xScale = self.scales.x as uPlot.Scale | undefined;
      if (xScale && typeof xScale.min === 'number') {
        const start = xScale.min as number;
        const end = start + stepInterval;
        const startPx = self.valToPos(start, 'x');
        const endPx = self.valToPos(end, 'x');
        const intervalPx = Math.abs(endPx - startPx);

        if (intervalPx > 0) {
          effectiveBarMaxWidth =
            typeof barMaxWidth === 'number'
              ? Math.min(barMaxWidth, intervalPx)
              : intervalPx;
        }
      }

      const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
      if (builders && !builders[barsCfgKey]) {
        builders[barsCfgKey] = barsPathBuilderFactory({
          size: [barWidthFactor, effectiveBarMaxWidth],
          align: barAlignment,
        });
      }

      return builders && builders[barsCfgKey]
        ? builders[barsCfgKey](self, seriesIdx, idx0, idx1)
        : null;
    };
  }

  const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
  if (!builders[barsCfgKey] && barsPathBuilderFactory) {
    builders[barsCfgKey] = barsPathBuilderFactory({
      size: [barWidthFactor, barMaxWidth],
      align: barAlignment,
    });
  }

  return builders[barsCfgKey];
}
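// Numeric sanity check for the cap above (illustrative values, not part of
// the diff): with stepInterval = 60s, the default barMaxWidth of 200, and an
// x-scale where valToPos maps that 60s span to 24px, effectiveBarMaxWidth =
// Math.min(200, 24) = 24, so each bar stays inside its own step window
// instead of bleeding into neighbouring steps.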

export type { SeriesProps };

@@ -176,7 +176,6 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
  show?: boolean;
  spanGaps?: boolean;
  isDarkMode?: boolean;
  stepInterval?: number;
}

export interface LegendItem {

@@ -84,6 +84,8 @@ const DashboardContext = createContext<IDashboardContext>({
  toScrollWidgetId: '',
  setToScrollWidgetId: () => {},
  updateLocalStorageDashboardVariables: () => {},
  variablesToGetUpdated: [],
  setVariablesToGetUpdated: () => {},
  dashboardQueryRangeCalled: false,
  setDashboardQueryRangeCalled: () => {},
  selectedRowWidgetId: '',
@@ -181,6 +183,10 @@ export function DashboardProvider({
    exact: true,
  });

  const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
    [],
  );

  const [layouts, setLayouts] = useState<Layout[]>([]);

  const [panelMap, setPanelMap] = useState<
@@ -511,6 +517,8 @@ export function DashboardProvider({
      updatedTimeRef,
      setToScrollWidgetId,
      updateLocalStorageDashboardVariables,
      variablesToGetUpdated,
      setVariablesToGetUpdated,
      dashboardQueryRangeCalled,
      setDashboardQueryRangeCalled,
      selectedRowWidgetId,
@@ -533,6 +541,8 @@ export function DashboardProvider({
      toScrollWidgetId,
      updateLocalStorageDashboardVariables,
      currentDashboard,
      variablesToGetUpdated,
      setVariablesToGetUpdated,
      dashboardQueryRangeCalled,
      setDashboardQueryRangeCalled,
      selectedRowWidgetId,

@@ -47,6 +47,8 @@ export interface IDashboardContext {
    allSelected: boolean,
    isDynamic?: boolean,
  ) => void;
  variablesToGetUpdated: string[];
  setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
  dashboardQueryRangeCalled: boolean;
  setDashboardQueryRangeCalled: (value: boolean) => void;
  selectedRowWidgetId: string | null;

@@ -1,14 +1,13 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { EQueryType } from 'types/common/dashboard';

import { SuccessResponse, Warning } from '..';
import { Warning } from '..';
import {
  IBuilderFormula,
  IBuilderQuery,
  IClickHouseQuery,
  IPromQLQuery,
} from '../queryBuilder/queryBuilderData';
import { ExecStats } from '../v5/queryRange';
import { QueryData, QueryDataV3 } from '../widgets/getQuery';

export type QueryRangePayload = {
@@ -36,15 +35,8 @@ export interface MetricRangePayloadProps {
    newResult: MetricRangePayloadV3;
    warnings?: string[];
  };
  meta?: ExecStats;
}

/** Query range success response including optional warning and meta */
export type MetricQueryRangeSuccessResponse = SuccessResponse<
  MetricRangePayloadProps,
  unknown
> & { warning?: Warning; meta?: ExecStats };
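// Illustrative consumer (not part of the diff; `queryRangeResponse` is a
// hypothetical value of this type): the intersection lets callers read the
// optional fields directly, e.g.
//   const { warning, meta } = queryRangeResponse;
//   if (meta) console.log(meta.rowsScanned, meta.durationMs);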

export interface MetricRangePayloadV3 {
  data: {
    result: QueryDataV3[];
@@ -52,5 +44,4 @@ export interface MetricRangePayloadV3 {
    warnings?: string[];
  };
  warning?: Warning;
  meta?: ExecStats;
}

@@ -334,7 +334,6 @@ export interface ExecStats {
  rowsScanned: number;
  bytesScanned: number;
  durationMs: number;
  stepIntervals: Record<string, number>;
}

export interface Label {

@@ -3,8 +3,9 @@ package flagger
import "github.com/SigNoz/signoz/pkg/types/featuretypes"

var (
	FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
	FeatureKafkaSpanEval  = featuretypes.MustNewName("kafka_span_eval")
	FeatureUseSpanMetrics       = featuretypes.MustNewName("use_span_metrics")
	FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
	FeatureKafkaSpanEval        = featuretypes.MustNewName("kafka_span_eval")
)

func MustNewRegistry() featuretypes.Registry {
@@ -17,6 +18,14 @@ func MustNewRegistry() featuretypes.Registry {
			DefaultVariant: featuretypes.MustNewName("disabled"),
			Variants:       featuretypes.NewBooleanVariants(),
		},
		&featuretypes.Feature{
			Name:           FeatureInterpolationEnabled,
			Kind:           featuretypes.KindBoolean,
			Stage:          featuretypes.StageExperimental,
			Description:    "Controls whether to enable interpolation",
			DefaultVariant: featuretypes.MustNewName("disabled"),
			Variants:       featuretypes.NewBooleanVariants(),
		},
		&featuretypes.Feature{
			Name: FeatureKafkaSpanEval,
			Kind: featuretypes.KindBoolean,

@@ -3261,20 +3261,14 @@ func (r *ClickHouseReader) GetMetricAggregateAttributes(ctx context.Context, org
		metadata := metadataMap[name]

		typ := string(metadata.MetricType)
		temporality := string(metadata.Temporality)
		isMonotonic := metadata.IsMonotonic

		// Non-monotonic cumulative sums are treated as gauges
		if typ == "Sum" && !isMonotonic && temporality == string(v3.Cumulative) {
			typ = "Gauge"
		}
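		// Example (hypothetical metric name): a cumulative, non-monotonic Sum
		// such as a "queue_depth" style metric can go down as well as up, so
		// it is surfaced with type "Gauge" here rather than "Sum".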

		// unlike traces/logs `tag`/`resource` type, the `Type` will be metric type
		key := v3.AttributeKey{
			Key:      name,
			DataType: v3.AttributeKeyDataTypeFloat64,
			Type:     v3.AttributeKeyType(typ),
			IsColumn: true,
			Key:         name,
			DataType:    v3.AttributeKeyDataTypeFloat64,
			Type:        v3.AttributeKeyType(typ),
			IsMonotonic: metadata.IsMonotonic,
			IsColumn:    true,
		}

		if _, ok := seen[name+typ]; ok {
@@ -5419,6 +5413,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.
		t.metric_name AS metric_name,
		ANY_VALUE(t.description) AS description,
		ANY_VALUE(t.type) AS metric_type,
		ANY_VALUE(t.is_monotonic) AS metric_is_monotonic,
		ANY_VALUE(t.unit) AS metric_unit,
		uniq(t.fingerprint) AS timeseries,
		uniq(metric_name) OVER() AS total
@@ -5450,7 +5445,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.

	for rows.Next() {
		var metric metrics_explorer.MetricDetail
		if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
		if err := rows.Scan(&metric.MetricName, &metric.Description, &metric.MetricType, &metric.IsMonotonic, &metric.MetricUnit, &metric.TimeSeries, &response.Total); err != nil {
			zap.L().Error("Error scanning metric row", zap.Error(err))
			return &response, &model.ApiError{Typ: "ClickHouseError", Err: err}
		}

@@ -36,6 +36,7 @@ type MetricDetail struct {
	TimeSeries   uint64 `json:"timeseries"`
	Samples      uint64 `json:"samples"`
	LastReceived int64  `json:"lastReceived"`
	IsMonotonic  bool   `json:"is_monotonic"`
}

type TreeMapResponseItem struct {

@@ -381,11 +381,12 @@ func (t AttributeKeyType) String() string {
}

type AttributeKey struct {
	Key      string               `json:"key"`
	DataType AttributeKeyDataType `json:"dataType"`
	Type     AttributeKeyType     `json:"type"`
	IsColumn bool                 `json:"isColumn"`
	IsJSON   bool                 `json:"isJSON"`
	Key         string               `json:"key"`
	DataType    AttributeKeyDataType `json:"dataType"`
	Type        AttributeKeyType     `json:"type"`
	IsColumn    bool                 `json:"isColumn"`
	IsMonotonic bool                 `json:"is_monotonic"`
	IsJSON      bool                 `json:"isJSON"`
}

func (a AttributeKey) CacheKey() string {

@@ -205,7 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
			key.Indexes = matchingKey.Indexes
			key.Materialized = matchingKey.Materialized
			key.JSONPlan = matchingKey.JSONPlan


			return actions
		} else {
			// multiple matching keys, set materialized only if all the keys are materialized

@@ -483,22 +483,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
	value1 := v.Visit(values[0])
	value2 := v.Visit(values[1])

	switch value1.(type) {
	case float64:
		if _, ok := value2.(float64); !ok {
			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
			return ""
		}
	case string:
		if _, ok := value2.(string); !ok {
			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
			return ""
		}
	default:
		v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
		return ""
	}

	var conds []string
	for _, key := range keys {
		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
@@ -871,7 +855,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
	// 1. either user meant key ( this is already handled above in fieldKeysForName )
	// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

	// Note:
	// Note:
	// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
	// If user only wants to search `key`, then they have to use `key`
	// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity

@@ -375,6 +375,13 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
		config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
	}

	if os.Getenv("INTERPOLATION_ENABLED") != "" {
		logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
		if config.Flagger.Config.Boolean == nil {
			config.Flagger.Config.Boolean = make(map[string]bool)
		}
		config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
	}
}
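// Illustrative mapping (values assumed for the example): running with
// INTERPOLATION_ENABLED=true sets
// config.Flagger.Config.Boolean["interpolation_enabled"] = true, the same
// effect as SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED=true.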

func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {

@@ -5,7 +5,6 @@ import (
	"fmt"
	"log/slog"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -74,7 +73,7 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
		cteArgs      [][]any
	)

	if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
	if b.metricsStatementBuilder.CanShortCircuitDelta(query) {
		// spatial_aggregation_cte directly for certain delta queries
		if frag, args, err := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables); err != nil {
			return nil, err
@@ -92,9 +91,8 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
	}

	// spatial_aggregation_cte
	if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
		return nil, err
	} else if frag != "" {
	frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
	if frag != "" {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}
@@ -124,16 +122,13 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
	for _, g := range query.GroupBy {
		col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
		if err != nil {
			return "", nil, err
			return "", []any{}, err
		}
		sb.SelectMore(col)
	}

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", nil, err
	}
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
	}
@@ -155,7 +150,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
			Variables: variables,
		}, start, end)
		if err != nil {
			return "", nil, err
			return "", []any{}, err
		}
	}
	if filterWhere != nil {
@@ -213,11 +208,8 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
	}

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
		query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", nil, err
	}
	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
	}
@@ -286,10 +278,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
	}

	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	if err != nil {
		return "", nil, err
	}
	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
	baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))

	baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -326,23 +315,25 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

	switch query.Aggregations[0].TimeAggregation {
	case metrictypes.TimeAggregationRate:
		rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
		wrapped := sqlbuilder.NewSelectBuilder()
		wrapped.Select("ts")
		for _, g := range query.GroupBy {
			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

	case metrictypes.TimeAggregationIncrease:
		incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
		wrapped := sqlbuilder.NewSelectBuilder()
		wrapped.Select("ts")
		for _, g := range query.GroupBy {
			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
		}
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -357,15 +348,7 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
	_ uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
	_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {

	if query.Aggregations[0].SpaceAggregation.IsZero() {
		return "", nil, errors.Newf(
			errors.TypeInvalidInput,
			errors.CodeInvalidInput,
			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
		)
	}
) (string, []any) {
	sb := sqlbuilder.NewSelectBuilder()

	sb.Select("ts")
@@ -382,5 +365,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}
|
||||
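The meter-side refactor above drops the error return from AggregationColumnForSamplesTable and buildSpatialAggregationCTE: with input validation assumed to happen before statement building, these helpers can no longer fail, so call sites collapse from an if/else error chain into a plain assignment. A minimal standalone sketch of the same signature change (illustrative names, not the SigNoz API):

```go
package main

import "fmt"

// aggregationColumn mimics the new infallible shape of
// AggregationColumnForSamplesTable: it always produces a column expression,
// so callers no longer branch on an error.
func aggregationColumn(timeAgg string) string {
	switch timeAgg {
	case "rate", "increase", "sum":
		return "sum(value)"
	case "max":
		return "max(value)"
	default:
		return "anyLast(value)"
	}
}

func main() {
	// Before: aggCol, err := ...; if err != nil { return "", nil, err }
	// After: a single assignment, as in the diff above.
	aggCol := aggregationColumn("rate")
	fmt.Println(aggCol) // sum(value)
}
```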
@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
 		},
 		expectedErr: nil,

@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
+			Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
 		},
 		expectedErr: nil,

@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
 		},
 		expectedErr: nil,

@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
 		},
 		expectedErr: nil,

@@ -3,7 +3,6 @@ package telemetrymeter
 import (
 	"time"
 
-	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 )

@@ -64,7 +63,7 @@ func AggregationColumnForSamplesTable(
 	temporality metrictypes.Temporality,
 	timeAggregation metrictypes.TimeAggregation,
 	tableHints *metrictypes.MetricTableHints,
-) (string, error) {
+) string {
 	tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
 	var aggregationColumn string
 	switch temporality {

@@ -191,13 +190,5 @@ func AggregationColumnForSamplesTable(
 	}
 
 	}
 
-	if aggregationColumn == "" {
-		return "", errors.Newf(
-			errors.TypeInvalidInput,
-			errors.CodeInvalidInput,
-			"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
-		)
-	}
-	return aggregationColumn, nil
+	return aggregationColumn
 }

@@ -29,7 +29,13 @@ func (c *conditionBuilder) conditionFor(
 	sb *sqlbuilder.SelectBuilder,
 ) (string, error) {
 
-	if operator.IsStringSearchOperator() {
+	switch operator {
+	case qbtypes.FilterOperatorContains,
+		qbtypes.FilterOperatorNotContains,
+		qbtypes.FilterOperatorILike,
+		qbtypes.FilterOperatorNotILike,
+		qbtypes.FilterOperatorLike,
+		qbtypes.FilterOperatorNotLike:
 		value = querybuilder.FormatValueForContains(value)
 	}

@@ -38,18 +44,6 @@ func (c *conditionBuilder) conditionFor(
 		return "", err
 	}
 
-	// TODO(srikanthccv): use the same data type collision handling when metrics schemas are updated
-	switch v := value.(type) {
-	case float64:
-		tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
-	case []any:
-		if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
-			if _, ok := v[0].(float64); ok {
-				tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
-			}
-		}
-	}
-
 	switch operator {
 	case qbtypes.FilterOperatorEqual:
 		return sb.E(tblFieldName, value), nil
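The deleted switch above was the meter condition builder's numeric-coercion shim: when a filter value (or a between endpoint) arrived as float64, it wrapped the label expression in ClickHouse's toFloat64OrNull so string-stored values compare numerically. A small sketch of what that coercion produced, assuming a JSON-extracted label column (illustrative names only):

```go
package main

import "fmt"

func main() {
	// Hypothetical field expression as a condition builder might emit it.
	tblFieldName := "JSONExtractString(labels, 'status_code')"

	// The removed code wrapped the column whenever the filter value was a
	// float64, e.g. for a filter like status_code >= 400.
	var value any = 400.0
	if _, ok := value.(float64); ok {
		tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
	}

	fmt.Println(tblFieldName)
	// toFloat64OrNull(JSONExtractString(labels, 'status_code'))
}
```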
@@ -5,27 +5,67 @@ import (
 	"fmt"
 	"log/slog"
 
 	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/factory"
+	"github.com/SigNoz/signoz/pkg/flagger"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
+	"github.com/SigNoz/signoz/pkg/types/featuretypes"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
 	"github.com/huandu/go-sqlbuilder"
 	"golang.org/x/exp/slices"
 )
 
 const (
-	RateTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))`
+	RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
+	IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
 
-	IncreaseTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value, per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)`
+	RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
+	IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s, ((%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
+	OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
 
-	RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1) OVER rate_window), (%s - lagInFrame(%s, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))) AS per_series_value`
+	RateWithInterpolation = `
+	CASE
+		WHEN row_number() OVER rate_window = 1 THEN
+			-- First row: try to interpolate using next value
+			CASE
+				WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
+					-- Assume linear growth to next point
+					(leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
+					(leadInFrame(ts, 1) OVER rate_window - ts)
+				ELSE
+					0 -- No next value either, can't interpolate
+			END
+		WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
+			-- Counter reset detected
+			per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
+		ELSE
+			-- Normal case: calculate rate
+			(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
+			(ts - lagInFrame(ts, 1) OVER rate_window)
+	END`
 
-	IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`
-
-	OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
+	IncreaseWithInterpolation = `
+	CASE
+		WHEN row_number() OVER rate_window = 1 THEN
+			-- First row: try to interpolate using next value
+			CASE
+				WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
+					-- Calculate the interpolated increase for this interval
+					((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
+					(leadInFrame(ts, 1) OVER rate_window - ts)) *
+					(leadInFrame(ts, 1) OVER rate_window - ts)
+				ELSE
+					0 -- No next value either, can't interpolate
+			END
+		WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
+			-- Counter reset detected: the increase is the current value
+			per_series_value
+		ELSE
+			-- Normal case: calculate increase
+			(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
+	END`
 )
 
 type MetricQueryStatementBuilder struct {
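RateWithoutNegative and IncreaseWithoutNegative are fmt templates whose two %d verbs are both filled with the query's start timestamp in milliseconds; that timestamp becomes the lagInFrame default for ts, so the first point of each series gets a finite window instead of the nan that the old multiIf templates produced. A hedged sketch of the instantiation (the constant is copied from the diff; the real one lives in the telemetrymetrics package):

```go
package main

import "fmt"

// Copy of the RateWithoutNegative template shown in the diff above.
const rateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`

func main() {
	var start uint64 = 1747785600000 // query start, unix millis

	// Both %d verbs receive the same start timestamp, matching the
	// fmt.Sprintf(RateWithoutNegative, start, start) call sites above.
	rateExpr := fmt.Sprintf(rateWithoutNegative, start, start)
	fmt.Println(rateExpr)
}
```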
@@ -107,6 +147,54 @@ func (b *MetricQueryStatementBuilder) Build(
 	return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
 }
 
+// Fast-path (no fingerprint grouping)
+// CanShortCircuitDelta returns true if we can use the optimized query
+// for the given query.
+// This is used to avoid the group by fingerprint, thus improving the
+// performance for certain queries.
+// Cases where we can short circuit:
+// 1. time aggregation = (rate|increase) and space aggregation = sum
+//    - rate = sum(value)/step, increase = sum(value); sum of sums is the same as sum of all values
+//
+// 2. time aggregation = sum and space aggregation = sum
+//    - sum of sums is the same as sum of all values
+//
+// 3. time aggregation = min and space aggregation = min
+//    - min of mins is the same as min of all values
+//
+// 4. time aggregation = max and space aggregation = max
+//    - max of maxes is the same as max of all values
+//
+// 5. special case exphist: there is no need for per-series/fingerprint
+//    aggregation, we can directly use the quantilesDDMerge function
+//
+// All of this is true only for delta metrics.
+func (b *MetricQueryStatementBuilder) CanShortCircuitDelta(q qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) bool {
+	if q.Aggregations[0].Temporality != metrictypes.Delta {
+		return false
+	}
+
+	ta := q.Aggregations[0].TimeAggregation
+	sa := q.Aggregations[0].SpaceAggregation
+
+	if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) && sa == metrictypes.SpaceAggregationSum {
+		return true
+	}
+	if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
+		return true
+	}
+	if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
+		return true
+	}
+	if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
+		return true
+	}
+	if q.Aggregations[0].Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
+		return true
+	}
+	return false
+}
+
 func (b *MetricQueryStatementBuilder) buildPipelineStatement(
 	ctx context.Context,
 	start, end uint64,
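The aggregation identities in the comment are what make the fast path safe: for delta metrics, summing per-series sums equals summing all raw values, so the per-fingerprint CTE can be skipped. A small worked example of the sum case, in plain Go with no SigNoz types:

```go
package main

import "fmt"

func main() {
	// Two series (fingerprints) reporting delta counter samples in one step.
	seriesA := []float64{3, 4} // per-series sum = 7
	seriesB := []float64{5}    // per-series sum = 5

	// Two-stage path: time-aggregate per series, then space-aggregate.
	perSeries := []float64{7, 5}
	var twoStage float64
	for _, v := range perSeries {
		twoStage += v
	}

	// Short-circuit path: sum every raw value directly, no fingerprint grouping.
	var oneStage float64
	for _, v := range append(seriesA, seriesB...) {
		oneStage += v
	}

	fmt.Println(twoStage == oneStage) // true: sum of sums == sum of all values
}
```

The same identity fails for avg with avg (an average of averages weights series unequally), which is why that combination stays on the two-stage path.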
@@ -168,11 +256,10 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
 		return nil, err
 	}
 
-	if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
+	if b.CanShortCircuitDelta(query) {
 		// spatial_aggregation_cte directly for certain delta queries
-		if frag, args, err := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); err != nil {
-			return nil, err
-		} else if frag != "" {
+		frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
+		if frag != "" {
 			cteFragments = append(cteFragments, frag)
 			cteArgs = append(cteArgs, args)
 		}

@@ -186,9 +273,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
 	}
 
 	// spatial_aggregation_cte
-	if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
-		return nil, err
-	} else if frag != "" {
+	frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
+	if frag != "" {
 		cteFragments = append(cteFragments, frag)
 		cteArgs = append(cteArgs, args)
 	}

@@ -208,7 +294,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
 	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
 	timeSeriesCTE string,
 	timeSeriesCTEArgs []any,
-) (string, []any, error) {
+) (string, []any) {
 	stepSec := int64(query.StepInterval.Seconds())
 
 	sb := sqlbuilder.NewSelectBuilder()

@@ -221,15 +307,11 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
 		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 	}
 
-	aggCol, err := AggregationColumnForSamplesTable(
+	aggCol := AggregationColumnForSamplesTable(
 		start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
 		query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
 	)
-	if err != nil {
-		return "", nil, err
-	}
 	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
 		// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
 		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
 	}

@@ -252,7 +334,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
 	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
 
 	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
-	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
+	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
 }
 
 func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(

@@ -355,12 +437,8 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
 		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 	}
 
-	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	if err != nil {
-		return "", nil, err
-	}
+	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
 	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
 		// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
 		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
 	}

@@ -383,7 +461,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
 }
 
 func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
-	_ context.Context,
+	ctx context.Context,
 	start, end uint64,
 	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
 	timeSeriesCTE string,

@@ -401,10 +479,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
 		baseSb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 	}
 
-	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	if err != nil {
-		return "", nil, err
-	}
+	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
 	baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
 
 	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)

@@ -421,25 +496,36 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
 
 	innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
 
+	// ! TODO (balanikaran) Get OrgID via function parameter instead of valuer.GenerateUUID()
+	interpolationEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureInterpolationEnabled, featuretypes.NewFlaggerEvaluationContext(valuer.GenerateUUID()))
+
 	switch query.Aggregations[0].TimeAggregation {
 	case metrictypes.TimeAggregationRate:
+		rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
+		if interpolationEnabled {
+			rateExpr = RateWithInterpolation
+		}
 		wrapped := sqlbuilder.NewSelectBuilder()
 		wrapped.Select("ts")
 		for _, g := range query.GroupBy {
 			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 		}
-		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", RateTmpl))
+		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
 		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
 		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
 		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
 
 	case metrictypes.TimeAggregationIncrease:
+		incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
+		if interpolationEnabled {
+			incExpr = IncreaseWithInterpolation
+		}
 		wrapped := sqlbuilder.NewSelectBuilder()
 		wrapped.Select("ts")
 		for _, g := range query.GroupBy {
 			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 		}
-		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", IncreaseTmpl))
+		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
 		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
 		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
 		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
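The rate and increase branches now choose between two expressions at query-build time: the default resets-safe template, or the interpolating CASE expression when the flagger reports FeatureInterpolationEnabled for the evaluation context. A minimal sketch of that gating pattern (the flag name comes from the diff; the booleanOrEmpty stub below is illustrative and stands in for the flagger call):

```go
package main

import "fmt"

// booleanOrEmpty stands in for flagger.BooleanOrEmpty; assume it consults a
// per-org feature-flag store and returns false when the flag is unset.
func booleanOrEmpty(flag string) bool { return false }

func main() {
	defaultExpr := "If(...)"   // RateWithoutNegative, filled with the start timestamp
	interpolated := "CASE ... END" // RateWithInterpolation

	rateExpr := defaultExpr
	if booleanOrEmpty("interpolation_enabled") {
		rateExpr = interpolated
	}
	fmt.Println(rateExpr) // default template while the flag is off
}
```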
@@ -448,6 +534,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
 	}
 }
 
+// Because RateWithInterpolation is not enabled anywhere yet (there are gaps in the logic w.r.t. cache handling), it has not been considered for the multi-temporality path.
 func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
 	_ context.Context,
 	start, end uint64,

@@ -466,32 +553,18 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
 		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 	}
 
-	aggForDeltaTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	if err != nil {
-		return "", nil, err
-	}
-	aggForCumulativeTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	if err != nil {
-		return "", nil, err
-	}
+	aggForDeltaTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	aggForCumulativeTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
 	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
 		aggForDeltaTemporality = fmt.Sprintf("%s/%d", aggForDeltaTemporality, stepSec)
 	}
 
 	switch query.Aggregations[0].TimeAggregation {
 	case metrictypes.TimeAggregationRate:
-		rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality,
-			aggForDeltaTemporality,
-			aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
-			aggForCumulativeTemporality, aggForCumulativeTemporality,
-		)
+		rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, aggForCumulativeTemporality, aggForCumulativeTemporality, start)
 		sb.SelectMore(rateExpr)
 	case metrictypes.TimeAggregationIncrease:
-		increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality,
-			aggForDeltaTemporality,
-			aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
-			aggForCumulativeTemporality, aggForCumulativeTemporality,
-		)
+		increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, start)
 		sb.SelectMore(increaseExpr)
 	default:
 		expr := fmt.Sprintf(OthersMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality)
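The reworked call sites thread the start timestamp into the templates' %d verbs, so the argument order must track the verb order exactly: for the rate template, the delta expression first, then the cumulative expression for each %s, with start wherever a %d appears. A sketch that exercises the same verb order with an abbreviated stand-in template (the real constant is the one in the diff):

```go
package main

import "fmt"

// Abbreviated stand-in with the same verb order as
// RateWithoutNegativeMultiTemporality: a leading %s for the delta branch,
// then %s %s %s %d %s %s %d for the cumulative branch.
const rateMulti = `IF(delta, %s, IF((%s - lag(%s)) < 0, %s / (ts - lag(ts, %d)), (%s - lag(%s)) / (ts - lag(ts, %d))))`

func main() {
	delta, cum := "sum(value)/30", "max(value)"
	var start uint64 = 1747785600000

	// Mirrors the flattened fmt.Sprintf call in the diff above.
	expr := fmt.Sprintf(rateMulti, delta, cum, cum, cum, start, cum, cum, start)
	fmt.Println(expr)
}
```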
@@ -519,14 +592,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
 	_ uint64,
 	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
 	_ map[string][]*telemetrytypes.TelemetryFieldKey,
-) (string, []any, error) {
-	if query.Aggregations[0].SpaceAggregation.IsZero() {
-		return "", nil, errors.Newf(
-			errors.TypeInvalidInput,
-			errors.CodeInvalidInput,
-			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
-		)
-	}
+) (string, []any) {
 	sb := sqlbuilder.NewSelectBuilder()
 
 	sb.Select("ts")

@@ -543,7 +609,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
 	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
 
 	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
-	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
+	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
 }
 
 func (b *MetricQueryStatementBuilder) BuildFinalSelect(

@@ -575,7 +641,9 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
 		quantile,
 	))
 	sb.From("__spatial_aggregation_cte")
-	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
+	for _, g := range query.GroupBy {
+		sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
+	}
 	sb.GroupBy("ts")
 	if query.Having != nil && query.Having.Expression != "" {
 		rewriter := querybuilder.NewHavingExpressionRewriter()

@@ -591,8 +659,6 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
 			sb.Where(rewrittenExpr)
 		}
 	}
-	sb.OrderBy(querybuilder.GroupByKeys(query.GroupBy)...)
-	sb.OrderBy("ts")
 
 	q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
 	return &qbtypes.Statement{Query: combined + q, Args: append(args, a...)}, nil
@@ -50,7 +50,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
 		},
 		expectedErr: nil,

@@ -83,7 +83,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
 		},
 		expectedErr: nil,

@@ -116,7 +116,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
+			Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
 		},
 		expectedErr: nil,

@@ -148,7 +148,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
+			Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
 			Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
 		},
 		expectedErr: nil,

@@ -181,7 +181,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
 			Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
 		},
 		expectedErr: nil,

@@ -210,7 +210,7 @@ func TestStatementBuilder(t *testing.T) {
 			},
 		},
 		expected: qbtypes.Statement{
-			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
+			Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
 			Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947390000), uint64(1747983420000), 0},
 		},
 		expectedErr: nil,

@@ -3,7 +3,6 @@ package telemetrymetrics
 import (
 	"time"
 
-	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 )

@@ -169,7 +168,7 @@ func AggregationColumnForSamplesTable(
 	temporality metrictypes.Temporality,
 	timeAggregation metrictypes.TimeAggregation,
 	tableHints *metrictypes.MetricTableHints,
-) (string, error) {
+) string {
 	tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
 	var aggregationColumn string
 	switch temporality {

@@ -299,12 +298,5 @@ func AggregationColumnForSamplesTable(
 			}
 		}
 	}
-	if aggregationColumn == "" {
-		return "", errors.Newf(
-			errors.TypeInvalidInput,
-			errors.CodeInvalidInput,
-			"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
-		)
-	}
-	return aggregationColumn, nil
+	return aggregationColumn
 }
@@ -35,7 +35,13 @@ func (c *conditionBuilder) conditionFor(
 	sb *sqlbuilder.SelectBuilder,
 ) (string, error) {
 
-	if operator.IsStringSearchOperator() {
+	switch operator {
+	case qbtypes.FilterOperatorContains,
+		qbtypes.FilterOperatorNotContains,
+		qbtypes.FilterOperatorILike,
+		qbtypes.FilterOperatorNotILike,
+		qbtypes.FilterOperatorLike,
+		qbtypes.FilterOperatorNotLike:
 		value = querybuilder.FormatValueForContains(value)
 	}

@@ -152,9 +152,7 @@ func (f FilterOperator) IsStringSearchOperator() bool {
 		FilterOperatorILike,
 		FilterOperatorNotILike,
 		FilterOperatorLike,
-		FilterOperatorNotLike,
-		FilterOperatorRegexp,
-		FilterOperatorNotRegexp:
+		FilterOperatorNotLike:
 		return true
 	default:
 		return false
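Narrowing IsStringSearchOperator, together with the explicit operator switch in the condition builders above, keeps FormatValueForContains away from regexp filters: normalizing a regex pattern the way contains/like values are normalized could change what it matches. A self-contained sketch of the new classification (local stand-in types; the real operators live in qbtypes):

```go
package main

import "fmt"

type FilterOperator int

const (
	OpContains FilterOperator = iota
	OpNotContains
	OpLike
	OpNotLike
	OpILike
	OpNotILike
	OpRegexp
	OpNotRegexp
)

// isStringSearchOperator mirrors the narrowed predicate from the diff:
// regexp operators no longer count as string search, so their patterns
// bypass FormatValueForContains-style normalization.
func isStringSearchOperator(op FilterOperator) bool {
	switch op {
	case OpContains, OpNotContains, OpILike, OpNotILike, OpLike, OpNotLike:
		return true
	default:
		return false
	}
}

func main() {
	fmt.Println(isStringSearchOperator(OpContains)) // true
	fmt.Println(isStringSearchOperator(OpRegexp))   // false after this change
}
```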
@@ -3,7 +3,6 @@ package querybuildertypesv5
 import (
 	"fmt"
 
-	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 )

@@ -175,54 +174,3 @@ func (q *QueryBuilderQuery[T]) Normalize() {
 	}
 
 }
 
-// Fast-path (no fingerprint grouping)
-// canShortCircuitDelta returns true if we can use the optimized query
-// for the given query
-// This is used to avoid the group by fingerprint thus improving the performance
-// for certain queries
-// cases where we can short circuit:
-// 1. time aggregation = (rate|increase) and space aggregation = sum
-//    - rate = sum(value)/step, increase = sum(value) - sum of sums is same as sum of all values
-//
-// 2. time aggregation = sum and space aggregation = sum
-//    - sum of sums is same as sum of all values
-//
-// 3. time aggregation = min and space aggregation = min
-//    - min of mins is same as min of all values
-//
-// 4. time aggregation = max and space aggregation = max
-//    - max of maxs is same as max of all values
-//
-// 5. special case exphist, there is no need for per series/fingerprint aggregation
-//    we can directly use the quantilesDDMerge function
-//
-// all of this is true only for delta metrics
-func CanShortCircuitDelta(metricAgg MetricAggregation) bool {
-	if metricAgg.Temporality != metrictypes.Delta {
-		return false
-	}
-
-	ta := metricAgg.TimeAggregation
-	sa := metricAgg.SpaceAggregation
-
-	if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) &&
-		sa == metrictypes.SpaceAggregationSum {
-		return true
-	}
-	if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
-		return true
-	}
-	if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
-		return true
-	}
-	if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
-		return true
-	}
-	if metricAgg.Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
-		return true
-	}
-
-	return false
-}
@@ -12,7 +12,6 @@ import (
 var (
 	ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
 	ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
-	ErrBetweenValuesType = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values of the number type")
 	ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
 	ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
 )
@@ -75,7 +75,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
|
||||
case QueryTypeFormula:
|
||||
var spec QueryBuilderFormula
|
||||
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
|
||||
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
|
||||
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "formula spec"); err != nil {
|
||||
return wrapUnmarshalError(err, "invalid formula spec: %v", err)
|
||||
}
|
||||
@@ -83,7 +83,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
|
||||
case QueryTypeJoin:
|
||||
var spec QueryBuilderJoin
|
||||
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
|
||||
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
|
||||
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "join spec"); err != nil {
|
||||
return wrapUnmarshalError(err, "invalid join spec: %v", err)
|
||||
}
|
||||
@@ -98,7 +98,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
|
||||
case QueryTypePromQL:
|
||||
var spec PromQuery
|
||||
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for PromQuery
|
||||
// TODO: use json.Unmarshal here after implementing custom unmarshaler for PromQuery
|
||||
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "PromQL spec"); err != nil {
|
||||
return wrapUnmarshalError(err, "invalid PromQL spec: %v", err)
|
||||
}
|
||||
@@ -106,7 +106,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
|
||||
|
||||
case QueryTypeClickHouseSQL:
|
||||
var spec ClickHouseQuery
|
||||
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
|
||||
// TODO: use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
|
||||
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "ClickHouse SQL spec"); err != nil {
|
||||
return wrapUnmarshalError(err, "invalid ClickHouse SQL spec: %v", err)
|
||||
}
|
||||
@@ -439,7 +439,7 @@ func (r *QueryRangeRequest) GetQueriesSupportingZeroDefault() map[string]bool {
|
||||
expr = strings.ToLower(expr)
|
||||
// only pure additive/counting operations should default to zero,
|
||||
// while statistical/analytical operations should show gaps when there's no data to analyze.
|
||||
// TODO(srikanthccv): use newExprVisitor for getting the function used in the expression
|
||||
// TODO: use newExprVisitor for getting the function used in the expression
|
||||
if strings.HasPrefix(expr, "count(") ||
|
||||
strings.HasPrefix(expr, "count_distinct(") ||
|
||||
strings.HasPrefix(expr, "sum(") ||
|
||||
|
||||
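The prefix check above classifies expressions into additive/counting ones (zero is a sensible default for empty buckets) and statistical ones (an empty bucket should render as a gap). A small sketch of that classification; the helper name and the exact prefix list here are assumptions for illustration, not the full SigNoz implementation.

```go
package main

import (
	"fmt"
	"strings"
)

// supportsZeroDefault is a hypothetical helper mirroring the prefix check
// above: additive/counting expressions default to zero on missing data.
func supportsZeroDefault(expr string) bool {
	expr = strings.ToLower(expr)
	for _, p := range []string{"count(", "count_distinct(", "sum("} {
		if strings.HasPrefix(expr, p) {
			return true
		}
	}
	// statistical/analytical expressions (avg, p99, ...) should show gaps
	return false
}

func main() {
	fmt.Println(supportsZeroDefault("COUNT()"))    // true
	fmt.Println(supportsZeroDefault("avg(bytes)")) // false
}
```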
@@ -21,12 +21,3 @@ var (
	// []Bucket (struct{Lower,Upper,Count float64}), example: histogram
	RequestTypeDistribution = RequestType{valuer.NewString("distribution")}
)

// IsAggregation returns true for request types that produce aggregated results
// (time_series, scalar, distribution). For these types, fields like groupBy,
// having, aggregations, and orderBy (with aggregation key validation) are meaningful.
// For non-aggregation types (raw, raw_stream, trace), those fields are ignored
// and don't need to be validated.
func (r RequestType) IsAggregation() bool {
	return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}
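As a quick runnable illustration of the contract documented above (the type below is a local stand-in, not the real RequestType):

```go
package main

import "fmt"

type requestType string

// isAggregation mirrors the documented rule: only time_series, scalar,
// and distribution produce aggregated results.
func (r requestType) isAggregation() bool {
	return r == "time_series" || r == "scalar" || r == "distribution"
}

func main() {
	for _, r := range []requestType{"time_series", "scalar", "distribution", "raw", "raw_stream", "trace"} {
		fmt.Printf("%-12s aggregation=%v\n", r, r.isAggregation())
	}
}
```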
@@ -10,78 +10,54 @@ import (
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

// queryName returns the name from any query envelope spec type.
func (e QueryEnvelope) queryName() string {
	switch spec := e.Spec.(type) {
	case QueryBuilderQuery[TraceAggregation]:
		return spec.Name
	case QueryBuilderQuery[LogAggregation]:
		return spec.Name
	case QueryBuilderQuery[MetricAggregation]:
		return spec.Name
	case QueryBuilderFormula:
		return spec.Name
	case QueryBuilderTraceOperator:
		return spec.Name
	case QueryBuilderJoin:
		return spec.Name
	case PromQuery:
		return spec.Name
	case ClickHouseQuery:
		return spec.Name
	}
	return ""
}

// isDisabled returns the disabled status from any query envelope spec type.
func (e QueryEnvelope) isDisabled() bool {
	switch spec := e.Spec.(type) {
	case QueryBuilderQuery[TraceAggregation]:
		return spec.Disabled
	case QueryBuilderQuery[LogAggregation]:
		return spec.Disabled
	case QueryBuilderQuery[MetricAggregation]:
		return spec.Disabled
	case QueryBuilderFormula:
		return spec.Disabled
	case QueryBuilderTraceOperator:
		return spec.Disabled
	case QueryBuilderJoin:
		return spec.Disabled
	case PromQuery:
		return spec.Disabled
	case ClickHouseQuery:
		return spec.Disabled
	}
	return false
}

// getQueryIdentifier returns a friendly identifier for a query based on its type and name/content
func getQueryIdentifier(envelope QueryEnvelope, index int) string {
	name := envelope.queryName()

	var typeLabel string
	switch envelope.Type {
	case QueryTypeBuilder, QueryTypeSubQuery:
		typeLabel = "query"
		switch spec := envelope.Spec.(type) {
		case QueryBuilderQuery[TraceAggregation]:
			if spec.Name != "" {
				return fmt.Sprintf("query '%s'", spec.Name)
			}
			return fmt.Sprintf("trace query at position %d", index+1)
		case QueryBuilderQuery[LogAggregation]:
			if spec.Name != "" {
				return fmt.Sprintf("query '%s'", spec.Name)
			}
			return fmt.Sprintf("log query at position %d", index+1)
		case QueryBuilderQuery[MetricAggregation]:
			if spec.Name != "" {
				return fmt.Sprintf("query '%s'", spec.Name)
			}
			return fmt.Sprintf("metric query at position %d", index+1)
		}
	case QueryTypeFormula:
		typeLabel = "formula"
		if spec, ok := envelope.Spec.(QueryBuilderFormula); ok && spec.Name != "" {
			return fmt.Sprintf("formula '%s'", spec.Name)
		}
		return fmt.Sprintf("formula at position %d", index+1)
	case QueryTypeTraceOperator:
		typeLabel = "trace operator"
		if spec, ok := envelope.Spec.(QueryBuilderTraceOperator); ok && spec.Name != "" {
			return fmt.Sprintf("trace operator '%s'", spec.Name)
		}
		return fmt.Sprintf("trace operator at position %d", index+1)
	case QueryTypeJoin:
		typeLabel = "join"
		if spec, ok := envelope.Spec.(QueryBuilderJoin); ok && spec.Name != "" {
			return fmt.Sprintf("join '%s'", spec.Name)
		}
		return fmt.Sprintf("join at position %d", index+1)
	case QueryTypePromQL:
		typeLabel = "PromQL query"
		if spec, ok := envelope.Spec.(PromQuery); ok && spec.Name != "" {
			return fmt.Sprintf("PromQL query '%s'", spec.Name)
		}
		return fmt.Sprintf("PromQL query at position %d", index+1)
	case QueryTypeClickHouseSQL:
		typeLabel = "ClickHouse query"
	default:
		typeLabel = "query"
		if spec, ok := envelope.Spec.(ClickHouseQuery); ok && spec.Name != "" {
			return fmt.Sprintf("ClickHouse query '%s'", spec.Name)
		}
		return fmt.Sprintf("ClickHouse query at position %d", index+1)
	}

	if name != "" {
		return fmt.Sprintf("%s '%s'", typeLabel, name)
	}
	return fmt.Sprintf("%s at position %d", typeLabel, index+1)
	return fmt.Sprintf("query at position %d", index+1)
}
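Stripped of the per-type switches, the naming fallback above reduces to one rule: prefer the query's own name, otherwise report its 1-based position. A minimal stand-in sketch of just that rule:

```go
package main

import "fmt"

// identifier is a simplified stand-in for getQueryIdentifier's fallback.
func identifier(typeLabel, name string, index int) string {
	if name != "" {
		return fmt.Sprintf("%s '%s'", typeLabel, name)
	}
	return fmt.Sprintf("%s at position %d", typeLabel, index+1)
}

func main() {
	fmt.Println(identifier("formula", "F1", 0)) // formula 'F1'
	fmt.Println(identifier("formula", "", 1))   // formula at position 2
}
```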
const (
@@ -96,12 +72,11 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
		return err
	}

	if err := q.validateAggregations(requestType); err != nil {
		return err
	}

	if err := q.validateGroupBy(requestType); err != nil {
		return err
	// Validate aggregations only for non-raw request types
	if requestType != RequestTypeRaw && requestType != RequestTypeRawStream && requestType != RequestTypeTrace {
		if err := q.validateAggregations(); err != nil {
			return err
		}
	}

	// Validate limit and pagination
@@ -119,23 +94,32 @@ func (q *QueryBuilderQuery[T]) Validate(requestType RequestType) error {
		return err
	}

	if err := q.validateOrderBy(requestType); err != nil {
		return err
	if requestType != RequestTypeRaw && requestType != RequestTypeTrace && len(q.Aggregations) > 0 {
		if err := q.validateOrderByForAggregation(); err != nil {
			return err
		}
	} else {
		if err := q.validateOrderBy(); err != nil {
			return err
		}
	}

	if err := q.validateSelectFields(requestType); err != nil {
		return err
	if requestType != RequestTypeRaw && requestType != RequestTypeTrace {
		if err := q.validateHaving(); err != nil {
			return err
		}
	}

	if requestType == RequestTypeRaw {
		if err := q.validateSelectFields(); err != nil {
			return err
		}
	}

	return nil
}

func (q *QueryBuilderQuery[T]) validateSelectFields(requestType RequestType) error {
	// selectFields don't apply to aggregation queries, skip validation
	if requestType.IsAggregation() {
		return nil
	}

func (q *QueryBuilderQuery[T]) validateSelectFields() error {
	// isRoot and isEntryPoint are returned by the Metadata API, so if someone sends them, we have to reject the request.
	for _, v := range q.SelectFields {
		if v.Name == "isRoot" || v.Name == "isEntryPoint" {
@@ -148,21 +132,6 @@ func (q *QueryBuilderQuery[T]) validateSelectFields(requestType RequestType) err
	return nil
}

func (q *QueryBuilderQuery[T]) validateGroupBy(requestType RequestType) error {
	// groupBy doesn't apply to non-aggregation queries, skip validation
	if !requestType.IsAggregation() {
		return nil
	}
	for idx, item := range q.GroupBy {
		if item.TelemetryFieldKey.Name == "" {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput, "invalid empty key name for group by at index %d", idx,
			)
		}
	}
	return nil
}

func (q *QueryBuilderQuery[T]) validateSignal() error {
	// Signal validation is handled during unmarshaling in req.go
	// Valid signals are: metrics, traces, logs
@@ -183,12 +152,7 @@ func (q *QueryBuilderQuery[T]) validateSignal() error {
	}
}

func (q *QueryBuilderQuery[T]) validateAggregations(requestType RequestType) error {
	// aggregations don't apply to non-aggregation queries, skip validation
	if !requestType.IsAggregation() {
		return nil
	}

func (q *QueryBuilderQuery[T]) validateAggregations() error {
	// At least one aggregation required for non-disabled queries
	if len(q.Aggregations) == 0 && !q.Disabled {
		return errors.NewInvalidInputf(
@@ -215,6 +179,14 @@ func (q *QueryBuilderQuery[T]) validateAggregations(requestType RequestType) err
				aggId,
			)
		}
		// Validate metric-specific aggregations
		if err := validateMetricAggregation(v); err != nil {
			aggId := fmt.Sprintf("aggregation #%d", i+1)
			if q.Name != "" {
				aggId = fmt.Sprintf("aggregation #%d in query '%s'", i+1, q.Name)
			}
			return wrapValidationError(err, aggId, "invalid metric %s: %s")
		}
	case TraceAggregation:
		if v.Expression == "" {
			aggId := fmt.Sprintf("aggregation #%d", i+1)
@@ -329,7 +301,7 @@ func (q *QueryBuilderQuery[T]) validateSecondaryAggregations() error {
	return nil
}

func (q *QueryBuilderQuery[T]) validateOrderBy(requestType RequestType) error {
func (q *QueryBuilderQuery[T]) validateOrderBy() error {
	for i, order := range q.Order {
		// Direction validation is handled by the OrderDirection type
		if order.Direction != OrderDirectionAsc && order.Direction != OrderDirectionDesc {
@@ -347,12 +319,6 @@ func (q *QueryBuilderQuery[T]) validateOrderBy(requestType RequestType) error {
			)
		}
	}

	// aggregation-specific order key validation only applies to aggregation queries
	if requestType.IsAggregation() {
		return q.validateOrderByForAggregation()
	}

	return nil
}

@@ -362,6 +328,10 @@ func (q *QueryBuilderQuery[T]) validateOrderBy(requestType RequestType) error {
// 2. Aggregation expressions or aliases
// 3. Aggregation index (0, 1, 2, etc.)
func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
	// First validate basic order by constraints
	if err := q.validateOrderBy(); err != nil {
		return err
	}

	validOrderKeys := make(map[string]bool)

@@ -431,6 +401,22 @@ func (q *QueryBuilderQuery[T]) validateOrderByForAggregation() error {
	return nil
}

func (q *QueryBuilderQuery[T]) validateHaving() error {
	if q.Having == nil || q.Having.Expression == "" {
		return nil
	}

	// ensure that having is only used with aggregations
	if len(q.Aggregations) == 0 {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"having clause can only be used with aggregation queries. Use `filter.expression` instead",
		)
	}

	return nil
}
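The rule validateHaving enforces is simple: a having expression filters aggregated rows, so it is meaningless without aggregations. A runnable stand-in sketch of that rule (illustrative only):

```go
package main

import (
	"errors"
	"fmt"
)

// checkHaving mirrors the validation above: having requires aggregations.
func checkHaving(havingExpr string, numAggregations int) error {
	if havingExpr == "" {
		return nil
	}
	if numAggregations == 0 {
		return errors.New("having clause can only be used with aggregation queries. Use `filter.expression` instead")
	}
	return nil
}

func main() {
	fmt.Println(checkHaving("count() > 10", 0)) // rejected: no aggregations
	fmt.Println(checkHaving("count() > 10", 1)) // <nil>
}
```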
// ValidateQueryRangeRequest validates the entire query range request
func (r *QueryRangeRequest) Validate() error {
	// Validate time range
@@ -470,20 +456,236 @@ func (r *QueryRangeRequest) Validate() error {

// validateAllQueriesNotDisabled validates that at least one query in the composite query is enabled
func (r *QueryRangeRequest) validateAllQueriesNotDisabled() error {
	allDisabled := true
	for _, envelope := range r.CompositeQuery.Queries {
		if !envelope.isDisabled() {
			return nil
		switch envelope.Type {
		case QueryTypeBuilder, QueryTypeSubQuery:
			switch spec := envelope.Spec.(type) {
			case QueryBuilderQuery[TraceAggregation]:
				if !spec.Disabled {
					allDisabled = false
				}
			case QueryBuilderQuery[LogAggregation]:
				if !spec.Disabled {
					allDisabled = false
				}
			case QueryBuilderQuery[MetricAggregation]:
				if !spec.Disabled {
					allDisabled = false
				}
			}
		case QueryTypeFormula:
			if spec, ok := envelope.Spec.(QueryBuilderFormula); ok && !spec.Disabled {
				allDisabled = false
			}
		case QueryTypeTraceOperator:
			if spec, ok := envelope.Spec.(QueryBuilderTraceOperator); ok && !spec.Disabled {
				allDisabled = false
			}
		case QueryTypeJoin:
			if spec, ok := envelope.Spec.(QueryBuilderJoin); ok && !spec.Disabled {
				allDisabled = false
			}
		case QueryTypePromQL:
			if spec, ok := envelope.Spec.(PromQuery); ok && !spec.Disabled {
				allDisabled = false
			}
		case QueryTypeClickHouseSQL:
			if spec, ok := envelope.Spec.(ClickHouseQuery); ok && !spec.Disabled {
				allDisabled = false
			}
		}

		// Early exit if we find at least one enabled query
		if !allDisabled {
			break
		}
	}

	return errors.NewInvalidInputf(
		errors.CodeInvalidInput,
		"all queries are disabled - at least one query must be enabled",
	)
	if allDisabled {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"all queries are disabled - at least one query must be enabled",
		)
	}

	return nil
}

func (r *QueryRangeRequest) validateCompositeQuery() error {
	return r.CompositeQuery.Validate(r.RequestType)
	// Validate queries in composite query
	if len(r.CompositeQuery.Queries) == 0 {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"at least one query is required",
		)
	}

	// Track query names for uniqueness (only for non-formula queries)
	queryNames := make(map[string]bool)

	// Validate each query based on its type
	for i, envelope := range r.CompositeQuery.Queries {
		switch envelope.Type {
		case QueryTypeBuilder, QueryTypeSubQuery:
			// Validate based on the concrete type
			switch spec := envelope.Spec.(type) {
			case QueryBuilderQuery[TraceAggregation]:
				if err := spec.Validate(r.RequestType); err != nil {
					queryId := getQueryIdentifier(envelope, i)
					return wrapValidationError(err, queryId, "invalid %s: %s")
				}
				// Check name uniqueness for non-formula context
				if spec.Name != "" {
					if queryNames[spec.Name] {
						return errors.NewInvalidInputf(
							errors.CodeInvalidInput,
							"duplicate query name '%s'",
							spec.Name,
						)
					}
					queryNames[spec.Name] = true
				}
			case QueryBuilderQuery[LogAggregation]:
				if err := spec.Validate(r.RequestType); err != nil {
					queryId := getQueryIdentifier(envelope, i)
					return wrapValidationError(err, queryId, "invalid %s: %s")
				}
				// Check name uniqueness for non-formula context
				if spec.Name != "" {
					if queryNames[spec.Name] {
						return errors.NewInvalidInputf(
							errors.CodeInvalidInput,
							"duplicate query name '%s'",
							spec.Name,
						)
					}
					queryNames[spec.Name] = true
				}
			case QueryBuilderQuery[MetricAggregation]:
				if err := spec.Validate(r.RequestType); err != nil {
					queryId := getQueryIdentifier(envelope, i)
					return wrapValidationError(err, queryId, "invalid %s: %s")
				}
				// Check name uniqueness for non-formula context
				if spec.Name != "" {
					if queryNames[spec.Name] {
						return errors.NewInvalidInputf(
							errors.CodeInvalidInput,
							"duplicate query name '%s'",
							spec.Name,
						)
					}
					queryNames[spec.Name] = true
				}
			default:
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"unknown spec type for %s",
					queryId,
				)
			}
		case QueryTypeFormula:
			// Formula validation is handled separately
			spec, ok := envelope.Spec.(QueryBuilderFormula)
			if !ok {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					queryId,
				)
			}
			if spec.Expression == "" {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"expression is required for %s",
					queryId,
				)
			}
		case QueryTypeJoin:
			// Join validation is handled separately
			_, ok := envelope.Spec.(QueryBuilderJoin)
			if !ok {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					queryId,
				)
			}
		case QueryTypeTraceOperator:
			spec, ok := envelope.Spec.(QueryBuilderTraceOperator)
			if !ok {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					queryId,
				)
			}
			if spec.Expression == "" {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"expression is required for %s",
					queryId,
				)
			}
		case QueryTypePromQL:
			// PromQL validation is handled separately
			spec, ok := envelope.Spec.(PromQuery)
			if !ok {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					queryId,
				)
			}
			if spec.Query == "" {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"query expression is required for %s",
					queryId,
				)
			}
		case QueryTypeClickHouseSQL:
			// ClickHouse SQL validation is handled separately
			spec, ok := envelope.Spec.(ClickHouseQuery)
			if !ok {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"invalid spec for %s",
					queryId,
				)
			}
			if spec.Query == "" {
				queryId := getQueryIdentifier(envelope, i)
				return errors.NewInvalidInputf(
					errors.CodeInvalidInput,
					"query expression is required for %s",
					queryId,
				)
			}
		default:
			queryId := getQueryIdentifier(envelope, i)
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"unknown query type '%s' for %s",
				envelope.Type,
				queryId,
			).WithAdditional(
				"Valid query types are: builder_query, builder_formula, builder_join, promql, clickhouse_sql, trace_operator",
			)
		}
	}

	return nil
}

// Validate performs validation on CompositeQuery
@@ -495,29 +697,12 @@ func (c *CompositeQuery) Validate(requestType RequestType) error {
		)
	}

	// Track query names for uniqueness (only for builder queries)
	queryNames := make(map[string]bool)

	// Validate each query
	for i, envelope := range c.Queries {
		if err := validateQueryEnvelope(envelope, requestType); err != nil {
			queryId := getQueryIdentifier(envelope, i)
			return wrapValidationError(err, queryId, "invalid %s: %s")
		}

		// Check name uniqueness for builder queries
		if envelope.Type == QueryTypeBuilder || envelope.Type == QueryTypeSubQuery {
			name := envelope.queryName()
			if name != "" {
				if queryNames[name] {
					return errors.NewInvalidInputf(
						errors.CodeInvalidInput,
						"duplicate query name '%s'",
						name,
					)
				}
				queryNames[name] = true
			}
		}
	}

	return nil
@@ -618,3 +803,85 @@ func validateQueryEnvelope(envelope QueryEnvelope, requestType RequestType) erro
		)
	}
}

// validateMetricAggregation validates metric-specific aggregation parameters
func validateMetricAggregation(agg MetricAggregation) error {
	// we can't decide anything here without known temporality
	if agg.Temporality == metrictypes.Unknown {
		return nil
	}

	// Validate that rate/increase are only used with appropriate temporalities
	if agg.TimeAggregation == metrictypes.TimeAggregationRate || agg.TimeAggregation == metrictypes.TimeAggregationIncrease {
		// For gauge metrics (Unspecified temporality), rate/increase doesn't make sense
		if agg.Temporality == metrictypes.Unspecified {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"rate/increase aggregation cannot be used with gauge metrics (unspecified temporality)",
			)
		}
	}

	// Validate percentile aggregations are only used with histogram types
	if agg.SpaceAggregation.IsPercentile() {
		if agg.Type != metrictypes.HistogramType && agg.Type != metrictypes.ExpHistogramType && agg.Type != metrictypes.SummaryType {
			return errors.NewInvalidInputf(
				errors.CodeInvalidInput,
				"percentile aggregation can only be used with histogram or summary metric types",
			)
		}
	}

	// Validate time aggregation values
	validTimeAggregations := []metrictypes.TimeAggregation{
		metrictypes.TimeAggregationUnspecified,
		metrictypes.TimeAggregationLatest,
		metrictypes.TimeAggregationSum,
		metrictypes.TimeAggregationAvg,
		metrictypes.TimeAggregationMin,
		metrictypes.TimeAggregationMax,
		metrictypes.TimeAggregationCount,
		metrictypes.TimeAggregationCountDistinct,
		metrictypes.TimeAggregationRate,
		metrictypes.TimeAggregationIncrease,
	}

	validTimeAgg := slices.Contains(validTimeAggregations, agg.TimeAggregation)
	if !validTimeAgg {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid time aggregation: %s",
			agg.TimeAggregation.StringValue(),
		).WithAdditional(
			"Valid time aggregations: latest, sum, avg, min, max, count, count_distinct, rate, increase",
		)
	}

	// Validate space aggregation values
	validSpaceAggregations := []metrictypes.SpaceAggregation{
		metrictypes.SpaceAggregationUnspecified,
		metrictypes.SpaceAggregationSum,
		metrictypes.SpaceAggregationAvg,
		metrictypes.SpaceAggregationMin,
		metrictypes.SpaceAggregationMax,
		metrictypes.SpaceAggregationCount,
		metrictypes.SpaceAggregationPercentile50,
		metrictypes.SpaceAggregationPercentile75,
		metrictypes.SpaceAggregationPercentile90,
		metrictypes.SpaceAggregationPercentile95,
		metrictypes.SpaceAggregationPercentile99,
	}

	validSpaceAgg := slices.Contains(validSpaceAggregations, agg.SpaceAggregation)
	if !validSpaceAgg {
		return errors.NewInvalidInputf(
			errors.CodeInvalidInput,
			"invalid space aggregation: %s",
			agg.SpaceAggregation.StringValue(),
		).WithAdditional(
			"Valid space aggregations: sum, avg, min, max, count, p50, p75, p90, p95, p99",
		)
	}

	return nil
}
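Of the checks above, the temporality rule is the one users hit most easily: rate/increase only makes sense for counters, so it is rejected on gauges (unspecified temporality). A stand-in sketch with the enum values simplified to strings for illustration:

```go
package main

import (
	"errors"
	"fmt"
)

// checkRate mirrors the rate/increase temporality check above.
func checkRate(temporality, timeAgg string) error {
	if timeAgg != "rate" && timeAgg != "increase" {
		return nil
	}
	if temporality == "unspecified" { // gauges carry unspecified temporality
		return errors.New("rate/increase aggregation cannot be used with gauge metrics (unspecified temporality)")
	}
	return nil
}

func main() {
	fmt.Println(checkRate("delta", "rate"))       // <nil>
	fmt.Println(checkRate("unspecified", "rate")) // rejected
}
```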
@@ -333,617 +333,6 @@ func TestQueryRangeRequest_ValidateAllQueriesNotDisabled(t *testing.T) {
	}
}

func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
	tests := []struct {
		name    string
		request QueryRangeRequest
		wantErr bool
		errMsg  string
	}{
		{
			name: "empty composite query should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{},
				},
			},
			wantErr: true,
			errMsg:  "at least one query is required",
		},
		{
			name: "duplicate builder query names should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[LogAggregation]{
								Name:     "A",
								Disabled: true,
								Signal:   telemetrytypes.SignalLogs,
							},
						},
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[TraceAggregation]{
								Name:     "A",
								Disabled: true,
								Signal:   telemetrytypes.SignalTraces,
							},
						},
					},
				},
			},
			wantErr: true,
			errMsg:  "duplicate query name 'A'",
		},
		{
			name: "duplicate names across log and metric builder queries should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[LogAggregation]{
								Name:     "X",
								Disabled: true,
								Signal:   telemetrytypes.SignalLogs,
							},
						},
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[MetricAggregation]{
								Name:     "X",
								Disabled: true,
								Signal:   telemetrytypes.SignalMetrics,
							},
						},
					},
				},
			},
			wantErr: true,
			errMsg:  "duplicate query name 'X'",
		},
		{
			name: "same name on formula and builder should not conflict",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[LogAggregation]{
								Name:   "A",
								Signal: telemetrytypes.SignalLogs,
								Aggregations: []LogAggregation{
									{Expression: "count()"},
								},
							},
						},
						{
							Type: QueryTypeFormula,
							Spec: QueryBuilderFormula{
								Name:       "A",
								Expression: "A + 1",
							},
						},
					},
				},
			},
			wantErr: false,
		},
		{
			name: "formula with empty expression should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeFormula,
							Spec: QueryBuilderFormula{
								Name:       "F1",
								Expression: "",
							},
						},
					},
				},
			},
			wantErr: true,
			errMsg:  "expression is required",
		},
		{
			name: "promql with empty query should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypePromQL,
							Spec: PromQuery{
								Name:  "P1",
								Query: "",
							},
						},
					},
				},
			},
			wantErr: true,
			errMsg:  "PromQL query is required",
		},
		{
			name: "clickhouse with empty query should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeClickHouseSQL,
							Spec: ClickHouseQuery{
								Name:  "CH1",
								Query: "",
							},
						},
					},
				},
			},
			wantErr: true,
			errMsg:  "ClickHouse SQL query is required",
		},
		{
			name: "trace operator with empty expression should return error",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeTraceOperator,
							Spec: QueryBuilderTraceOperator{
								Name:       "TO1",
								Expression: "",
							},
						},
					},
				},
			},
			wantErr: true,
			errMsg:  "expression is required",
		},
		{
			name: "valid promql query should pass",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypePromQL,
							Spec: PromQuery{
								Name:  "P1",
								Query: "up",
							},
						},
					},
				},
			},
			wantErr: false,
		},
		{
			name: "valid clickhouse query should pass",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeClickHouseSQL,
							Spec: ClickHouseQuery{
								Name:  "CH1",
								Query: "SELECT count() FROM logs",
							},
						},
					},
				},
			},
			wantErr: false,
		},
		{
			name: "valid mixed queries with unique builder names should pass",
			request: QueryRangeRequest{
				Start:       1640995200000,
				End:         1640998800000,
				RequestType: RequestTypeTimeSeries,
				CompositeQuery: CompositeQuery{
					Queries: []QueryEnvelope{
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[LogAggregation]{
								Name:   "A",
								Signal: telemetrytypes.SignalLogs,
								Aggregations: []LogAggregation{
									{Expression: "count()"},
								},
							},
						},
						{
							Type: QueryTypeBuilder,
							Spec: QueryBuilderQuery[TraceAggregation]{
								Name:   "B",
								Signal: telemetrytypes.SignalTraces,
								Aggregations: []TraceAggregation{
									{Expression: "count()"},
								},
							},
						},
						{
							Type: QueryTypePromQL,
							Spec: PromQuery{
								Name:  "C",
								Query: "up",
							},
						},
					},
				},
			},
			wantErr: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.request.Validate()
			if tt.wantErr {
				if err == nil {
					t.Errorf("Validate() expected error but got none")
					return
				}
				if tt.errMsg != "" && !contains(err.Error(), tt.errMsg) {
					t.Errorf("Validate() error = %v, want to contain %v", err.Error(), tt.errMsg)
				}
			} else {
				if err != nil {
					t.Errorf("Validate() unexpected error = %v", err)
				}
			}
		})
	}
}

func TestValidateQueryEnvelope(t *testing.T) {
	tests := []struct {
		name        string
		envelope    QueryEnvelope
		requestType RequestType
		wantErr     bool
		errMsg      string
	}{
		{
			name: "valid builder query with trace aggregation",
			envelope: QueryEnvelope{
				Type: QueryTypeBuilder,
				Spec: QueryBuilderQuery[TraceAggregation]{
					Name:   "A",
					Signal: telemetrytypes.SignalTraces,
					Aggregations: []TraceAggregation{
						{Expression: "count()"},
					},
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     false,
		},
		{
			name: "valid formula with expression",
			envelope: QueryEnvelope{
				Type: QueryTypeFormula,
				Spec: QueryBuilderFormula{
					Name:       "F1",
					Expression: "A + B",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     false,
		},
		{
			name: "formula with empty expression should fail",
			envelope: QueryEnvelope{
				Type: QueryTypeFormula,
				Spec: QueryBuilderFormula{
					Name:       "F1",
					Expression: "",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     true,
			errMsg:      "expression is required",
		},
		{
			name: "valid join spec",
			envelope: QueryEnvelope{
				Type: QueryTypeJoin,
				Spec: QueryBuilderJoin{
					Name: "J1",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     false,
		},
		{
			name: "valid trace operator",
			envelope: QueryEnvelope{
				Type: QueryTypeTraceOperator,
				Spec: QueryBuilderTraceOperator{
					Name:       "TO1",
					Expression: "count()",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     false,
		},
		{
			name: "trace operator with empty expression should fail",
			envelope: QueryEnvelope{
				Type: QueryTypeTraceOperator,
				Spec: QueryBuilderTraceOperator{
					Name:       "TO1",
					Expression: "",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     true,
			errMsg:      "expression is required",
		},
		{
			name: "promql with empty query should fail",
			envelope: QueryEnvelope{
				Type: QueryTypePromQL,
				Spec: PromQuery{
					Name:  "P1",
					Query: "",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     true,
			errMsg:      "PromQL query is required",
		},
		{
			name: "clickhouse with empty query should fail",
			envelope: QueryEnvelope{
				Type: QueryTypeClickHouseSQL,
				Spec: ClickHouseQuery{
					Name:  "CH1",
					Query: "",
				},
			},
			requestType: RequestTypeTimeSeries,
			wantErr:     true,
			errMsg:      "ClickHouse SQL query is required",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := validateQueryEnvelope(tt.envelope, tt.requestType)
			if tt.wantErr {
				if err == nil {
					t.Errorf("validateQueryEnvelope() expected error but got none")
					return
				}
				if tt.errMsg != "" && !contains(err.Error(), tt.errMsg) {
					t.Errorf("validateQueryEnvelope() error = %v, want to contain %v", err.Error(), tt.errMsg)
				}
			} else {
				if err != nil {
					t.Errorf("validateQueryEnvelope() unexpected error = %v", err)
				}
			}
		})
	}
}

func TestQueryEnvelope_Helpers(t *testing.T) {
	t.Run("queryName", func(t *testing.T) {
		tests := []struct {
			name     string
			envelope QueryEnvelope
			want     string
		}{
			{
				name:     "trace builder query",
				envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[TraceAggregation]{Name: "A"}},
				want:     "A",
			},
			{
				name:     "log builder query",
				envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Name: "B"}},
				want:     "B",
			},
			{
				name:     "metric builder query",
				envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[MetricAggregation]{Name: "C"}},
				want:     "C",
			},
			{
				name:     "formula",
				envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Name: "F1"}},
				want:     "F1",
			},
			{
				name:     "promql",
				envelope: QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Name: "P1"}},
				want:     "P1",
			},
			{
				name:     "clickhouse",
				envelope: QueryEnvelope{Type: QueryTypeClickHouseSQL, Spec: ClickHouseQuery{Name: "CH1"}},
				want:     "CH1",
			},
			{
				name:     "trace operator",
				envelope: QueryEnvelope{Type: QueryTypeTraceOperator, Spec: QueryBuilderTraceOperator{Name: "TO1"}},
				want:     "TO1",
			},
			{
				name:     "join",
				envelope: QueryEnvelope{Type: QueryTypeJoin, Spec: QueryBuilderJoin{Name: "J1"}},
				want:     "J1",
			},
			{
				name:     "empty name",
				envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{}},
				want:     "",
			},
		}
		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				got := tt.envelope.queryName()
				if got != tt.want {
					t.Errorf("queryName() = %q, want %q", got, tt.want)
				}
			})
		}
	})

	t.Run("isDisabled", func(t *testing.T) {
		tests := []struct {
			name     string
			envelope QueryEnvelope
			want     bool
		}{
			{
				name:     "enabled builder query",
				envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Disabled: false}},
				want:     false,
			},
			{
				name:     "disabled builder query",
				envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Disabled: true}},
				want:     true,
			},
			{
				name:     "disabled formula",
				envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Disabled: true}},
				want:     true,
			},
			{
				name:     "enabled promql",
				envelope: QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Disabled: false}},
				want:     false,
			},
			{
				name:     "disabled clickhouse",
				envelope: QueryEnvelope{Type: QueryTypeClickHouseSQL, Spec: ClickHouseQuery{Disabled: true}},
				want:     true,
			},
			{
				name:     "disabled trace operator",
				envelope: QueryEnvelope{Type: QueryTypeTraceOperator, Spec: QueryBuilderTraceOperator{Disabled: true}},
				want:     true,
			},
			{
				name:     "disabled join",
				envelope: QueryEnvelope{Type: QueryTypeJoin, Spec: QueryBuilderJoin{Disabled: true}},
				want:     true,
			},
		}
		for _, tt := range tests {
			t.Run(tt.name, func(t *testing.T) {
				got := tt.envelope.isDisabled()
				if got != tt.want {
					t.Errorf("isDisabled() = %v, want %v", got, tt.want)
				}
			})
		}
	})
}

func TestGetQueryIdentifier(t *testing.T) {
	tests := []struct {
		name     string
		envelope QueryEnvelope
		index    int
		want     string
	}{
		{
			name:     "builder query with name",
			envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{Name: "A"}},
			index:    0,
			want:     "query 'A'",
		},
		{
			name:     "builder query without name",
			envelope: QueryEnvelope{Type: QueryTypeBuilder, Spec: QueryBuilderQuery[LogAggregation]{}},
			index:    2,
			want:     "query at position 3",
		},
		{
			name:     "formula with name",
			envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{Name: "F1"}},
			index:    0,
			want:     "formula 'F1'",
		},
		{
			name:     "formula without name",
			envelope: QueryEnvelope{Type: QueryTypeFormula, Spec: QueryBuilderFormula{}},
			index:    1,
			want:     "formula at position 2",
		},
		{
			name:     "promql with name",
			envelope: QueryEnvelope{Type: QueryTypePromQL, Spec: PromQuery{Name: "P1"}},
			index:    0,
			want:     "PromQL query 'P1'",
		},
		{
			name:     "clickhouse with name",
			envelope: QueryEnvelope{Type: QueryTypeClickHouseSQL, Spec: ClickHouseQuery{Name: "CH1"}},
			index:    0,
			want:     "ClickHouse query 'CH1'",
		},
		{
			name:     "trace operator with name",
			envelope: QueryEnvelope{Type: QueryTypeTraceOperator, Spec: QueryBuilderTraceOperator{Name: "TO1"}},
			index:    0,
			want:     "trace operator 'TO1'",
		},
		{
			name:     "join without name",
			envelope: QueryEnvelope{Type: QueryTypeJoin, Spec: QueryBuilderJoin{}},
			index:    0,
			want:     "join at position 1",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := getQueryIdentifier(tt.envelope, tt.index)
			if got != tt.want {
				t.Errorf("getQueryIdentifier() = %q, want %q", got, tt.want)
			}
		})
	}
}

func TestQueryRangeRequest_ValidateOrderByForAggregation(t *testing.T) {
	tests := []struct {
		name string
@@ -1123,139 +512,4 @@ func TestQueryRangeRequest_ValidateOrderByForAggregation(t *testing.T) {
			}
		})
	}
}

func TestRequestType_IsAggregation(t *testing.T) {
	tests := []struct {
		name        string
		requestType RequestType
		want        bool
	}{
		{"time_series is aggregation", RequestTypeTimeSeries, true},
		{"scalar is aggregation", RequestTypeScalar, true},
		{"distribution is aggregation", RequestTypeDistribution, true},
		{"raw is not aggregation", RequestTypeRaw, false},
		{"raw_stream is not aggregation", RequestTypeRawStream, false},
		{"trace is not aggregation", RequestTypeTrace, false},
		{"unknown is not aggregation", RequestTypeUnknown, false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := tt.requestType.IsAggregation()
			if got != tt.want {
				t.Errorf("IsAggregation() = %v, want %v", got, tt.want)
			}
		})
	}
}

func TestNonAggregationFieldsSkipped(t *testing.T) {
	// Fields that only apply to aggregation queries (groupBy, having, aggregations)
	// should be silently skipped for non-aggregation request types.
	t.Run("groupBy ignored for raw request type", func(t *testing.T) {
		query := QueryBuilderQuery[LogAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalLogs,
			GroupBy: []GroupByKey{
				{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
			},
		}
		err := query.Validate(RequestTypeRaw)
		if err != nil {
			t.Errorf("expected no error for groupBy with raw request type, got: %v", err)
		}
	})

	t.Run("groupBy validated for timeseries request type", func(t *testing.T) {
		query := QueryBuilderQuery[LogAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalLogs,
			Aggregations: []LogAggregation{
				{Expression: "count()"},
			},
			GroupBy: []GroupByKey{
				{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: ""}},
			},
		}
		err := query.Validate(RequestTypeTimeSeries)
		if err == nil {
			t.Errorf("expected error for empty groupBy key with timeseries request type")
		}
	})

	t.Run("having ignored for raw request type", func(t *testing.T) {
		query := QueryBuilderQuery[LogAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalLogs,
			Having: &Having{Expression: "count() > 10"},
		}
		err := query.Validate(RequestTypeRaw)
		if err != nil {
			t.Errorf("expected no error for having with raw request type, got: %v", err)
		}
	})

	t.Run("having ignored for trace request type", func(t *testing.T) {
		query := QueryBuilderQuery[TraceAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalTraces,
			Having: &Having{Expression: "count() > 10"},
		}
		err := query.Validate(RequestTypeTrace)
		if err != nil {
			t.Errorf("expected no error for having with trace request type, got: %v", err)
		}
	})

	t.Run("aggregations ignored for raw request type", func(t *testing.T) {
		query := QueryBuilderQuery[LogAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalLogs,
			Aggregations: []LogAggregation{
				{Expression: "count()"},
			},
		}
		err := query.Validate(RequestTypeRaw)
		if err != nil {
			t.Errorf("expected no error for aggregations with raw request type, got: %v", err)
		}
	})

	t.Run("aggregations ignored for raw_stream request type", func(t *testing.T) {
		query := QueryBuilderQuery[LogAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalLogs,
			Aggregations: []LogAggregation{
				{Expression: "count()"},
			},
		}
		err := query.Validate(RequestTypeRawStream)
		if err != nil {
			t.Errorf("expected no error for aggregations with raw_stream request type, got: %v", err)
		}
	})

	t.Run("selectFields validated for raw but not timeseries", func(t *testing.T) {
		query := QueryBuilderQuery[TraceAggregation]{
			Name:   "A",
			Signal: telemetrytypes.SignalTraces,
			Aggregations: []TraceAggregation{
				{Expression: "count()"},
			},
			SelectFields: []telemetrytypes.TelemetryFieldKey{
				{Name: "isRoot"},
			},
		}
		// Should error for raw (selectFields are validated)
		err := query.Validate(RequestTypeRaw)
		if err == nil {
			t.Errorf("expected error for isRoot in selectFields with raw request type")
		}
		// Should pass for timeseries (selectFields skipped)
		err = query.Validate(RequestTypeTimeSeries)
		if err != nil {
			t.Errorf("expected no error for isRoot in selectFields with timeseries request type, got: %v", err)
		}
	})
}
}
@@ -78,18 +78,18 @@ func (f *TelemetryFieldKey) ArrayParentSelectors() []*FieldKeySelector {

func (f TelemetryFieldKey) String() string {
	var sb strings.Builder
	fmt.Fprintf(&sb, "name=%s", f.Name)
	sb.WriteString(fmt.Sprintf("name=%s", f.Name))
	if f.FieldContext != FieldContextUnspecified {
		fmt.Fprintf(&sb, ",context=%s", f.FieldContext.String)
		sb.WriteString(fmt.Sprintf(",context=%s", f.FieldContext.String))
	}
	if f.FieldDataType != FieldDataTypeUnspecified {
		fmt.Fprintf(&sb, ",datatype=%s", f.FieldDataType.StringValue())
		sb.WriteString(fmt.Sprintf(",datatype=%s", f.FieldDataType.StringValue()))
	}
	if f.Materialized {
		sb.WriteString(",materialized=true")
	}
	if f.JSONDataType != nil {
		fmt.Fprintf(&sb, ",jsondatatype=%s", f.JSONDataType.StringValue())
		sb.WriteString(fmt.Sprintf(",jsondatatype=%s", f.JSONDataType.StringValue()))
	}
	if len(f.Indexes) > 0 {
		sb.WriteString(",indexes=[")
@@ -97,7 +97,7 @@ func (f TelemetryFieldKey) String() string {
			if i > 0 {
				sb.WriteString("; ")
			}
			fmt.Fprintf(&sb, "{type=%s, columnExpr=%s, indexExpr=%s}", index.Type.StringValue(), index.ColumnExpression, index.IndexExpression)
			sb.WriteString(fmt.Sprintf("{type=%s, columnExpr=%s, indexExpr=%s}", index.Type.StringValue(), index.ColumnExpression, index.IndexExpression))
		}
		sb.WriteString("]")
	}
@@ -108,17 +108,6 @@ func (f TelemetryFieldKey) Text() string {
	return TelemetryFieldKeyToText(&f)
}

// OverrideMetadataFrom copies the resolved metadata fields from src into f.
// This is used when adjusting user-provided keys to match known field definitions.
func (f *TelemetryFieldKey) OverrideMetadataFrom(src *TelemetryFieldKey) {
	f.FieldContext = src.FieldContext
	f.FieldDataType = src.FieldDataType
	f.JSONDataType = src.JSONDataType
	f.Indexes = src.Indexes
	f.Materialized = src.Materialized
	f.JSONPlan = src.JSONPlan
}

func (f *TelemetryFieldKey) Equal(key *TelemetryFieldKey) bool {
	return f.Name == key.Name &&
		f.FieldContext == key.FieldContext &&
@@ -236,19 +225,11 @@ func TelemetryFieldKeyToText(key *TelemetryFieldKey) string {
}

func FieldKeyToMaterializedColumnName(key *TelemetryFieldKey) string {
	return fmt.Sprintf("`%s_%s_%s`",
		key.FieldContext.String,
		fieldDataTypes[key.FieldDataType.StringValue()].StringValue(),
		strings.ReplaceAll(key.Name, ".", "$$"),
	)
	return fmt.Sprintf("`%s_%s_%s`", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
}

func FieldKeyToMaterializedColumnNameForExists(key *TelemetryFieldKey) string {
	return fmt.Sprintf("`%s_%s_%s_exists`",
		key.FieldContext.String,
		fieldDataTypes[key.FieldDataType.StringValue()].StringValue(),
		strings.ReplaceAll(key.Name, ".", "$$"),
	)
	return fmt.Sprintf("`%s_%s_%s_exists`", key.FieldContext.String, fieldDataTypes[key.FieldDataType.StringValue()].StringValue(), strings.ReplaceAll(key.Name, ".", "$$"))
}

type TelemetryFieldValues struct {
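The naming scheme above is deterministic: field context, data type, and the field name with dots escaped as `$$`, all joined with underscores and wrapped in backticks. A small sketch under those assumptions (the context and datatype strings below are illustrative; the real values come from the field metadata):

```go
package main

import (
	"fmt"
	"strings"
)

// materializedColumn sketches the backtick-quoted column naming above.
func materializedColumn(context, dataType, name string) string {
	return fmt.Sprintf("`%s_%s_%s`", context, dataType, strings.ReplaceAll(name, ".", "$$"))
}

func main() {
	fmt.Println(materializedColumn("attribute", "string", "service.name"))
	// Output: `attribute_string_service$$name`
}
```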
@@ -14,10 +14,10 @@ from fixtures.alertutils import (
|
||||
from fixtures.logger import setup_logger
|
||||
from fixtures.utils import get_testdata_file_path
|
||||
|
||||
# Alert test cases use a 30-second wait time to verify expected alert firing.
|
||||
# Alert data is set up to trigger on the first rule manager evaluation.
|
||||
# With a 15-second eval frequency for most rules, plus alertmanager's
|
||||
# group_wait and group_interval delays, alerts should fire well within 30 seconds.
|
||||
# test cases for match type and compare operators have wait time of 30 seconds to verify the alert expectation.
|
||||
# we've poistioned the alert data to fire the alert on first eval of rule manager, the eval frequency
|
||||
# for most alert rules are set of 15s so considering this delay plus some delay from alert manager's
|
||||
# group_wait and group_interval, even in worst case most alerts should be triggered in about 30 seconds
|
||||
TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
|
||||
types.AlertTestCase(
|
||||
name="test_threshold_above_at_least_once",
|
||||
@@ -25,7 +25,6 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
|
||||
alert_data=[
|
||||
types.AlertData(
|
||||
type="metrics",
|
||||
# active requests dummy data
|
||||
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
|
||||
),
|
||||
],
|
||||
@@ -116,28 +115,30 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
|
||||
],
|
||||
),
|
||||
),
|
||||
types.AlertTestCase(
|
||||
name="test_threshold_above_last",
|
||||
rule_path="alerts/test_scenarios/threshold_above_last/rule.json",
|
||||
alert_data=[
|
||||
types.AlertData(
|
||||
type="metrics",
|
||||
data_path="alerts/test_scenarios/threshold_above_last/alert_data.jsonl",
|
||||
),
|
||||
],
|
||||
alert_expectation=types.AlertExpectation(
|
||||
should_alert=True,
|
||||
wait_time_seconds=30,
|
||||
expected_alerts=[
|
||||
types.FiringAlert(
|
||||
labels={
|
||||
"alertname": "threshold_above_last",
|
||||
"threshold.name": "critical",
|
||||
}
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
# TODO: @abhishekhugetech enable the test for matchType last, pylint: disable=W0511
|
||||
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed
|
||||
# types.AlertTestCase(
|
||||
# name="test_threshold_above_last",
|
||||
# rule_path="alerts/test_scenarios/threshold_above_last/rule.json",
|
||||
# alert_data=[
|
||||
# types.AlertData(
|
||||
# type="metrics",
|
||||
# data_path="alerts/test_scenarios/threshold_above_last/alert_data.jsonl",
|
||||
# ),
|
||||
# ],
|
||||
# alert_expectation=types.AlertExpectation(
|
||||
# should_alert=True,
|
||||
# wait_time_seconds=30,
|
||||
# expected_alerts=[
|
||||
# types.FiringAlert(
|
||||
# labels={
|
||||
# "alertname": "threshold_above_last",
|
||||
# "threshold.name": "critical",
|
||||
# }
|
||||
# ),
|
||||
# ],
|
||||
# ),
|
||||
# ),
|
||||
types.AlertTestCase(
|
||||
name="test_threshold_below_at_least_once",
|
||||
rule_path="alerts/test_scenarios/threshold_below_at_least_once/rule.json",
|
||||
@@ -188,7 +189,6 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
|
||||
alert_data=[
|
||||
types.AlertData(
|
||||
type="metrics",
|
||||
# one rate ~5 + rest 0.01 so it remains in total below 10
|
||||
data_path="alerts/test_scenarios/threshold_below_in_total/alert_data.jsonl",
|
||||
),
|
||||
],
|
||||
@@ -227,28 +227,30 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
|
||||
],
|
||||
),
|
||||
),
|
||||
types.AlertTestCase(
|
||||
name="test_threshold_below_last",
|
||||
rule_path="alerts/test_scenarios/threshold_below_last/rule.json",
|
||||
alert_data=[
|
||||
types.AlertData(
|
||||
type="metrics",
|
||||
data_path="alerts/test_scenarios/threshold_below_last/alert_data.jsonl",
|
||||
),
|
||||
],
|
||||
alert_expectation=types.AlertExpectation(
|
||||
should_alert=True,
|
||||
wait_time_seconds=30,
|
||||
expected_alerts=[
|
||||
types.FiringAlert(
|
||||
labels={
|
||||
"alertname": "threshold_below_last",
|
||||
"threshold.name": "critical",
|
||||
}
|
||||
),
|
||||
],
|
||||
),
|
||||
),
|
||||
# TODO: @abhishekhugetech enable the test for matchType last,
|
||||
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
|
||||
# types.AlertTestCase(
|
||||
# name="test_threshold_below_last",
|
||||
# rule_path="alerts/test_scenarios/threshold_below_last/rule.json",
|
||||
# alert_data=[
|
||||
# types.AlertData(
|
||||
# type="metrics",
|
||||
# data_path="alerts/test_scenarios/threshold_below_last/alert_data.jsonl",
|
||||
# ),
|
||||
# ],
|
||||
# alert_expectation=types.AlertExpectation(
|
||||
# should_alert=True,
|
||||
# wait_time_seconds=30,
|
||||
# expected_alerts=[
|
||||
# types.FiringAlert(
|
||||
# labels={
|
||||
# "alertname": "threshold_below_last",
|
||||
# "threshold.name": "critical",
|
||||
# }
|
||||
# ),
|
||||
# ],
|
||||
# ),
|
||||
# ),
|
||||
types.AlertTestCase(
|
||||
name="test_threshold_equal_to_at_least_once",
|
||||
rule_path="alerts/test_scenarios/threshold_equal_to_at_least_once/rule.json",
|
||||
@@ -337,28 +339,30 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
             ],
         ),
     ),
-    types.AlertTestCase(
-        name="test_threshold_equal_to_last",
-        rule_path="alerts/test_scenarios/threshold_equal_to_last/rule.json",
-        alert_data=[
-            types.AlertData(
-                type="metrics",
-                data_path="alerts/test_scenarios/threshold_equal_to_last/alert_data.jsonl",
-            ),
-        ],
-        alert_expectation=types.AlertExpectation(
-            should_alert=True,
-            wait_time_seconds=30,
-            expected_alerts=[
-                types.FiringAlert(
-                    labels={
-                        "alertname": "threshold_equal_to_last",
-                        "threshold.name": "critical",
-                    }
-                ),
-            ],
-        ),
-    ),
+    # TODO: @abhishekhugetech enable the test for matchType last,
+    # after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
+    # types.AlertTestCase(
+    #     name="test_threshold_equal_to_last",
+    #     rule_path="alerts/test_scenarios/threshold_equal_to_last/rule.json",
+    #     alert_data=[
+    #         types.AlertData(
+    #             type="metrics",
+    #             data_path="alerts/test_scenarios/threshold_equal_to_last/alert_data.jsonl",
+    #         ),
+    #     ],
+    #     alert_expectation=types.AlertExpectation(
+    #         should_alert=True,
+    #         wait_time_seconds=30,
+    #         expected_alerts=[
+    #             types.FiringAlert(
+    #                 labels={
+    #                     "alertname": "threshold_equal_to_last",
+    #                     "threshold.name": "critical",
+    #                 }
+    #             ),
+    #         ],
+    #     ),
+    # ),
     types.AlertTestCase(
         name="test_threshold_not_equal_to_at_least_once",
         rule_path="alerts/test_scenarios/threshold_not_equal_to_at_least_once/rule.json",
@@ -447,28 +451,30 @@ TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
             ],
         ),
     ),
-    types.AlertTestCase(
-        name="test_threshold_not_equal_to_last",
-        rule_path="alerts/test_scenarios/threshold_not_equal_to_last/rule.json",
-        alert_data=[
-            types.AlertData(
-                type="metrics",
-                data_path="alerts/test_scenarios/threshold_not_equal_to_last/alert_data.jsonl",
-            ),
-        ],
-        alert_expectation=types.AlertExpectation(
-            should_alert=True,
-            wait_time_seconds=30,
-            expected_alerts=[
-                types.FiringAlert(
-                    labels={
-                        "alertname": "threshold_not_equal_to_last",
-                        "threshold.name": "critical",
-                    }
-                ),
-            ],
-        ),
-    ),
+    # TODO: @abhishekhugetech enable the test for matchType last,
+    # after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
+    # types.AlertTestCase(
+    #     name="test_threshold_not_equal_to_last",
+    #     rule_path="alerts/test_scenarios/threshold_not_equal_to_last/rule.json",
+    #     alert_data=[
+    #         types.AlertData(
+    #             type="metrics",
+    #             data_path="alerts/test_scenarios/threshold_not_equal_to_last/alert_data.jsonl",
+    #         ),
+    #     ],
+    #     alert_expectation=types.AlertExpectation(
+    #         should_alert=True,
+    #         wait_time_seconds=30,
+    #         expected_alerts=[
+    #             types.FiringAlert(
+    #                 labels={
+    #                     "alertname": "threshold_not_equal_to_last",
+    #                     "threshold.name": "critical",
+    #                 }
+    #             ),
+    #         ],
+    #     ),
+    # ),
 ]
 
 # test cases unit conversion
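A note for readers skimming the diff: every entry in this list shares one fixture shape. Below is a minimal sketch of that shape as plain dataclasses, inferred only from the fields used above; the real `types` module in the SigNoz integration-test suite may define these differently.

```python
from dataclasses import dataclass, field
from typing import Dict, List, Optional


@dataclass
class AlertData:
    type: str       # e.g. "metrics"
    data_path: str  # JSONL fixture with the datapoints to ingest


@dataclass
class FiringAlert:
    labels: Dict[str, str]  # labels the fired alert must carry


@dataclass
class AlertExpectation:
    should_alert: bool
    wait_time_seconds: int  # how long to wait for rule evaluation
    expected_alerts: List[FiringAlert] = field(default_factory=list)


@dataclass
class AlertTestCase:
    name: str
    rule_path: str  # JSON rule definition to install before ingesting data
    alert_data: List[AlertData] = field(default_factory=list)
    alert_expectation: Optional[AlertExpectation] = None
```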
File diff suppressed because it is too large

@@ -54,17 +54,17 @@ def test_rate_with_steady_values_and_reset(
 
     data = response.json()
     result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
-    assert len(result_values) >= 58
+    assert len(result_values) >= 59
     # the counter reset happened at 31st minute
     assert (
-        result_values[29]["value"] == 0.0167
+        result_values[30]["value"] == 0.0167
     ) # i.e 2/120 i.e 29th to 31st minute changes
     assert (
-        result_values[30]["value"] == 0.133
+        result_values[31]["value"] == 0.133
     ) # i.e 10/60 i.e 31st to 32nd minute changes
     count_of_steady_rate = sum(1 for v in result_values if v["value"] == 0.0833)
     assert (
-        count_of_steady_rate >= 55
+        count_of_steady_rate >= 56
     ) # 59 - (1 reset + 1 high rate + 1 at the beginning)
     # All rates should be non-negative (stale periods = 0 rate)
     for v in result_values:
@@ -72,17 +72,16 @@ def test_with_steady_values_and_reset(
 
     data = response.json()
     result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
-    assert len(result_values) >= 58
+    assert len(result_values) >= 59
     # the counter reset happened at 31st minute
-    # we skip the rate value for the first data point without previous value
-    assert result_values[29]["value"] == expected_value_at_31st_minute
-    assert result_values[30]["value"] == expected_value_at_32nd_minute
+    assert result_values[30]["value"] == expected_value_at_31st_minute
+    assert result_values[31]["value"] == expected_value_at_32nd_minute
     assert (
-        result_values[38]["value"] == steady_value
-    ) # 38th minute is when cumulative shifts to delta
+        result_values[39]["value"] == steady_value
+    ) # 39th minute is when cumulative shifts to delta
     count_of_steady_rate = sum(1 for v in result_values if v["value"] == steady_value)
     assert (
-        count_of_steady_rate >= 55
+        count_of_steady_rate >= 56
     ) # 59 - (1 reset + 1 high rate + 1 at the beginning)
     # All rates should be non-negative (stale periods = 0 rate)
     for v in result_values:
@@ -317,12 +316,12 @@ def test_for_service_with_switch(
 
     data = response.json()
     result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
-    assert len(result_values) >= 59
-    assert result_values[29]["value"] == expected_value_at_30th_minute # 0.183
-    assert result_values[30]["value"] == expected_value_at_31st_minute # 0.183
-    assert result_values[37]["value"] == value_at_switch # 0.25
+    assert len(result_values) >= 60
+    assert result_values[30]["value"] == expected_value_at_30th_minute # 0.183
+    assert result_values[31]["value"] == expected_value_at_31st_minute # 0.183
+    assert result_values[38]["value"] == value_at_switch # 0.25
     assert (
-        result_values[38]["value"] == value_at_switch # 0.25
+        result_values[39]["value"] == value_at_switch # 0.25
     ) # 39th minute is when cumulative shifts to delta
     # All rates should be non-negative (stale periods = 0 rate)
     for v in result_values:

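The index shifts above track where the rate series starts: each point is delta(value) / delta(seconds) between adjacent samples, so the asserted `0.0167` is 2/120 (two counts over the two minutes spanning the reset) and the steady `0.0833` is 5/60. A reset-aware rate can be sketched as below; this is illustrative only, since the actual computation happens in the SigNoz query engine:

```python
# Illustrative reset-aware per-second rate over (timestamp_s, value) samples.
def rates(samples):
    out = []
    for (t0, v0), (t1, v1) in zip(samples, samples[1:]):
        delta = v1 - v0
        if delta < 0:  # counter reset: treat the new value as the increase
            delta = v1
        out.append(round(delta / (t1 - t0), 4))
    return out


# 5 counts/minute steady, then a reset at the fourth sample.
print(rates([(0, 0), (60, 5), (120, 10), (180, 2)]))
# -> [0.0833, 0.0833, 0.0333]
```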
@@ -1,12 +1,12 @@
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:01:00+00:00","value":1,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:02:00+00:00","value":2,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:03:00+00:00","value":4,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:04:00+00:00","value":4,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:05:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:06:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:07:00+00:00","value":36,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:08:00+00:00","value":25,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:09:00+00:00","value":37,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:10:00+00:00","value":35,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:11:00+00:00","value":39,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:12:00+00:00","value":25,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:01:00+00:00","value":1,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:02:00+00:00","value":2,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:03:00+00:00","value":3,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:04:00+00:00","value":4,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:05:00+00:00","value":19,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:06:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:07:00+00:00","value":35,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:08:00+00:00","value":36,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:09:00+00:00","value":37,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:10:00+00:00","value":38,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:11:00+00:00","value":39,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"request_total_threshold_above_at_least_once","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:12:00+00:00","value":40,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
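The fixture rewrite above flips `is_monotonic` to `true` and edits the samples so the cumulative counter never decreases (the old data dipped, e.g. 15 -> 10 and 36 -> 25). A quick sanity check for such a fixture could look like this; the helper is hypothetical and not part of the test suite:

```python
import json


def is_monotonic(path):
    """True if every series in a JSONL fixture is non-decreasing over time."""
    series = {}
    with open(path) as f:
        for line in f:
            point = json.loads(line)
            key = (point["metric_name"], tuple(sorted(point["labels"].items())))
            series.setdefault(key, []).append((point["timestamp"], point["value"]))
    # ISO-8601 timestamps sort lexicographically, so sorting pairs works here.
    return all(
        all(a[1] <= b[1] for a, b in zip(points, points[1:]))
        for points in (sorted(v) for v in series.values())
    )
```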
@@ -25,7 +25,7 @@
       "type": "clickhouse_sql",
       "spec": {
         "name": "A",
-        "query": "WITH __temporal_aggregation_cte AS (\n SELECT \n fingerprint, \n toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60)) AS ts, \n avg(value) AS per_series_value \n FROM signoz_metrics.distributed_samples_v4 AS points \n INNER JOIN (\n SELECT fingerprint \n FROM signoz_metrics.time_series_v4 \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND LOWER(temporality) LIKE LOWER('cumulative') \n AND __normalized = false \n GROUP BY fingerprint\n ) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND unix_milli >= $start_timestamp_ms \n AND unix_milli < $end_timestamp_ms \n GROUP BY fingerprint, ts \n ORDER BY fingerprint, ts\n), \n__spatial_aggregation_cte AS (\n SELECT \n ts, \n sum(per_series_value) AS value \n FROM __temporal_aggregation_cte \n WHERE isNaN(per_series_value) = 0 \n GROUP BY ts\n) \nSELECT * FROM __spatial_aggregation_cte \nORDER BY ts"
+        "query": "WITH __temporal_aggregation_cte AS (\n SELECT \n fingerprint, \n toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(60)) AS ts, \n avg(value) AS per_series_value \n FROM signoz_metrics.distributed_samples_v4 AS points \n INNER JOIN (\n SELECT fingerprint \n FROM signoz_metrics.time_series_v4 \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND LOWER(temporality) LIKE LOWER('cumulative') \n AND __normalized = false \n GROUP BY fingerprint\n ) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint \n WHERE metric_name IN ('request_total_threshold_above_at_least_once') \n AND unix_milli >= {{.start_timestamp_ms}} \n AND unix_milli < {{.end_timestamp_ms}} \n GROUP BY fingerprint, ts \n ORDER BY fingerprint, ts\n), \n__spatial_aggregation_cte AS (\n SELECT \n ts, \n avg(per_series_value) AS value \n FROM __temporal_aggregation_cte \n WHERE isNaN(per_series_value) = 0 \n GROUP BY ts\n) \nSELECT * FROM __spatial_aggregation_cte \nORDER BY ts"
       }
     }
   ]
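Two things change in this rule query: the time-range placeholders move from the `$start_timestamp_ms`/`$end_timestamp_ms` style to `{{.start_timestamp_ms}}`/`{{.end_timestamp_ms}}` templates, and the spatial aggregation across series switches from `sum(per_series_value)` to `avg(per_series_value)`. A toy illustration of what the second change does per timestamp, on made-up data:

```python
# Two series with per-minute values keyed by timestamp (seconds).
per_series = {
    "series_a": {60: 4.0, 120: 6.0},
    "series_b": {60: 2.0, 120: 2.0},
}

timestamps = sorted({ts for s in per_series.values() for ts in s})
for ts in timestamps:
    values = [s[ts] for s in per_series.values() if ts in s]
    print(ts, sum(values), sum(values) / len(values))  # sum vs avg per ts
# 60  6.0 3.0
# 120 8.0 4.0
```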
@@ -1,12 +1,12 @@
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":12,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":31,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":23,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":58,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":71,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":45,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":81,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":86,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":91,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":31,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":46,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":58,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":71,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":76,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":81,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":86,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_threshold_above_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":91,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

@@ -1,12 +1,12 @@
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":45,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":60,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":65,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":34,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":75,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":5,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":10,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":20,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":30,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":40,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":50,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":55,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":60,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":65,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":70,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_mb_threshold_equal_to_last","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":75,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

@@ -1,12 +1,12 @@
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":524288,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":1048576,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":1572864,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":2097152,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":3770016,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":5642880,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":10515744,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":11038632,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":11561520,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":12084408,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":12607296,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
-{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":13130184,"temporality":"Cumulative","type_":"Sum","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:01:00+00:00","value":524288,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:02:00+00:00","value":1048576,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:03:00+00:00","value":1572864,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:04:00+00:00","value":2097152,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:05:00+00:00","value":3770016,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:06:00+00:00","value":5642880,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:07:00+00:00","value":10515744,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:08:00+00:00","value":11038632,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:09:00+00:00","value":11561520,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:10:00+00:00","value":12084408,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:11:00+00:00","value":12607296,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
+{"metric_name":"disk_usage_unit_conversion_bytes_to_mb","labels":{"device":"/dev/sda1","mountpoint":"/"},"timestamp":"2026-01-29T10:12:00+00:00","value":13130184,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
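The bytes-to-MB fixture above starts in multiples of 524288 bytes, i.e. half of 1048576, so the converted series begins on clean half-megabyte increments, assuming the binary megabyte (1 MB = 1048576 bytes) that those steps suggest. A quick check:

```python
MB = 1024 * 1024  # 1048576, the binary megabyte the fixture steps imply

for raw in (524288, 1048576, 2097152):
    print(raw, raw / MB)
# -> 0.5, 1.0, 2.0
```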