Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-12 20:42:07 +00:00)

Compare commits: imp/remove ... SIG-8704 (20 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 2df36c11f6 | |
| | a33b13c166 | |
| | 700e021228 | |
| | 5a0b850e8c | |
| | 70d542e3fe | |
| | cd525221e5 | |
| | 7128d78415 | |
| | 9edc1fd180 | |
| | de36acf5f1 | |
| | 299c3c1135 | |
| | 46cf13cd2f | |
| | 674556d672 | |
| | af987e53ce | |
| | 59d5accd33 | |
| | 5a7ad670d8 | |
| | 9d04b397ac | |
| | a4f3be5e46 | |
| | 8f833fa62c | |
| | 7029233596 | |
| | d26efd2833 | |
@@ -24,7 +24,7 @@ services:
     depends_on:
       - zookeeper
   zookeeper:
-    image: bitnami/zookeeper:3.7.1
+    image: signoz/zookeeper:3.7.1
     container_name: zookeeper
     volumes:
       - ${PWD}/fs/tmp/zookeeper:/bitnami/zookeeper
@@ -39,7 +39,7 @@ x-clickhouse-defaults: &clickhouse-defaults
     hard: 262144
 x-zookeeper-defaults: &zookeeper-defaults
   !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
   user: root
   deploy:
     labels:
@@ -38,7 +38,7 @@ x-clickhouse-defaults: &clickhouse-defaults
     hard: 262144
 x-zookeeper-defaults: &zookeeper-defaults
   !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
   user: root
   deploy:
     labels:
@@ -42,7 +42,7 @@ x-clickhouse-defaults: &clickhouse-defaults
     hard: 262144
 x-zookeeper-defaults: &zookeeper-defaults
   !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
   user: root
   labels:
     signoz.io/scrape: "true"
@@ -38,7 +38,7 @@ x-clickhouse-defaults: &clickhouse-defaults
     hard: 262144
 x-zookeeper-defaults: &zookeeper-defaults
   !!merge <<: *common
-  image: bitnami/zookeeper:3.7.1
+  image: signoz/zookeeper:3.7.1
   user: root
   labels:
     signoz.io/scrape: "true"
@@ -48,6 +48,6 @@
   "INFRASTRUCTURE_MONITORING_HOSTS": "SigNoz | Infra Monitoring",
   "INFRASTRUCTURE_MONITORING_KUBERNETES": "SigNoz | Infra Monitoring",
   "METER_EXPLORER": "SigNoz | Meter Explorer",
-  "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer",
-  "METER_EXPLORER_BASE": "SigNoz | Meter Explorer"
+  "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
+  "METER": "SigNoz | Meter"
 }
@@ -71,6 +71,6 @@
   "METRICS_EXPLORER_VIEWS": "SigNoz | Metrics Explorer",
   "API_MONITORING": "SigNoz | External APIs",
   "METER_EXPLORER": "SigNoz | Meter Explorer",
-  "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer",
-  "METER_EXPLORER_BASE": "SigNoz | Meter Explorer"
+  "METER_EXPLORER_VIEWS": "SigNoz | Meter Explorer Views",
+  "METER": "SigNoz | Meter"
 }
@@ -437,10 +437,10 @@ const routes: AppRoutes[] = [
 	},

 	{
-		path: ROUTES.METER_EXPLORER_BASE,
+		path: ROUTES.METER,
 		exact: true,
 		component: MeterExplorer,
-		key: 'METER_EXPLORER_BASE',
+		key: 'METER',
 		isPrivate: true,
 	},
 	{
@@ -137,5 +137,11 @@
 		h6 {
 			color: var(--text-ink-500);
 		}
+
+		code {
+			background-color: var(--bg-vanilla-300);
+			border: 1px solid var(--bg-vanilla-300);
+			color: var(--text-ink-500);
+		}
 	}
 }
@@ -82,12 +82,14 @@ function QuerySearch({
 	dataSource,
 	onRun,
 	signalSource,
+	isMetricsExplorer = false,
 }: {
 	onChange: (value: string) => void;
 	queryData: IBuilderQuery;
 	dataSource: DataSource;
 	signalSource?: string;
 	onRun?: (query: string) => void;
+	isMetricsExplorer?: boolean;
 }): JSX.Element {
 	const isDarkMode = useIsDarkMode();
 	const [query, setQuery] = useState<string>(queryData.filter?.expression || '');
@@ -208,7 +210,8 @@ function QuerySearch({
 		async (searchText?: string): Promise<void> => {
 			if (
 				dataSource === DataSource.METRICS &&
-				!queryData.aggregateAttribute?.key
+				!queryData.aggregateAttribute?.key &&
+				!isMetricsExplorer
 			) {
 				setKeySuggestions([]);
 				return;
@@ -249,6 +252,7 @@
 			toggleSuggestions,
 			queryData.aggregateAttribute?.key,
 			signalSource,
+			isMetricsExplorer,
 		],
 	);

@@ -1453,6 +1457,7 @@
 QuerySearch.defaultProps = {
 	onRun: undefined,
 	signalSource: '',
+	isMetricsExplorer: false,
 };

 export default QuerySearch;
@@ -5,8 +5,11 @@ import { SignalType } from 'components/QuickFilters/types';
 import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
 import { useGetAggregateKeys } from 'hooks/queryBuilder/useGetAggregateKeys';
 import { useGetAttributeSuggestions } from 'hooks/queryBuilder/useGetAttributeSuggestions';
+import { useGetQueryKeySuggestions } from 'hooks/querySuggestions/useGetQueryKeySuggestions';
 import { useMemo } from 'react';
+import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
+import { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
 import { Filter as FilterType } from 'types/api/quickFilters/getCustomFilters';
 import { DataSource } from 'types/common/queryBuilder';
@@ -40,6 +43,10 @@ function OtherFilters({
 		() => SIGNAL_DATA_SOURCE_MAP[signal as SignalType] === DataSource.LOGS,
 		[signal],
 	);
+	const isMeterDataSource = useMemo(
+		() => signal && signal === SignalType.METER_EXPLORER,
+		[signal],
+	);

 	const {
 		data: suggestionsData,
@@ -69,7 +76,22 @@
 		},
 		{
 			queryKey: [REACT_QUERY_KEY.GET_OTHER_FILTERS, inputValue],
-			enabled: !!signal && !isLogDataSource,
+			enabled: !!signal && !isLogDataSource && !isMeterDataSource,
 		},
 	);
+
+	const {
+		data: fieldKeysData,
+		isLoading: isLoadingFieldKeys,
+	} = useGetQueryKeySuggestions(
+		{
+			searchText: inputValue,
+			signal: SIGNAL_DATA_SOURCE_MAP[signal as SignalType],
+			signalSource: 'meter',
+		},
+		{
+			queryKey: [REACT_QUERY_KEY.GET_OTHER_FILTERS, inputValue],
+			enabled: !!signal && isMeterDataSource,
+		},
+	);

@@ -77,13 +99,33 @@
 		let filterAttributes;
 		if (isLogDataSource) {
 			filterAttributes = suggestionsData?.payload?.attributes || [];
+		} else if (isMeterDataSource) {
+			const fieldKeys: QueryKeyDataSuggestionsProps[] = Object.values(
+				fieldKeysData?.data?.data?.keys || {},
+			)?.flat();
+			filterAttributes = fieldKeys.map(
+				(attr) =>
+					({
+						key: attr.name,
+						dataType: attr.fieldDataType,
+						type: attr.fieldContext,
+						signal: attr.signal,
+					} as BaseAutocompleteData),
+			);
 		} else {
 			filterAttributes = aggregateKeysData?.payload?.attributeKeys || [];
 		}
 		return filterAttributes?.filter(
 			(attr) => !addedFilters.some((filter) => filter.key === attr.key),
 		);
-	}, [suggestionsData, aggregateKeysData, addedFilters, isLogDataSource]);
+	}, [
+		suggestionsData,
+		aggregateKeysData,
+		addedFilters,
+		isLogDataSource,
+		fieldKeysData,
+		isMeterDataSource,
+	]);

 	const handleAddFilter = (filter: FilterType): void => {
 		setAddedFilters((prev) => [
@@ -99,7 +141,8 @@
 	};

 	const renderFilters = (): React.ReactNode => {
-		const isLoading = isFetchingSuggestions || isFetchingAggregateKeys;
+		const isLoading =
+			isFetchingSuggestions || isFetchingAggregateKeys || isLoadingFieldKeys;
 		if (isLoading) return <OtherFiltersSkeleton />;
 		if (!otherFilters?.length)
 			return <div className="no-values-found">No values found</div>;
@@ -0,0 +1,63 @@
import './styles.scss';

import { Select } from 'antd';
import { DefaultOptionType } from 'antd/es/select';

import { UniversalYAxisUnitMappings, Y_AXIS_CATEGORIES } from './constants';
import { UniversalYAxisUnit, YAxisUnitSelectorProps } from './types';
import { mapMetricUnitToUniversalUnit } from './utils';

function YAxisUnitSelector({
	value,
	onChange,
	placeholder = 'Please select a unit',
	loading = false,
}: YAxisUnitSelectorProps): JSX.Element {
	const universalUnit = mapMetricUnitToUniversalUnit(value);

	const handleSearch = (
		searchTerm: string,
		currentOption: DefaultOptionType | undefined,
	): boolean => {
		if (!currentOption?.value) return false;

		const search = searchTerm.toLowerCase();
		const unitId = currentOption.value.toString().toLowerCase();
		const unitLabel = currentOption.children?.toString().toLowerCase() || '';

		// Check label and id
		if (unitId.includes(search) || unitLabel.includes(search)) return true;

		// Check aliases (from the mapping) using array iteration
		const aliases = Array.from(
			UniversalYAxisUnitMappings[currentOption.value as UniversalYAxisUnit] ?? [],
		);

		return aliases.some((alias) => alias.toLowerCase().includes(search));
	};

	return (
		<div className="y-axis-unit-selector-component">
			<Select
				showSearch
				value={universalUnit}
				onChange={onChange}
				placeholder={placeholder}
				filterOption={(input, option): boolean => handleSearch(input, option)}
				loading={loading}
			>
				{Y_AXIS_CATEGORIES.map((category) => (
					<Select.OptGroup key={category.name} label={category.name}>
						{category.units.map((unit) => (
							<Select.Option key={unit.id} value={unit.id}>
								{unit.name}
							</Select.Option>
						))}
					</Select.OptGroup>
				))}
			</Select>
		</div>
	);
}

export default YAxisUnitSelector;
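For context, a minimal sketch of how this selector might be wired into a parent form. Only the YAxisUnitSelector props (value, onChange, placeholder) come from the listing above; the PanelUnitField wrapper, its import path, and the initial 'bytes' value are hypothetical.

```tsx
import { useState } from 'react';

import YAxisUnitSelector from 'components/YAxisUnitSelector';
import { UniversalYAxisUnit } from 'components/YAxisUnitSelector/types';

// Hypothetical wrapper: holds the selected unit in local state and passes a
// raw metric unit (e.g. 'bytes') down; the selector maps it to the universal
// id ('By') before rendering the antd Select.
function PanelUnitField(): JSX.Element {
	const [unit, setUnit] = useState<string | undefined>('bytes');

	return (
		<YAxisUnitSelector
			value={unit}
			onChange={(next: UniversalYAxisUnit): void => setUnit(next)}
			placeholder="Please select a unit"
		/>
	);
}

export default PanelUnitField;
```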
@@ -0,0 +1,68 @@
import { fireEvent, render, screen } from '@testing-library/react';

import YAxisUnitSelector from '../YAxisUnitSelector';

describe('YAxisUnitSelector', () => {
	const mockOnChange = jest.fn();

	beforeEach(() => {
		mockOnChange.mockClear();
	});

	it('renders with default placeholder', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		expect(screen.getByText('Please select a unit')).toBeInTheDocument();
	});

	it('renders with custom placeholder', () => {
		render(
			<YAxisUnitSelector
				value=""
				onChange={mockOnChange}
				placeholder="Custom placeholder"
			/>,
		);
		expect(screen.queryByText('Custom placeholder')).toBeInTheDocument();
	});

	it('calls onChange when a value is selected', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		const select = screen.getByRole('combobox');

		fireEvent.mouseDown(select);
		const option = screen.getByText('Bytes (B)');
		fireEvent.click(option);

		expect(mockOnChange).toHaveBeenCalledWith('By', {
			children: 'Bytes (B)',
			key: 'By',
			value: 'By',
		});
	});

	it('filters options based on search input', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		const select = screen.getByRole('combobox');

		fireEvent.mouseDown(select);
		const input = screen.getByRole('combobox');
		fireEvent.change(input, { target: { value: 'byte' } });

		expect(screen.getByText('Bytes/sec')).toBeInTheDocument();
	});

	it('shows all categories and their units', () => {
		render(<YAxisUnitSelector value="" onChange={mockOnChange} />);
		const select = screen.getByRole('combobox');

		fireEvent.mouseDown(select);

		// Check for category headers
		expect(screen.getByText('Data')).toBeInTheDocument();
		expect(screen.getByText('Time')).toBeInTheDocument();

		// Check for some common units
		expect(screen.getByText('Bytes (B)')).toBeInTheDocument();
		expect(screen.getByText('Seconds (s)')).toBeInTheDocument();
	});
});
@@ -0,0 +1,39 @@
import {
	getUniversalNameFromMetricUnit,
	mapMetricUnitToUniversalUnit,
} from '../utils';

describe('YAxisUnitSelector utils', () => {
	describe('mapMetricUnitToUniversalUnit', () => {
		it('maps known units correctly', () => {
			expect(mapMetricUnitToUniversalUnit('bytes')).toBe('By');
			expect(mapMetricUnitToUniversalUnit('seconds')).toBe('s');
			expect(mapMetricUnitToUniversalUnit('bytes_per_second')).toBe('By/s');
		});

		it('returns null or self for unknown units', () => {
			expect(mapMetricUnitToUniversalUnit('unknown_unit')).toBe('unknown_unit');
			expect(mapMetricUnitToUniversalUnit('')).toBe(null);
			expect(mapMetricUnitToUniversalUnit(undefined)).toBe(null);
		});
	});

	describe('getUniversalNameFromMetricUnit', () => {
		it('returns human readable names for known units', () => {
			expect(getUniversalNameFromMetricUnit('bytes')).toBe('Bytes (B)');
			expect(getUniversalNameFromMetricUnit('seconds')).toBe('Seconds (s)');
			expect(getUniversalNameFromMetricUnit('bytes_per_second')).toBe('Bytes/sec');
		});

		it('returns original unit for unknown units', () => {
			expect(getUniversalNameFromMetricUnit('unknown_unit')).toBe('unknown_unit');
			expect(getUniversalNameFromMetricUnit('')).toBe('-');
			expect(getUniversalNameFromMetricUnit(undefined)).toBe('-');
		});

		it('handles case variations', () => {
			expect(getUniversalNameFromMetricUnit('bytes')).toBe('Bytes (B)');
			expect(getUniversalNameFromMetricUnit('s')).toBe('Seconds (s)');
		});
	});
});
frontend/src/components/YAxisUnitSelector/constants.ts (new file, 627 lines)
@@ -0,0 +1,627 @@
|
||||
import { UniversalYAxisUnit, YAxisUnit } from './types';
|
||||
|
||||
// Mapping of universal y-axis units to their AWS, UCUM, and OpenMetrics equivalents
|
||||
export const UniversalYAxisUnitMappings: Record<
|
||||
UniversalYAxisUnit,
|
||||
Set<YAxisUnit>
|
||||
> = {
|
||||
// Time
|
||||
[UniversalYAxisUnit.NANOSECONDS]: new Set([
|
||||
YAxisUnit.UCUM_NANOSECONDS,
|
||||
YAxisUnit.OPEN_METRICS_NANOSECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MICROSECONDS]: new Set([
|
||||
YAxisUnit.AWS_MICROSECONDS,
|
||||
YAxisUnit.UCUM_MICROSECONDS,
|
||||
YAxisUnit.OPEN_METRICS_MICROSECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MILLISECONDS]: new Set([
|
||||
YAxisUnit.AWS_MILLISECONDS,
|
||||
YAxisUnit.UCUM_MILLISECONDS,
|
||||
YAxisUnit.OPEN_METRICS_MILLISECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.SECONDS]: new Set([
|
||||
YAxisUnit.AWS_SECONDS,
|
||||
YAxisUnit.UCUM_SECONDS,
|
||||
YAxisUnit.OPEN_METRICS_SECONDS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MINUTES]: new Set([
|
||||
YAxisUnit.UCUM_MINUTES,
|
||||
YAxisUnit.OPEN_METRICS_MINUTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.HOURS]: new Set([
|
||||
YAxisUnit.UCUM_HOURS,
|
||||
YAxisUnit.OPEN_METRICS_HOURS,
|
||||
]),
|
||||
[UniversalYAxisUnit.DAYS]: new Set([
|
||||
YAxisUnit.UCUM_DAYS,
|
||||
YAxisUnit.OPEN_METRICS_DAYS,
|
||||
]),
|
||||
[UniversalYAxisUnit.WEEKS]: new Set([YAxisUnit.UCUM_WEEKS]),
|
||||
|
||||
// Data
|
||||
[UniversalYAxisUnit.BYTES]: new Set([
|
||||
YAxisUnit.AWS_BYTES,
|
||||
YAxisUnit.UCUM_BYTES,
|
||||
YAxisUnit.OPEN_METRICS_BYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBYTES]: new Set([
|
||||
YAxisUnit.AWS_KILOBYTES,
|
||||
YAxisUnit.UCUM_KILOBYTES,
|
||||
YAxisUnit.OPEN_METRICS_KILOBYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABYTES]: new Set([
|
||||
YAxisUnit.AWS_MEGABYTES,
|
||||
YAxisUnit.UCUM_MEGABYTES,
|
||||
YAxisUnit.OPEN_METRICS_MEGABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABYTES]: new Set([
|
||||
YAxisUnit.AWS_GIGABYTES,
|
||||
YAxisUnit.UCUM_GIGABYTES,
|
||||
YAxisUnit.OPEN_METRICS_GIGABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABYTES]: new Set([
|
||||
YAxisUnit.AWS_TERABYTES,
|
||||
YAxisUnit.UCUM_TERABYTES,
|
||||
YAxisUnit.OPEN_METRICS_TERABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABYTES]: new Set([
|
||||
YAxisUnit.AWS_PETABYTES,
|
||||
YAxisUnit.UCUM_PEBIBYTES,
|
||||
YAxisUnit.OPEN_METRICS_PEBIBYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABYTES]: new Set([
|
||||
YAxisUnit.AWS_EXABYTES,
|
||||
YAxisUnit.UCUM_EXABYTES,
|
||||
YAxisUnit.OPEN_METRICS_EXABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABYTES]: new Set([
|
||||
YAxisUnit.AWS_ZETTABYTES,
|
||||
YAxisUnit.UCUM_ZETTABYTES,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABYTES,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABYTES]: new Set([
|
||||
YAxisUnit.AWS_YOTTABYTES,
|
||||
YAxisUnit.UCUM_YOTTABYTES,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABYTES,
|
||||
]),
|
||||
|
||||
// Data Rate
|
||||
[UniversalYAxisUnit.BYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_BYTES_SECOND,
|
||||
YAxisUnit.UCUM_BYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_BYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_KILOBYTES_SECOND,
|
||||
YAxisUnit.UCUM_KILOBYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_KILOBYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_MEGABYTES_SECOND,
|
||||
YAxisUnit.UCUM_MEGABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_MEGABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_GIGABYTES_SECOND,
|
||||
YAxisUnit.UCUM_GIGABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_GIGABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_TERABYTES_SECOND,
|
||||
YAxisUnit.UCUM_TERABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_TERABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_PETABYTES_SECOND,
|
||||
YAxisUnit.UCUM_PETABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_PETABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_EXABYTES_SECOND,
|
||||
YAxisUnit.UCUM_EXABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_EXABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_ZETTABYTES_SECOND,
|
||||
YAxisUnit.UCUM_ZETTABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABYTES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABYTES_SECOND]: new Set([
|
||||
YAxisUnit.AWS_YOTTABYTES_SECOND,
|
||||
YAxisUnit.UCUM_YOTTABYTES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABYTES_SECOND,
|
||||
]),
|
||||
|
||||
// Bits
|
||||
[UniversalYAxisUnit.BITS]: new Set([
|
||||
YAxisUnit.AWS_BITS,
|
||||
YAxisUnit.UCUM_BITS,
|
||||
YAxisUnit.OPEN_METRICS_BITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBITS]: new Set([
|
||||
YAxisUnit.AWS_KILOBITS,
|
||||
YAxisUnit.UCUM_KILOBITS,
|
||||
YAxisUnit.OPEN_METRICS_KILOBITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABITS]: new Set([
|
||||
YAxisUnit.AWS_MEGABITS,
|
||||
YAxisUnit.UCUM_MEGABITS,
|
||||
YAxisUnit.OPEN_METRICS_MEGABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABITS]: new Set([
|
||||
YAxisUnit.AWS_GIGABITS,
|
||||
YAxisUnit.UCUM_GIGABITS,
|
||||
YAxisUnit.OPEN_METRICS_GIGABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABITS]: new Set([
|
||||
YAxisUnit.AWS_TERABITS,
|
||||
YAxisUnit.UCUM_TERABITS,
|
||||
YAxisUnit.OPEN_METRICS_TERABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABITS]: new Set([
|
||||
YAxisUnit.AWS_PETABITS,
|
||||
YAxisUnit.UCUM_PETABITS,
|
||||
YAxisUnit.OPEN_METRICS_PETABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABITS]: new Set([
|
||||
YAxisUnit.AWS_EXABITS,
|
||||
YAxisUnit.UCUM_EXABITS,
|
||||
YAxisUnit.OPEN_METRICS_EXABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABITS]: new Set([
|
||||
YAxisUnit.AWS_ZETTABITS,
|
||||
YAxisUnit.UCUM_ZETTABITS,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABITS,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABITS]: new Set([
|
||||
YAxisUnit.AWS_YOTTABITS,
|
||||
YAxisUnit.UCUM_YOTTABITS,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABITS,
|
||||
]),
|
||||
|
||||
// Bit Rate
|
||||
[UniversalYAxisUnit.BITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_BITS_SECOND,
|
||||
YAxisUnit.UCUM_BITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_BITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.KILOBITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_KILOBITS_SECOND,
|
||||
YAxisUnit.UCUM_KILOBITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_KILOBITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.MEGABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_MEGABITS_SECOND,
|
||||
YAxisUnit.UCUM_MEGABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_MEGABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.GIGABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_GIGABITS_SECOND,
|
||||
YAxisUnit.UCUM_GIGABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_GIGABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.TERABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_TERABITS_SECOND,
|
||||
YAxisUnit.UCUM_TERABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_TERABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.PETABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_PETABITS_SECOND,
|
||||
YAxisUnit.UCUM_PETABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_PETABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.EXABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_EXABITS_SECOND,
|
||||
YAxisUnit.UCUM_EXABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_EXABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.ZETTABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_ZETTABITS_SECOND,
|
||||
YAxisUnit.UCUM_ZETTABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_ZETTABITS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.YOTTABITS_SECOND]: new Set([
|
||||
YAxisUnit.AWS_YOTTABITS_SECOND,
|
||||
YAxisUnit.UCUM_YOTTABITS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_YOTTABITS_SECOND,
|
||||
]),
|
||||
|
||||
// Count
|
||||
[UniversalYAxisUnit.COUNT]: new Set([
|
||||
YAxisUnit.AWS_COUNT,
|
||||
YAxisUnit.UCUM_COUNT,
|
||||
YAxisUnit.OPEN_METRICS_COUNT,
|
||||
]),
|
||||
[UniversalYAxisUnit.COUNT_SECOND]: new Set([
|
||||
YAxisUnit.AWS_COUNT_SECOND,
|
||||
YAxisUnit.UCUM_COUNT_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_COUNT_SECOND,
|
||||
]),
|
||||
|
||||
// Percent
|
||||
[UniversalYAxisUnit.PERCENT]: new Set([
|
||||
YAxisUnit.AWS_PERCENT,
|
||||
YAxisUnit.UCUM_PERCENT,
|
||||
YAxisUnit.OPEN_METRICS_PERCENT,
|
||||
]),
|
||||
[UniversalYAxisUnit.NONE]: new Set([
|
||||
YAxisUnit.AWS_NONE,
|
||||
YAxisUnit.UCUM_NONE,
|
||||
YAxisUnit.OPEN_METRICS_NONE,
|
||||
]),
|
||||
[UniversalYAxisUnit.PERCENT_UNIT]: new Set([
|
||||
YAxisUnit.OPEN_METRICS_PERCENT_UNIT,
|
||||
]),
|
||||
|
||||
// Count Rate
|
||||
[UniversalYAxisUnit.COUNT_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_COUNTS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_COUNTS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.OPS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_OPS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_OPS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.OPS_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_OPS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_OPS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.REQUESTS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_REQUESTS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_REQUESTS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.REQUESTS_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_REQUESTS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_REQUESTS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.READS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_READS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_READS_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.WRITES_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_WRITES_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_WRITES_SECOND,
|
||||
]),
|
||||
[UniversalYAxisUnit.READS_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_READS_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_READS_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.WRITES_MINUTE]: new Set([
|
||||
YAxisUnit.UCUM_WRITES_MINUTE,
|
||||
YAxisUnit.OPEN_METRICS_WRITES_MINUTE,
|
||||
]),
|
||||
[UniversalYAxisUnit.IOOPS_SECOND]: new Set([
|
||||
YAxisUnit.UCUM_IOPS_SECOND,
|
||||
YAxisUnit.OPEN_METRICS_IOPS_SECOND,
|
||||
]),
|
||||
};
|
||||
|
||||
// Mapping of universal y-axis units to their display labels
|
||||
export const Y_AXIS_UNIT_NAMES: Record<UniversalYAxisUnit, string> = {
|
||||
[UniversalYAxisUnit.SECONDS]: 'Seconds (s)',
|
||||
[UniversalYAxisUnit.MILLISECONDS]: 'Milliseconds (ms)',
|
||||
[UniversalYAxisUnit.MICROSECONDS]: 'Microseconds (µs)',
|
||||
[UniversalYAxisUnit.BYTES]: 'Bytes (B)',
|
||||
[UniversalYAxisUnit.KILOBYTES]: 'Kilobytes (KB)',
|
||||
[UniversalYAxisUnit.MEGABYTES]: 'Megabytes (MB)',
|
||||
[UniversalYAxisUnit.GIGABYTES]: 'Gigabytes (GB)',
|
||||
[UniversalYAxisUnit.TERABYTES]: 'Terabytes (TB)',
|
||||
[UniversalYAxisUnit.PETABYTES]: 'Petabytes (PB)',
|
||||
[UniversalYAxisUnit.EXABYTES]: 'Exabytes (EB)',
|
||||
[UniversalYAxisUnit.ZETTABYTES]: 'Zettabytes (ZB)',
|
||||
[UniversalYAxisUnit.YOTTABYTES]: 'Yottabytes (YB)',
|
||||
[UniversalYAxisUnit.BITS]: 'Bits (b)',
|
||||
[UniversalYAxisUnit.KILOBITS]: 'Kilobits (Kb)',
|
||||
[UniversalYAxisUnit.MEGABITS]: 'Megabits (Mb)',
|
||||
[UniversalYAxisUnit.GIGABITS]: 'Gigabits (Gb)',
|
||||
[UniversalYAxisUnit.TERABITS]: 'Terabits (Tb)',
|
||||
[UniversalYAxisUnit.PETABITS]: 'Petabits (Pb)',
|
||||
[UniversalYAxisUnit.EXABITS]: 'Exabits (Eb)',
|
||||
[UniversalYAxisUnit.ZETTABITS]: 'Zettabits (Zb)',
|
||||
[UniversalYAxisUnit.YOTTABITS]: 'Yottabits (Yb)',
|
||||
[UniversalYAxisUnit.BYTES_SECOND]: 'Bytes/sec',
|
||||
[UniversalYAxisUnit.KILOBYTES_SECOND]: 'Kilobytes/sec',
|
||||
[UniversalYAxisUnit.MEGABYTES_SECOND]: 'Megabytes/sec',
|
||||
[UniversalYAxisUnit.GIGABYTES_SECOND]: 'Gigabytes/sec',
|
||||
[UniversalYAxisUnit.TERABYTES_SECOND]: 'Terabytes/sec',
|
||||
[UniversalYAxisUnit.PETABYTES_SECOND]: 'Petabytes/sec',
|
||||
[UniversalYAxisUnit.EXABYTES_SECOND]: 'Exabytes/sec',
|
||||
[UniversalYAxisUnit.ZETTABYTES_SECOND]: 'Zettabytes/sec',
|
||||
[UniversalYAxisUnit.YOTTABYTES_SECOND]: 'Yottabytes/sec',
|
||||
[UniversalYAxisUnit.BITS_SECOND]: 'Bits/sec',
|
||||
[UniversalYAxisUnit.KILOBITS_SECOND]: 'Kilobits/sec',
|
||||
[UniversalYAxisUnit.MEGABITS_SECOND]: 'Megabits/sec',
|
||||
[UniversalYAxisUnit.GIGABITS_SECOND]: 'Gigabits/sec',
|
||||
[UniversalYAxisUnit.TERABITS_SECOND]: 'Terabits/sec',
|
||||
[UniversalYAxisUnit.PETABITS_SECOND]: 'Petabits/sec',
|
||||
[UniversalYAxisUnit.EXABITS_SECOND]: 'Exabits/sec',
|
||||
[UniversalYAxisUnit.ZETTABITS_SECOND]: 'Zettabits/sec',
|
||||
[UniversalYAxisUnit.YOTTABITS_SECOND]: 'Yottabits/sec',
|
||||
[UniversalYAxisUnit.COUNT]: 'Count',
|
||||
[UniversalYAxisUnit.COUNT_SECOND]: 'Count/sec',
|
||||
[UniversalYAxisUnit.PERCENT]: 'Percent (0 - 100)',
|
||||
[UniversalYAxisUnit.NONE]: 'None',
|
||||
[UniversalYAxisUnit.WEEKS]: 'Weeks',
|
||||
[UniversalYAxisUnit.DAYS]: 'Days',
|
||||
[UniversalYAxisUnit.HOURS]: 'Hours',
|
||||
[UniversalYAxisUnit.MINUTES]: 'Minutes',
|
||||
[UniversalYAxisUnit.NANOSECONDS]: 'Nanoseconds',
|
||||
[UniversalYAxisUnit.COUNT_MINUTE]: 'Count/min',
|
||||
[UniversalYAxisUnit.OPS_SECOND]: 'Ops/sec',
|
||||
[UniversalYAxisUnit.OPS_MINUTE]: 'Ops/min',
|
||||
[UniversalYAxisUnit.REQUESTS_SECOND]: 'Requests/sec',
|
||||
[UniversalYAxisUnit.REQUESTS_MINUTE]: 'Requests/min',
|
||||
[UniversalYAxisUnit.READS_SECOND]: 'Reads/sec',
|
||||
[UniversalYAxisUnit.WRITES_SECOND]: 'Writes/sec',
|
||||
[UniversalYAxisUnit.READS_MINUTE]: 'Reads/min',
|
||||
[UniversalYAxisUnit.WRITES_MINUTE]: 'Writes/min',
|
||||
[UniversalYAxisUnit.IOOPS_SECOND]: 'IOPS/sec',
|
||||
[UniversalYAxisUnit.PERCENT_UNIT]: 'Percent (0.0 - 1.0)',
|
||||
};
|
||||
|
||||
// Splitting the universal y-axis units into categories
|
||||
export const Y_AXIS_CATEGORIES = [
|
||||
{
|
||||
name: 'Time',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.SECONDS],
|
||||
id: UniversalYAxisUnit.SECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MILLISECONDS],
|
||||
id: UniversalYAxisUnit.MILLISECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MICROSECONDS],
|
||||
id: UniversalYAxisUnit.MICROSECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.NANOSECONDS],
|
||||
id: UniversalYAxisUnit.NANOSECONDS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MINUTES],
|
||||
id: UniversalYAxisUnit.MINUTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.HOURS],
|
||||
id: UniversalYAxisUnit.HOURS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.DAYS],
|
||||
id: UniversalYAxisUnit.DAYS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.WEEKS],
|
||||
id: UniversalYAxisUnit.WEEKS,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Data',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BYTES],
|
||||
id: UniversalYAxisUnit.BYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBYTES],
|
||||
id: UniversalYAxisUnit.KILOBYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABYTES],
|
||||
id: UniversalYAxisUnit.MEGABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABYTES],
|
||||
id: UniversalYAxisUnit.GIGABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABYTES],
|
||||
id: UniversalYAxisUnit.TERABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABYTES],
|
||||
id: UniversalYAxisUnit.PETABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES],
|
||||
id: UniversalYAxisUnit.EXABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES],
|
||||
id: UniversalYAxisUnit.ZETTABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES],
|
||||
id: UniversalYAxisUnit.YOTTABYTES,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BITS],
|
||||
id: UniversalYAxisUnit.BITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBITS],
|
||||
id: UniversalYAxisUnit.KILOBITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABITS],
|
||||
id: UniversalYAxisUnit.MEGABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABITS],
|
||||
id: UniversalYAxisUnit.GIGABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABITS],
|
||||
id: UniversalYAxisUnit.TERABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABITS],
|
||||
id: UniversalYAxisUnit.PETABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS],
|
||||
id: UniversalYAxisUnit.EXABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS],
|
||||
id: UniversalYAxisUnit.ZETTABITS,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS],
|
||||
id: UniversalYAxisUnit.YOTTABITS,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Data Rate',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BYTES_SECOND],
|
||||
id: UniversalYAxisUnit.BYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBYTES_SECOND],
|
||||
id: UniversalYAxisUnit.KILOBYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.MEGABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.GIGABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.TERABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.PETABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.EXABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.ZETTABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABYTES_SECOND],
|
||||
id: UniversalYAxisUnit.YOTTABYTES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.BITS_SECOND],
|
||||
id: UniversalYAxisUnit.BITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.KILOBITS_SECOND],
|
||||
id: UniversalYAxisUnit.KILOBITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.MEGABITS_SECOND],
|
||||
id: UniversalYAxisUnit.MEGABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.GIGABITS_SECOND],
|
||||
id: UniversalYAxisUnit.GIGABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.TERABITS_SECOND],
|
||||
id: UniversalYAxisUnit.TERABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PETABITS_SECOND],
|
||||
id: UniversalYAxisUnit.PETABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.EXABITS_SECOND],
|
||||
id: UniversalYAxisUnit.EXABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.ZETTABITS_SECOND],
|
||||
id: UniversalYAxisUnit.ZETTABITS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.YOTTABITS_SECOND],
|
||||
id: UniversalYAxisUnit.YOTTABITS_SECOND,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Count',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT],
|
||||
id: UniversalYAxisUnit.COUNT,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT_SECOND],
|
||||
id: UniversalYAxisUnit.COUNT_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.COUNT_MINUTE],
|
||||
id: UniversalYAxisUnit.COUNT_MINUTE,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Operations',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.OPS_SECOND],
|
||||
id: UniversalYAxisUnit.OPS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.OPS_MINUTE],
|
||||
id: UniversalYAxisUnit.OPS_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.REQUESTS_SECOND],
|
||||
id: UniversalYAxisUnit.REQUESTS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.REQUESTS_MINUTE],
|
||||
id: UniversalYAxisUnit.REQUESTS_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.READS_SECOND],
|
||||
id: UniversalYAxisUnit.READS_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.WRITES_SECOND],
|
||||
id: UniversalYAxisUnit.WRITES_SECOND,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.READS_MINUTE],
|
||||
id: UniversalYAxisUnit.READS_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.WRITES_MINUTE],
|
||||
id: UniversalYAxisUnit.WRITES_MINUTE,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.IOOPS_SECOND],
|
||||
id: UniversalYAxisUnit.IOOPS_SECOND,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Percentage',
|
||||
units: [
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PERCENT],
|
||||
id: UniversalYAxisUnit.PERCENT,
|
||||
},
|
||||
{
|
||||
name: Y_AXIS_UNIT_NAMES[UniversalYAxisUnit.PERCENT_UNIT],
|
||||
id: UniversalYAxisUnit.PERCENT_UNIT,
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
frontend/src/components/YAxisUnitSelector/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
import YAxisUnitSelector from './YAxisUnitSelector';

export default YAxisUnitSelector;
frontend/src/components/YAxisUnitSelector/styles.scss (new file, 5 lines)
@@ -0,0 +1,5 @@
.y-axis-unit-selector-component {
	.ant-select {
		width: 220px;
	}
}
frontend/src/components/YAxisUnitSelector/types.ts (new file, 365 lines)
@@ -0,0 +1,365 @@
|
||||
export interface YAxisUnitSelectorProps {
|
||||
value: string | undefined;
|
||||
onChange: (value: UniversalYAxisUnit) => void;
|
||||
placeholder?: string;
|
||||
loading?: boolean;
|
||||
disabled?: boolean;
|
||||
}
|
||||
|
||||
export enum UniversalYAxisUnit {
|
||||
// Time
|
||||
WEEKS = 'wk',
|
||||
DAYS = 'd',
|
||||
HOURS = 'h',
|
||||
MINUTES = 'min',
|
||||
SECONDS = 's',
|
||||
MICROSECONDS = 'us',
|
||||
MILLISECONDS = 'ms',
|
||||
NANOSECONDS = 'ns',
|
||||
|
||||
// Data
|
||||
BYTES = 'By',
|
||||
KILOBYTES = 'kBy',
|
||||
MEGABYTES = 'MBy',
|
||||
GIGABYTES = 'GBy',
|
||||
TERABYTES = 'TBy',
|
||||
PETABYTES = 'PBy',
|
||||
EXABYTES = 'EBy',
|
||||
ZETTABYTES = 'ZBy',
|
||||
YOTTABYTES = 'YBy',
|
||||
|
||||
// Data Rate
|
||||
BYTES_SECOND = 'By/s',
|
||||
KILOBYTES_SECOND = 'kBy/s',
|
||||
MEGABYTES_SECOND = 'MBy/s',
|
||||
GIGABYTES_SECOND = 'GBy/s',
|
||||
TERABYTES_SECOND = 'TBy/s',
|
||||
PETABYTES_SECOND = 'PBy/s',
|
||||
EXABYTES_SECOND = 'EBy/s',
|
||||
ZETTABYTES_SECOND = 'ZBy/s',
|
||||
YOTTABYTES_SECOND = 'YBy/s',
|
||||
|
||||
// Bits
|
||||
BITS = 'bit',
|
||||
KILOBITS = 'kbit',
|
||||
MEGABITS = 'Mbit',
|
||||
GIGABITS = 'Gbit',
|
||||
TERABITS = 'Tbit',
|
||||
PETABITS = 'Pbit',
|
||||
EXABITS = 'Ebit',
|
||||
ZETTABITS = 'Zbit',
|
||||
YOTTABITS = 'Ybit',
|
||||
|
||||
// Bit Rate
|
||||
BITS_SECOND = 'bit/s',
|
||||
KILOBITS_SECOND = 'kbit/s',
|
||||
MEGABITS_SECOND = 'Mbit/s',
|
||||
GIGABITS_SECOND = 'Gbit/s',
|
||||
TERABITS_SECOND = 'Tbit/s',
|
||||
PETABITS_SECOND = 'Pbit/s',
|
||||
EXABITS_SECOND = 'Ebit/s',
|
||||
ZETTABITS_SECOND = 'Zbit/s',
|
||||
YOTTABITS_SECOND = 'Ybit/s',
|
||||
|
||||
// Count
|
||||
COUNT = '{count}',
|
||||
COUNT_SECOND = '{count}/s',
|
||||
COUNT_MINUTE = '{count}/min',
|
||||
|
||||
// Operations
|
||||
OPS_SECOND = '{ops}/s',
|
||||
OPS_MINUTE = '{ops}/min',
|
||||
|
||||
// Requests
|
||||
REQUESTS_SECOND = '{req}/s',
|
||||
REQUESTS_MINUTE = '{req}/min',
|
||||
|
||||
// Reads/Writes
|
||||
READS_SECOND = '{read}/s',
|
||||
WRITES_SECOND = '{write}/s',
|
||||
READS_MINUTE = '{read}/min',
|
||||
WRITES_MINUTE = '{write}/min',
|
||||
|
||||
// IO Operations
|
||||
IOOPS_SECOND = '{iops}/s',
|
||||
|
||||
// Percent
|
||||
PERCENT = '%',
|
||||
PERCENT_UNIT = 'percentunit',
|
||||
NONE = '1',
|
||||
}
|
||||
|
||||
export enum YAxisUnit {
|
||||
AWS_SECONDS = 'Seconds',
|
||||
UCUM_SECONDS = 's',
|
||||
OPEN_METRICS_SECONDS = 'seconds',
|
||||
|
||||
AWS_MICROSECONDS = 'Microseconds',
|
||||
UCUM_MICROSECONDS = 'us',
|
||||
OPEN_METRICS_MICROSECONDS = 'microseconds',
|
||||
|
||||
AWS_MILLISECONDS = 'Milliseconds',
|
||||
UCUM_MILLISECONDS = 'ms',
|
||||
OPEN_METRICS_MILLISECONDS = 'milliseconds',
|
||||
|
||||
AWS_BYTES = 'Bytes',
|
||||
UCUM_BYTES = 'By',
|
||||
OPEN_METRICS_BYTES = 'bytes',
|
||||
|
||||
AWS_KILOBYTES = 'Kilobytes',
|
||||
UCUM_KILOBYTES = 'kBy',
|
||||
OPEN_METRICS_KILOBYTES = 'kilobytes',
|
||||
|
||||
AWS_MEGABYTES = 'Megabytes',
|
||||
UCUM_MEGABYTES = 'MBy',
|
||||
OPEN_METRICS_MEGABYTES = 'megabytes',
|
||||
|
||||
AWS_GIGABYTES = 'Gigabytes',
|
||||
UCUM_GIGABYTES = 'GBy',
|
||||
OPEN_METRICS_GIGABYTES = 'gigabytes',
|
||||
|
||||
AWS_TERABYTES = 'Terabytes',
|
||||
UCUM_TERABYTES = 'TBy',
|
||||
OPEN_METRICS_TERABYTES = 'terabytes',
|
||||
|
||||
AWS_PETABYTES = 'Petabytes',
|
||||
UCUM_PETABYTES = 'PBy',
|
||||
OPEN_METRICS_PETABYTES = 'petabytes',
|
||||
|
||||
AWS_EXABYTES = 'Exabytes',
|
||||
UCUM_EXABYTES = 'EBy',
|
||||
OPEN_METRICS_EXABYTES = 'exabytes',
|
||||
|
||||
AWS_ZETTABYTES = 'Zettabytes',
|
||||
UCUM_ZETTABYTES = 'ZBy',
|
||||
OPEN_METRICS_ZETTABYTES = 'zettabytes',
|
||||
|
||||
AWS_YOTTABYTES = 'Yottabytes',
|
||||
UCUM_YOTTABYTES = 'YBy',
|
||||
OPEN_METRICS_YOTTABYTES = 'yottabytes',
|
||||
|
||||
AWS_BYTES_SECOND = 'Bytes/Second',
|
||||
UCUM_BYTES_SECOND = 'By/s',
|
||||
OPEN_METRICS_BYTES_SECOND = 'bytes_per_second',
|
||||
|
||||
AWS_KILOBYTES_SECOND = 'Kilobytes/Second',
|
||||
UCUM_KILOBYTES_SECOND = 'kBy/s',
|
||||
OPEN_METRICS_KILOBYTES_SECOND = 'kilobytes_per_second',
|
||||
|
||||
AWS_MEGABYTES_SECOND = 'Megabytes/Second',
|
||||
UCUM_MEGABYTES_SECOND = 'MBy/s',
|
||||
OPEN_METRICS_MEGABYTES_SECOND = 'megabytes_per_second',
|
||||
|
||||
AWS_GIGABYTES_SECOND = 'Gigabytes/Second',
|
||||
UCUM_GIGABYTES_SECOND = 'GBy/s',
|
||||
OPEN_METRICS_GIGABYTES_SECOND = 'gigabytes_per_second',
|
||||
|
||||
AWS_TERABYTES_SECOND = 'Terabytes/Second',
|
||||
UCUM_TERABYTES_SECOND = 'TBy/s',
|
||||
OPEN_METRICS_TERABYTES_SECOND = 'terabytes_per_second',
|
||||
|
||||
AWS_PETABYTES_SECOND = 'Petabytes/Second',
|
||||
UCUM_PETABYTES_SECOND = 'PBy/s',
|
||||
OPEN_METRICS_PETABYTES_SECOND = 'petabytes_per_second',
|
||||
|
||||
AWS_EXABYTES_SECOND = 'Exabytes/Second',
|
||||
UCUM_EXABYTES_SECOND = 'EBy/s',
|
||||
OPEN_METRICS_EXABYTES_SECOND = 'exabytes_per_second',
|
||||
|
||||
AWS_ZETTABYTES_SECOND = 'Zettabytes/Second',
|
||||
UCUM_ZETTABYTES_SECOND = 'ZBy/s',
|
||||
OPEN_METRICS_ZETTABYTES_SECOND = 'zettabytes_per_second',
|
||||
|
||||
AWS_YOTTABYTES_SECOND = 'Yottabytes/Second',
|
||||
UCUM_YOTTABYTES_SECOND = 'YBy/s',
|
||||
OPEN_METRICS_YOTTABYTES_SECOND = 'yottabytes_per_second',
|
||||
|
||||
AWS_BITS = 'Bits',
|
||||
UCUM_BITS = 'bit',
|
||||
OPEN_METRICS_BITS = 'bits',
|
||||
|
||||
AWS_KILOBITS = 'Kilobits',
|
||||
UCUM_KILOBITS = 'kbit',
|
||||
OPEN_METRICS_KILOBITS = 'kilobits',
|
||||
|
||||
AWS_MEGABITS = 'Megabits',
|
||||
UCUM_MEGABITS = 'Mbit',
|
||||
OPEN_METRICS_MEGABITS = 'megabits',
|
||||
|
||||
AWS_GIGABITS = 'Gigabits',
|
||||
UCUM_GIGABITS = 'Gbit',
|
||||
OPEN_METRICS_GIGABITS = 'gigabits',
|
||||
|
||||
AWS_TERABITS = 'Terabits',
|
||||
UCUM_TERABITS = 'Tbit',
|
||||
OPEN_METRICS_TERABITS = 'terabits',
|
||||
|
||||
AWS_PETABITS = 'Petabits',
|
||||
UCUM_PETABITS = 'Pbit',
|
||||
OPEN_METRICS_PETABITS = 'petabits',
|
||||
|
||||
AWS_EXABITS = 'Exabits',
|
||||
UCUM_EXABITS = 'Ebit',
|
||||
OPEN_METRICS_EXABITS = 'exabits',
|
||||
|
||||
AWS_ZETTABITS = 'Zettabits',
|
||||
UCUM_ZETTABITS = 'Zbit',
|
||||
OPEN_METRICS_ZETTABITS = 'zettabits',
|
||||
|
||||
AWS_YOTTABITS = 'Yottabits',
|
||||
UCUM_YOTTABITS = 'Ybit',
|
||||
OPEN_METRICS_YOTTABITS = 'yottabits',
|
||||
|
||||
AWS_BITS_SECOND = 'Bits/Second',
|
||||
UCUM_BITS_SECOND = 'bit/s',
|
||||
OPEN_METRICS_BITS_SECOND = 'bits_per_second',
|
||||
|
||||
AWS_KILOBITS_SECOND = 'Kilobits/Second',
|
||||
UCUM_KILOBITS_SECOND = 'kbit/s',
|
||||
OPEN_METRICS_KILOBITS_SECOND = 'kilobits_per_second',
|
||||
|
||||
AWS_MEGABITS_SECOND = 'Megabits/Second',
|
||||
UCUM_MEGABITS_SECOND = 'Mbit/s',
|
||||
OPEN_METRICS_MEGABITS_SECOND = 'megabits_per_second',
|
||||
|
||||
AWS_GIGABITS_SECOND = 'Gigabits/Second',
|
||||
UCUM_GIGABITS_SECOND = 'Gbit/s',
|
||||
OPEN_METRICS_GIGABITS_SECOND = 'gigabits_per_second',
|
||||
|
||||
AWS_TERABITS_SECOND = 'Terabits/Second',
|
||||
UCUM_TERABITS_SECOND = 'Tbit/s',
|
||||
OPEN_METRICS_TERABITS_SECOND = 'terabits_per_second',
|
||||
|
||||
AWS_PETABITS_SECOND = 'Petabits/Second',
|
||||
UCUM_PETABITS_SECOND = 'Pbit/s',
|
||||
OPEN_METRICS_PETABITS_SECOND = 'petabits_per_second',
|
||||
|
||||
AWS_EXABITS_SECOND = 'Exabits/Second',
|
||||
UCUM_EXABITS_SECOND = 'Ebit/s',
|
||||
OPEN_METRICS_EXABITS_SECOND = 'exabits_per_second',
|
||||
|
||||
AWS_ZETTABITS_SECOND = 'Zettabits/Second',
|
||||
UCUM_ZETTABITS_SECOND = 'Zbit/s',
|
||||
OPEN_METRICS_ZETTABITS_SECOND = 'zettabits_per_second',
|
||||
|
||||
AWS_YOTTABITS_SECOND = 'Yottabits/Second',
|
||||
UCUM_YOTTABITS_SECOND = 'Ybit/s',
|
||||
OPEN_METRICS_YOTTABITS_SECOND = 'yottabits_per_second',
|
||||
|
||||
AWS_COUNT = 'Count',
|
||||
UCUM_COUNT = '{count}',
|
||||
OPEN_METRICS_COUNT = 'count',
|
||||
|
||||
AWS_COUNT_SECOND = 'Count/Second',
|
||||
UCUM_COUNT_SECOND = '{count}/s',
|
||||
OPEN_METRICS_COUNT_SECOND = 'count_per_second',
|
||||
|
||||
AWS_PERCENT = 'Percent',
|
||||
UCUM_PERCENT = '%',
|
||||
OPEN_METRICS_PERCENT = 'ratio',
|
||||
|
||||
AWS_NONE = 'None',
|
||||
UCUM_NONE = '1',
|
||||
OPEN_METRICS_NONE = 'none',
|
||||
|
||||
UCUM_NANOSECONDS = 'ns',
|
||||
OPEN_METRICS_NANOSECONDS = 'nanoseconds',
|
||||
|
||||
UCUM_MINUTES = 'min',
|
||||
OPEN_METRICS_MINUTES = 'minutes',
|
||||
|
||||
UCUM_HOURS = 'h',
|
||||
OPEN_METRICS_HOURS = 'hours',
|
||||
|
||||
UCUM_DAYS = 'd',
|
||||
OPEN_METRICS_DAYS = 'days',
|
||||
|
||||
UCUM_WEEKS = 'wk',
|
||||
OPEN_METRICS_WEEKS = 'weeks',
|
||||
|
||||
UCUM_KIBIBYTES = 'KiBy',
|
||||
OPEN_METRICS_KIBIBYTES = 'kibibytes',
|
||||
|
||||
UCUM_MEBIBYTES = 'MiBy',
|
||||
OPEN_METRICS_MEBIBYTES = 'mebibytes',
|
||||
|
||||
UCUM_GIBIBYTES = 'GiBy',
|
||||
OPEN_METRICS_GIBIBYTES = 'gibibytes',
|
||||
|
||||
UCUM_TEBIBYTES = 'TiBy',
|
||||
OPEN_METRICS_TEBIBYTES = 'tebibytes',
|
||||
|
||||
UCUM_PEBIBYTES = 'PiBy',
|
||||
OPEN_METRICS_PEBIBYTES = 'pebibytes',
|
||||
|
||||
UCUM_KIBIBYTES_SECOND = 'KiBy/s',
|
||||
OPEN_METRICS_KIBIBYTES_SECOND = 'kibibytes_per_second',
|
||||
|
||||
UCUM_KIBIBITS_SECOND = 'Kibit/s',
|
||||
OPEN_METRICS_KIBIBITS_SECOND = 'kibibits_per_second',
|
||||
|
||||
UCUM_MEBIBYTES_SECOND = 'MiBy/s',
|
||||
OPEN_METRICS_MEBIBYTES_SECOND = 'mebibytes_per_second',
|
||||
|
||||
UCUM_MEBIBITS_SECOND = 'Mibit/s',
|
||||
OPEN_METRICS_MEBIBITS_SECOND = 'mebibits_per_second',
|
||||
|
||||
UCUM_GIBIBYTES_SECOND = 'GiBy/s',
|
||||
OPEN_METRICS_GIBIBYTES_SECOND = 'gibibytes_per_second',
|
||||
|
||||
UCUM_GIBIBITS_SECOND = 'Gibit/s',
|
||||
OPEN_METRICS_GIBIBITS_SECOND = 'gibibits_per_second',
|
||||
|
||||
UCUM_TEBIBYTES_SECOND = 'TiBy/s',
|
||||
OPEN_METRICS_TEBIBYTES_SECOND = 'tebibytes_per_second',
|
||||
|
||||
UCUM_TEBIBITS_SECOND = 'Tibit/s',
|
||||
OPEN_METRICS_TEBIBITS_SECOND = 'tebibits_per_second',
|
||||
|
||||
UCUM_PEBIBYTES_SECOND = 'PiBy/s',
|
||||
OPEN_METRICS_PEBIBYTES_SECOND = 'pebibytes_per_second',
|
||||
|
||||
UCUM_PEBIBITS_SECOND = 'Pibit/s',
|
||||
OPEN_METRICS_PEBIBITS_SECOND = 'pebibits_per_second',
|
||||
|
||||
UCUM_TRUE_FALSE = '{bool}',
|
||||
OPEN_METRICS_TRUE_FALSE = 'boolean_true_false',
|
||||
|
||||
UCUM_YES_NO = '{bool}',
|
||||
OPEN_METRICS_YES_NO = 'boolean_yes_no',
|
||||
|
||||
UCUM_COUNTS_SECOND = '{count}/s',
|
||||
OPEN_METRICS_COUNTS_SECOND = 'counts_per_second',
|
||||
|
||||
UCUM_OPS_SECOND = '{ops}/s',
|
||||
OPEN_METRICS_OPS_SECOND = 'ops_per_second',
|
||||
|
||||
UCUM_REQUESTS_SECOND = '{requests}/s',
|
||||
OPEN_METRICS_REQUESTS_SECOND = 'requests_per_second',
|
||||
|
||||
UCUM_REQUESTS_MINUTE = '{requests}/min',
|
||||
OPEN_METRICS_REQUESTS_MINUTE = 'requests_per_minute',
|
||||
|
||||
UCUM_READS_SECOND = '{reads}/s',
|
||||
OPEN_METRICS_READS_SECOND = 'reads_per_second',
|
||||
|
||||
UCUM_WRITES_SECOND = '{writes}/s',
|
||||
OPEN_METRICS_WRITES_SECOND = 'writes_per_second',
|
||||
|
||||
UCUM_IOPS_SECOND = '{iops}/s',
|
||||
OPEN_METRICS_IOPS_SECOND = 'io_ops_per_second',
|
||||
|
||||
UCUM_COUNTS_MINUTE = '{count}/min',
|
||||
OPEN_METRICS_COUNTS_MINUTE = 'counts_per_minute',
|
||||
|
||||
UCUM_OPS_MINUTE = '{ops}/min',
|
||||
OPEN_METRICS_OPS_MINUTE = 'ops_per_minute',
|
||||
|
||||
UCUM_READS_MINUTE = '{reads}/min',
|
||||
OPEN_METRICS_READS_MINUTE = 'reads_per_minute',
|
||||
|
||||
UCUM_WRITES_MINUTE = '{writes}/min',
|
||||
OPEN_METRICS_WRITES_MINUTE = 'writes_per_minute',
|
||||
|
||||
OPEN_METRICS_PERCENT_UNIT = 'percentunit',
|
||||
}
|
||||
frontend/src/components/YAxisUnitSelector/utils.tsx (new file, 33 lines)
@@ -0,0 +1,33 @@
import { UniversalYAxisUnitMappings, Y_AXIS_UNIT_NAMES } from './constants';
import { UniversalYAxisUnit, YAxisUnit } from './types';

export const mapMetricUnitToUniversalUnit = (
	unit: string | undefined,
): UniversalYAxisUnit | null => {
	if (!unit) {
		return null;
	}

	const universalUnit = Object.values(UniversalYAxisUnit).find(
		(u) => UniversalYAxisUnitMappings[u].has(unit as YAxisUnit) || unit === u,
	);

	return universalUnit || (unit as UniversalYAxisUnit) || null;
};

export const getUniversalNameFromMetricUnit = (
	unit: string | undefined,
): string => {
	if (!unit) {
		return '-';
	}

	const universalUnit = mapMetricUnitToUniversalUnit(unit);
	if (!universalUnit) {
		return unit;
	}

	const universalName = Y_AXIS_UNIT_NAMES[universalUnit];

	return universalName || unit || '-';
};
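As a quick illustration of what these helpers return (values taken from the unit tests earlier in this compare; the snippet itself is not part of the diff, and the import path is assumed to follow the package layout shown above):

```ts
import {
	getUniversalNameFromMetricUnit,
	mapMetricUnitToUniversalUnit,
} from 'components/YAxisUnitSelector/utils';

// OpenMetrics-style unit strings collapse onto the universal ids...
console.log(mapMetricUnitToUniversalUnit('bytes')); // 'By'
console.log(mapMetricUnitToUniversalUnit('bytes_per_second')); // 'By/s'
// ...and unknown units fall through unchanged.
console.log(mapMetricUnitToUniversalUnit('unknown_unit')); // 'unknown_unit'

// Display names come from Y_AXIS_UNIT_NAMES via the same mapping.
console.log(getUniversalNameFromMetricUnit('seconds')); // 'Seconds (s)'
console.log(getUniversalNameFromMetricUnit(undefined)); // '-'
```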
@@ -77,9 +77,9 @@ const ROUTES = {
 	API_MONITORING: '/api-monitoring/explorer',
 	METRICS_EXPLORER_BASE: '/metrics-explorer',
 	WORKSPACE_ACCESS_RESTRICTED: '/workspace-access-restricted',
-	METER_EXPLORER_BASE: '/meter-explorer',
-	METER_EXPLORER: '/meter-explorer',
-	METER_EXPLORER_VIEWS: '/meter-explorer/views',
+	METER: '/meter',
+	METER_EXPLORER: '/meter/explorer',
+	METER_EXPLORER_VIEWS: '/meter/explorer/views',
 	HOME_PAGE: '/',
 } as const;
@@ -520,12 +520,6 @@ function ClusterDetails({
 					>
 						Cluster Name
 					</Typography.Text>
-					<Typography.Text
-						type="secondary"
-						className="entity-details-metadata-label"
-					>
-						Cluster Name
-					</Typography.Text>
 				</div>
 				<div className="values-row">
 					<Typography.Text className="entity-details-metadata-value">
@@ -533,9 +527,6 @@
 						{cluster.meta.k8s_cluster_name}
 					</Tooltip>
 				</Typography.Text>
-				<Typography.Text className="entity-details-metadata-value">
-					<Tooltip title="Cluster name">{cluster.meta.k8s_cluster_name}</Tooltip>
-				</Typography.Text>
 			</div>
 		</div>
 	</div>
@@ -0,0 +1,351 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
/* eslint-disable sonarjs/no-duplicate-string */
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
|
||||
import { Time } from 'container/TopNav/DateTimeSelectionV2/config';
|
||||
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import { LicenseEvent } from 'types/api/licensesV3/getActive';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import EntityEvents from '../EntityEvents';
|
||||
|
||||
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="date-time-selection">Date Time</div>
|
||||
),
|
||||
}));
|
||||
|
||||
const mockUseQuery = jest.fn();
|
||||
jest.mock('react-query', () => ({
|
||||
useQuery: (queryKey: any, queryFn: any, options: any): any =>
|
||||
mockUseQuery(queryKey, queryFn, options),
|
||||
}));
|
||||
|
||||
jest.mock('react-router-dom', () => ({
|
||||
...jest.requireActual('react-router-dom'),
|
||||
useLocation: (): { pathname: string } => ({
|
||||
pathname: `${process.env.FRONTEND_API_ENDPOINT}/${ROUTES.INFRASTRUCTURE_MONITORING_KUBERNETES}/`,
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
},
|
||||
activeLicenseV3: {
|
||||
event_queue: {
|
||||
created_at: '0',
|
||||
event: LicenseEvent.NO_EVENT,
|
||||
scheduled_at: '0',
|
||||
status: '',
|
||||
updated_at: '0',
|
||||
},
|
||||
license: {
|
||||
license_key: 'test-license-key',
|
||||
license_type: 'trial',
|
||||
org_id: 'test-org-id',
|
||||
plan_id: 'test-plan-id',
|
||||
plan_name: 'test-plan-name',
|
||||
plan_type: 'trial',
|
||||
plan_version: 'test-plan-version',
|
||||
},
|
||||
},
|
||||
} as any);
|
||||
|
||||
const mockUseQueryBuilderData = {
|
||||
handleRunQuery: jest.fn(),
|
||||
stagedQuery: initialQueriesMap[DataSource.METRICS],
|
||||
updateAllQueriesOperators: jest.fn(),
|
||||
currentQuery: initialQueriesMap[DataSource.METRICS],
|
||||
resetQuery: jest.fn(),
|
||||
redirectWithQueryBuilderData: jest.fn(),
|
||||
isStagedQueryUpdated: jest.fn(),
|
||||
handleSetQueryData: jest.fn(),
|
||||
	handleSetFormulaData: jest.fn(),
	handleSetQueryItemData: jest.fn(),
	handleSetConfig: jest.fn(),
	removeQueryBuilderEntityByIndex: jest.fn(),
	removeQueryTypeItemByIndex: jest.fn(),
	isDefaultQuery: jest.fn(),
};

jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
	...mockUseQueryBuilderData,
} as any);

const timeRange = {
	startTime: 1718236800,
	endTime: 1718236800,
};

const mockHandleChangeEventFilters = jest.fn();

const mockFilters: IBuilderQuery['filters'] = {
	items: [
		{
			id: 'pod-name',
			key: {
				id: 'pod-name',
				dataType: DataTypes.String,
				isColumn: true,
				key: 'pod-name',
				type: 'tag',
				isJSON: false,
				isIndexed: false,
			},
			op: '=',
			value: 'pod-1',
		},
	],
	op: 'and',
};

const isModalTimeSelection = false;
const mockHandleTimeChange = jest.fn();
const selectedInterval: Time = '1m';
const category = K8sCategory.PODS;
const queryKey = 'pod-events';

const mockEventsData = {
	payload: {
		data: {
			newResult: {
				data: {
					result: [
						{
							list: [
								{
									timestamp: '2024-01-15T10:00:00Z',
									data: {
										id: 'event-1',
										severity_text: 'INFO',
										body: 'Test event 1',
										resources_string: { 'pod.name': 'test-pod-1' },
										attributes_string: { service: 'test-service' },
									},
								},
								{
									timestamp: '2024-01-15T10:01:00Z',
									data: {
										id: 'event-2',
										severity_text: 'WARN',
										body: 'Test event 2',
										resources_string: { 'pod.name': 'test-pod-2' },
										attributes_string: { service: 'test-service' },
									},
								},
							],
						},
					],
				},
			},
		},
	},
};

const mockEmptyEventsData = {
	payload: {
		data: {
			newResult: {
				data: {
					result: [
						{
							list: [],
						},
					],
				},
			},
		},
	},
};

const createMockEvent = (
	id: string,
	severity: string,
	body: string,
	podName: string,
): any => ({
	timestamp: `2024-01-15T10:${id.padStart(2, '0')}:00Z`,
	data: {
		id: `event-${id}`,
		severity_text: severity,
		body,
		resources_string: { 'pod.name': podName },
		attributes_string: { service: 'test-service' },
	},
});

const createMockMoreEventsData = (): any => ({
	payload: {
		data: {
			newResult: {
				data: {
					result: [
						{
							list: Array.from({ length: 11 }, (_, i) =>
								createMockEvent(
									String(i + 1),
									['INFO', 'WARN', 'ERROR', 'DEBUG'][i % 4],
									`Test event ${i + 1}`,
									`test-pod-${i + 1}`,
								),
							),
						},
					],
				},
			},
		},
	},
});

const renderEntityEvents = (overrides = {}): any => {
	const defaultProps = {
		timeRange,
		handleChangeEventFilters: mockHandleChangeEventFilters,
		filters: mockFilters,
		isModalTimeSelection,
		handleTimeChange: mockHandleTimeChange,
		selectedInterval,
		category,
		queryKey,
		...overrides,
	};

	return render(
		<EntityEvents
			timeRange={defaultProps.timeRange}
			handleChangeEventFilters={defaultProps.handleChangeEventFilters}
			filters={defaultProps.filters}
			isModalTimeSelection={defaultProps.isModalTimeSelection}
			handleTimeChange={defaultProps.handleTimeChange}
			selectedInterval={defaultProps.selectedInterval}
			category={defaultProps.category}
			queryKey={defaultProps.queryKey}
		/>,
	);
};

describe('EntityEvents', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		mockUseQuery.mockReturnValue({
			data: mockEventsData,
			isLoading: false,
			isError: false,
			isFetching: false,
		});
	});

	it('should render events list with data', () => {
		renderEntityEvents();
		expect(screen.getByText('Prev')).toBeInTheDocument();
		expect(screen.getByText('Next')).toBeInTheDocument();
		expect(screen.getByText('Test event 1')).toBeInTheDocument();
		expect(screen.getByText('Test event 2')).toBeInTheDocument();
		expect(screen.getByText('INFO')).toBeInTheDocument();
		expect(screen.getByText('WARN')).toBeInTheDocument();
	});

	it('renders empty state when no events are found', () => {
		mockUseQuery.mockReturnValue({
			data: mockEmptyEventsData,
			isLoading: false,
			isError: false,
			isFetching: false,
		});

		renderEntityEvents();
		expect(screen.getByText(/No events found for this pods/)).toBeInTheDocument();
	});

	it('renders loader when fetching events', () => {
		mockUseQuery.mockReturnValue({
			data: undefined,
			isLoading: true,
			isError: false,
			isFetching: true,
		});

		renderEntityEvents();
		expect(screen.getByTestId('loader')).toBeInTheDocument();
	});

	it('shows pagination controls when events are present', () => {
		renderEntityEvents();
		expect(screen.getByText('Prev')).toBeInTheDocument();
		expect(screen.getByText('Next')).toBeInTheDocument();
	});

	it('disables Prev button on first page', () => {
		renderEntityEvents();
		const prevButton = screen.getByText('Prev').closest('button');
		expect(prevButton).toBeDisabled();
	});

	it('enables Next button when more events are available', () => {
		mockUseQuery.mockReturnValue({
			data: createMockMoreEventsData(),
			isLoading: false,
			isError: false,
			isFetching: false,
		});

		renderEntityEvents();
		const nextButton = screen.getByText('Next').closest('button');
		expect(nextButton).not.toBeDisabled();
	});

	it('navigates to next page when Next button is clicked', () => {
		mockUseQuery.mockReturnValue({
			data: createMockMoreEventsData(),
			isLoading: false,
			isError: false,
			isFetching: false,
		});

		renderEntityEvents();

		const nextButton = screen.getByText('Next').closest('button');
		expect(nextButton).not.toBeNull();
		fireEvent.click(nextButton as Element);

		const { calls } = mockUseQuery.mock;
		const hasPage2Call = calls.some((call) => {
			const { queryKey: callQueryKey } = call[0] || {};
			return Array.isArray(callQueryKey) && callQueryKey.includes(2);
		});
		expect(hasPage2Call).toBe(true);
	});

	it('navigates to previous page when Prev button is clicked', () => {
		mockUseQuery.mockReturnValue({
			data: createMockMoreEventsData(),
			isLoading: false,
			isError: false,
			isFetching: false,
		});

		renderEntityEvents();

		const nextButton = screen.getByText('Next').closest('button');
		expect(nextButton).not.toBeNull();
		fireEvent.click(nextButton as Element);

		const prevButton = screen.getByText('Prev').closest('button');
		expect(prevButton).not.toBeNull();
		fireEvent.click(prevButton as Element);

		const { calls } = mockUseQuery.mock;
		const hasPage1Call = calls.some((call) => {
			const { queryKey: callQueryKey } = call[0] || {};
			return Array.isArray(callQueryKey) && callQueryKey.includes(1);
		});
		expect(hasPage1Call).toBe(true);
	});
});
@@ -0,0 +1,374 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
import { render, screen } from '@testing-library/react';
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
import { Time } from 'container/TopNav/DateTimeSelectionV2/config';
import * as appContextHooks from 'providers/App/App';
import { LicenseEvent } from 'types/api/licensesV3/getActive';

import EntityMetrics from '../EntityMetrics';

jest.mock('lib/uPlotLib/getUplotChartOptions', () => ({
	getUPlotChartOptions: jest.fn().mockReturnValue({}),
}));

jest.mock('lib/uPlotLib/utils/getUplotChartData', () => ({
	getUPlotChartData: jest.fn().mockReturnValue([]),
}));

jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
	__esModule: true,
	default: (): JSX.Element => (
		<div data-testid="date-time-selection">Date Time</div>
	),
}));

jest.mock('components/Uplot', () => ({
	__esModule: true,
	default: (): JSX.Element => <div data-testid="uplot-chart">Uplot Chart</div>,
}));

jest.mock('container/InfraMonitoringK8s/commonUtils', () => ({
	__esModule: true,
	getMetricsTableData: jest.fn().mockReturnValue([
		{
			rows: [
				{ data: { timestamp: '2024-01-15T10:00:00Z', value: '42.5' } },
				{ data: { timestamp: '2024-01-15T10:01:00Z', value: '43.2' } },
			],
			columns: [
				{ key: 'timestamp', label: 'Timestamp', isValueColumn: false },
				{ key: 'value', label: 'Value', isValueColumn: true },
			],
		},
	]),
	MetricsTable: jest
		.fn()
		.mockImplementation(
			(): JSX.Element => <div data-testid="metrics-table">Metrics Table</div>,
		),
}));

const mockUseQueries = jest.fn();
jest.mock('react-query', () => ({
	useQueries: (queryConfigs: any[]): any[] => mockUseQueries(queryConfigs),
}));

jest.mock('hooks/useDarkMode', () => ({
	useIsDarkMode: (): boolean => false,
}));

jest.mock('hooks/useDimensions', () => ({
	useResizeObserver: (): { width: number; height: number } => ({
		width: 800,
		height: 600,
	}),
}));

jest.mock('hooks/useMultiIntersectionObserver', () => ({
	useMultiIntersectionObserver: (count: number): any => ({
		visibilities: new Array(count).fill(true),
		setElement: jest.fn(),
	}),
}));

jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
	user: {
		role: 'admin',
	},
	activeLicenseV3: {
		event_queue: {
			created_at: '0',
			event: LicenseEvent.NO_EVENT,
			scheduled_at: '0',
			status: '',
			updated_at: '0',
		},
		license: {
			license_key: 'test-license-key',
			license_type: 'trial',
			org_id: 'test-org-id',
			plan_id: 'test-plan-id',
			plan_name: 'test-plan-name',
			plan_type: 'trial',
			plan_version: 'test-plan-version',
		},
	},
	featureFlags: [
		{
			name: 'DOT_METRICS_ENABLED',
			active: false,
		},
	],
} as any);

const mockEntity = {
	id: 'test-entity-1',
	name: 'test-entity',
	type: 'pod',
};

const mockEntityWidgetInfo = [
	{
		title: 'CPU Usage',
		yAxisUnit: 'percentage',
	},
	{
		title: 'Memory Usage',
		yAxisUnit: 'bytes',
	},
];

const mockGetEntityQueryPayload = jest.fn().mockReturnValue([
	{
		query: 'cpu_usage',
		start: 1705315200,
		end: 1705318800,
	},
	{
		query: 'memory_usage',
		start: 1705315200,
		end: 1705318800,
	},
]);

const mockTimeRange = {
	startTime: 1705315200,
	endTime: 1705318800,
};

const mockHandleTimeChange = jest.fn();

const mockQueries = [
	{
		data: {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [
									{ data: { timestamp: '2024-01-15T10:00:00Z', value: '42.5' } },
									{ data: { timestamp: '2024-01-15T10:01:00Z', value: '43.2' } },
								],
								columns: [
									{ key: 'timestamp', label: 'Timestamp', isValueColumn: false },
									{ key: 'value', label: 'Value', isValueColumn: true },
								],
							},
						},
					],
				},
			},
			params: {
				compositeQuery: {
					panelType: 'time_series',
				},
			},
		},
		isLoading: false,
		isError: false,
		error: null,
	},
	{
		data: {
			payload: {
				data: {
					result: [
						{
							table: {
								rows: [
									{ data: { timestamp: '2024-01-15T10:00:00Z', value: '1024' } },
									{ data: { timestamp: '2024-01-15T10:01:00Z', value: '1028' } },
								],
								columns: [
									{ key: 'timestamp', label: 'Timestamp', isValueColumn: false },
									{ key: 'value', label: 'Value', isValueColumn: true },
								],
							},
						},
					],
				},
			},
			params: {
				compositeQuery: {
					panelType: 'table',
				},
			},
		},
		isLoading: false,
		isError: false,
		error: null,
	},
];

const mockLoadingQueries = [
	{
		data: undefined,
		isLoading: true,
		isError: false,
		error: null,
	},
	{
		data: undefined,
		isLoading: true,
		isError: false,
		error: null,
	},
];

const mockErrorQueries = [
	{
		data: undefined,
		isLoading: false,
		isError: true,
		error: new Error('API Error'),
	},
	{
		data: undefined,
		isLoading: false,
		isError: true,
		error: new Error('Network Error'),
	},
];

const mockEmptyQueries = [
	{
		data: {
			payload: {
				data: {
					result: [],
				},
			},
			params: {
				compositeQuery: {
					panelType: 'time_series',
				},
			},
		},
		isLoading: false,
		isError: false,
		error: null,
	},
	{
		data: {
			payload: {
				data: {
					result: [],
				},
			},
			params: {
				compositeQuery: {
					panelType: 'table',
				},
			},
		},
		isLoading: false,
		isError: false,
		error: null,
	},
];

const renderEntityMetrics = (overrides = {}): any => {
	const defaultProps = {
		timeRange: mockTimeRange,
		isModalTimeSelection: false,
		handleTimeChange: mockHandleTimeChange,
		selectedInterval: '5m' as Time,
		entity: mockEntity,
		entityWidgetInfo: mockEntityWidgetInfo,
		getEntityQueryPayload: mockGetEntityQueryPayload,
		queryKey: 'test-query-key',
		category: K8sCategory.PODS,
		...overrides,
	};

	return render(
		<EntityMetrics
			timeRange={defaultProps.timeRange}
			isModalTimeSelection={defaultProps.isModalTimeSelection}
			handleTimeChange={defaultProps.handleTimeChange}
			selectedInterval={defaultProps.selectedInterval}
			entity={defaultProps.entity}
			entityWidgetInfo={defaultProps.entityWidgetInfo}
			getEntityQueryPayload={defaultProps.getEntityQueryPayload}
			queryKey={defaultProps.queryKey}
			category={defaultProps.category}
		/>,
	);
};

describe('EntityMetrics', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		mockUseQueries.mockReturnValue(mockQueries);
	});

	it('should render metrics with data', () => {
		renderEntityMetrics();
		expect(screen.getByText('CPU Usage')).toBeInTheDocument();
		expect(screen.getByText('Memory Usage')).toBeInTheDocument();
		expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
		expect(screen.getByTestId('uplot-chart')).toBeInTheDocument();
		expect(screen.getByTestId('metrics-table')).toBeInTheDocument();
	});

	it('renders loading state when fetching metrics', () => {
		mockUseQueries.mockReturnValue(mockLoadingQueries);
		renderEntityMetrics();
		expect(screen.getAllByText('CPU Usage')).toHaveLength(1);
		expect(screen.getAllByText('Memory Usage')).toHaveLength(1);
	});

	it('renders error state when query fails', () => {
		mockUseQueries.mockReturnValue(mockErrorQueries);
		renderEntityMetrics();
		expect(screen.getByText('API Error')).toBeInTheDocument();
		expect(screen.getByText('Network Error')).toBeInTheDocument();
	});

	it('renders empty state when no metrics data', () => {
		mockUseQueries.mockReturnValue(mockEmptyQueries);
		renderEntityMetrics();
		expect(screen.getByTestId('uplot-chart')).toBeInTheDocument();
		expect(screen.getByTestId('metrics-table')).toBeInTheDocument();
	});

	it('calls handleTimeChange when datetime selection changes', () => {
		renderEntityMetrics();
		expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
	});

	it('renders multiple metric widgets', () => {
		renderEntityMetrics();
		expect(screen.getByText('CPU Usage')).toBeInTheDocument();
		expect(screen.getByText('Memory Usage')).toBeInTheDocument();
	});

	it('handles different panel types correctly', () => {
		renderEntityMetrics();
		expect(screen.getByTestId('uplot-chart')).toBeInTheDocument();
		expect(screen.getByTestId('metrics-table')).toBeInTheDocument();
	});

	it('applies intersection observer for visibility', () => {
		renderEntityMetrics();
		expect(mockUseQueries).toHaveBeenCalledWith(
			expect.arrayContaining([
				expect.objectContaining({
					enabled: true,
				}),
			]),
		);
	});

	it('generates correct query payloads', () => {
		renderEntityMetrics();
		expect(mockGetEntityQueryPayload).toHaveBeenCalledWith(
			mockEntity,
			mockTimeRange.startTime,
			mockTimeRange.endTime,
			false,
		);
	});
});
@@ -0,0 +1,288 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
/* eslint-disable sonarjs/no-duplicate-string */
import { render, screen } from '@testing-library/react';
import { initialQueriesMap } from 'constants/queryBuilder';
import { K8sCategory } from 'container/InfraMonitoringK8s/constants';
import { Time } from 'container/TopNav/DateTimeSelectionV2/config';
import * as useQueryBuilderHooks from 'hooks/queryBuilder/useQueryBuilder';
import * as appContextHooks from 'providers/App/App';
import { LicenseEvent } from 'types/api/licensesV3/getActive';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';

import EntityTraces from '../EntityTraces';

jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
	__esModule: true,
	default: (): JSX.Element => (
		<div data-testid="date-time-selection">Date Time</div>
	),
}));

const mockUseQuery = jest.fn();
jest.mock('react-query', () => ({
	useQuery: (queryKey: any, queryFn: any, options: any): any =>
		mockUseQuery(queryKey, queryFn, options),
}));

jest.mock('react-router-dom', () => ({
	...jest.requireActual('react-router-dom'),
	useLocation: (): { pathname: string } => ({
		pathname: '/test-path',
	}),
	useNavigate: (): jest.Mock => jest.fn(),
}));

jest.mock('hooks/useSafeNavigate', () => ({
	useSafeNavigate: (): { safeNavigate: jest.Mock } => ({
		safeNavigate: jest.fn(),
	}),
}));

jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
	user: {
		role: 'admin',
	},
	activeLicenseV3: {
		event_queue: {
			created_at: '0',
			event: LicenseEvent.NO_EVENT,
			scheduled_at: '0',
			status: '',
			updated_at: '0',
		},
		license: {
			license_key: 'test-license-key',
			license_type: 'trial',
			org_id: 'test-org-id',
			plan_id: 'test-plan-id',
			plan_name: 'test-plan-name',
			plan_type: 'trial',
			plan_version: 'test-plan-version',
		},
	},
} as any);

const mockUseQueryBuilderData = {
	handleRunQuery: jest.fn(),
	stagedQuery: initialQueriesMap[DataSource.METRICS],
	updateAllQueriesOperators: jest.fn(),
	currentQuery: initialQueriesMap[DataSource.METRICS],
	resetQuery: jest.fn(),
	redirectWithQueryBuilderData: jest.fn(),
	isStagedQueryUpdated: jest.fn(),
	handleSetQueryData: jest.fn(),
	handleSetFormulaData: jest.fn(),
	handleSetQueryItemData: jest.fn(),
	handleSetConfig: jest.fn(),
	removeQueryBuilderEntityByIndex: jest.fn(),
	removeQueryTypeItemByIndex: jest.fn(),
	isDefaultQuery: jest.fn(),
};

jest.spyOn(useQueryBuilderHooks, 'useQueryBuilder').mockReturnValue({
	...mockUseQueryBuilderData,
} as any);

const timeRange = {
	startTime: 1718236800,
	endTime: 1718236800,
};

const mockHandleChangeTracesFilters = jest.fn();

const mockTracesFilters: IBuilderQuery['filters'] = {
	items: [
		{
			id: 'service-name',
			key: {
				id: 'service-name',
				dataType: DataTypes.String,
				isColumn: true,
				key: 'service.name',
				type: 'tag',
				isJSON: false,
				isIndexed: false,
			},
			op: '=',
			value: 'test-service',
		},
	],
	op: 'and',
};

const isModalTimeSelection = false;
const mockHandleTimeChange = jest.fn();
const selectedInterval: Time = '5m';
const category = K8sCategory.PODS;
const queryKey = 'pod-traces';
const queryKeyFilters = ['service.name'];

const mockTracesData = {
	payload: {
		data: {
			newResult: {
				data: {
					result: [
						{
							list: [
								{
									timestamp: '2024-01-15T10:00:00Z',
									data: {
										trace_id: 'trace-1',
										span_id: 'span-1',
										service_name: 'test-service-1',
										operation_name: 'test-operation-1',
										duration: 100,
										status_code: 200,
									},
								},
								{
									timestamp: '2024-01-15T10:01:00Z',
									data: {
										trace_id: 'trace-2',
										span_id: 'span-2',
										service_name: 'test-service-2',
										operation_name: 'test-operation-2',
										duration: 150,
										status_code: 500,
									},
								},
							],
						},
					],
				},
			},
		},
	},
};

const mockEmptyTracesData = {
	payload: {
		data: {
			newResult: {
				data: {
					result: [
						{
							list: [],
						},
					],
				},
			},
		},
	},
};

const renderEntityTraces = (overrides = {}): any => {
	const defaultProps = {
		timeRange,
		isModalTimeSelection,
		handleTimeChange: mockHandleTimeChange,
		handleChangeTracesFilters: mockHandleChangeTracesFilters,
		tracesFilters: mockTracesFilters,
		selectedInterval,
		queryKey,
		category,
		queryKeyFilters,
		...overrides,
	};

	return render(
		<EntityTraces
			timeRange={defaultProps.timeRange}
			isModalTimeSelection={defaultProps.isModalTimeSelection}
			handleTimeChange={defaultProps.handleTimeChange}
			handleChangeTracesFilters={defaultProps.handleChangeTracesFilters}
			tracesFilters={defaultProps.tracesFilters}
			selectedInterval={defaultProps.selectedInterval}
			queryKey={defaultProps.queryKey}
			category={defaultProps.category}
			queryKeyFilters={defaultProps.queryKeyFilters}
		/>,
	);
};

describe('EntityTraces', () => {
	beforeEach(() => {
		jest.clearAllMocks();
		mockUseQuery.mockReturnValue({
			data: mockTracesData,
			isLoading: false,
			isError: false,
			isFetching: false,
		});
	});

	it('should render traces list with data', () => {
		renderEntityTraces();
		expect(screen.getByText('Previous')).toBeInTheDocument();
		expect(screen.getByText('Next')).toBeInTheDocument();
		expect(
			screen.getByText(/Search Filter : select options from suggested values/),
		).toBeInTheDocument();
		expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
	});

	it('renders empty state when no traces are found', () => {
		mockUseQuery.mockReturnValue({
			data: mockEmptyTracesData,
			isLoading: false,
			isError: false,
			isFetching: false,
		});

		renderEntityTraces();
		expect(screen.getByText(/No traces yet./)).toBeInTheDocument();
	});

	it('renders loader when fetching traces', () => {
		mockUseQuery.mockReturnValue({
			data: undefined,
			isLoading: true,
			isError: false,
			isFetching: true,
		});

		renderEntityTraces();
		expect(screen.getByText('pending_data_placeholder')).toBeInTheDocument();
	});

	it('shows error state when query fails', () => {
		mockUseQuery.mockReturnValue({
			data: { error: 'API Error' },
			isLoading: false,
			isError: true,
			isFetching: false,
		});

		renderEntityTraces();
		expect(screen.getByText('API Error')).toBeInTheDocument();
	});

	it('calls handleChangeTracesFilters when query builder search changes', () => {
		renderEntityTraces();
		expect(
			screen.getByText(/Search Filter : select options from suggested values/),
		).toBeInTheDocument();
	});

	it('calls handleTimeChange when datetime selection changes', () => {
		renderEntityTraces();
		expect(screen.getByTestId('date-time-selection')).toBeInTheDocument();
	});

	it('shows pagination controls when traces are present', () => {
		renderEntityTraces();
		expect(screen.getByText('Previous')).toBeInTheDocument();
		expect(screen.getByText('Next')).toBeInTheDocument();
	});

	it('disables pagination buttons when no more data', () => {
		renderEntityTraces();
		const prevButton = screen.getByText('Previous').closest('button');
		const nextButton = screen.getByText('Next').closest('button');
		expect(prevButton).toBeDisabled();
		expect(nextButton).toBeDisabled();
	});
});
@@ -4,7 +4,7 @@ import { Skeleton } from 'antd';

function LoadingContainer(): JSX.Element {
	return (
		<div className="k8s-list-loading-state">
		<div className="k8s-list-loading-state" data-testid="loader">
			<Skeleton.Input
				className="k8s-list-loading-state-item"
				size="large"
@@ -0,0 +1,131 @@
/* eslint-disable import/first */
// eslint-disable-next-line import/order
import setupCommonMocks from '../../commonMocks';

setupCommonMocks();

import { fireEvent, render, screen } from '@testing-library/react';
import ClusterDetails from 'container/InfraMonitoringK8s/Clusters/ClusterDetails/ClusterDetails';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import store from 'store';

const queryClient = new QueryClient();

describe('ClusterDetails', () => {
	const mockCluster = {
		meta: {
			k8s_cluster_name: 'test-cluster',
		},
	} as any;
	const mockOnClose = jest.fn();

	it('should render modal with relevant metadata', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<ClusterDetails
							cluster={mockCluster}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const clusterNameElements = screen.getAllByText('test-cluster');
		expect(clusterNameElements.length).toBeGreaterThan(0);
		expect(clusterNameElements[0]).toBeInTheDocument();
	});

	it('should render modal with 4 tabs', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<ClusterDetails
							cluster={mockCluster}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const metricsTab = screen.getByText('Metrics');
		expect(metricsTab).toBeInTheDocument();

		const eventsTab = screen.getByText('Events');
		expect(eventsTab).toBeInTheDocument();

		const logsTab = screen.getByText('Logs');
		expect(logsTab).toBeInTheDocument();

		const tracesTab = screen.getByText('Traces');
		expect(tracesTab).toBeInTheDocument();
	});

	it('default tab should be metrics', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<ClusterDetails
							cluster={mockCluster}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
		expect(metricsTab).toBeChecked();
	});

	it('should switch to events tab when events tab is clicked', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<ClusterDetails
							cluster={mockCluster}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const eventsTab = screen.getByRole('radio', { name: 'Events' });
		expect(eventsTab).not.toBeChecked();
		fireEvent.click(eventsTab);
		expect(eventsTab).toBeChecked();
	});

	it('should close modal when close button is clicked', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<ClusterDetails
							cluster={mockCluster}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const closeButton = screen.getByRole('button', { name: 'Close' });
		fireEvent.click(closeButton);
		expect(mockOnClose).toHaveBeenCalled();
	});
});
@@ -0,0 +1,141 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import DaemonSetDetails from 'container/InfraMonitoringK8s/DaemonSets/DaemonSetDetails/DaemonSetDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('DaemonSetDetails', () => {
|
||||
const mockDaemonSet = {
|
||||
meta: {
|
||||
k8s_daemonset_name: 'test-daemon-set',
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const daemonSetNameElements = screen.getAllByText('test-daemon-set');
|
||||
expect(daemonSetNameElements.length).toBeGreaterThan(0);
|
||||
expect(daemonSetNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DaemonSetDetails
|
||||
daemonSet={mockDaemonSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,141 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import DeploymentDetails from 'container/InfraMonitoringK8s/Deployments/DeploymentDetails/DeploymentDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('DeploymentDetails', () => {
|
||||
const mockDeployment = {
|
||||
meta: {
|
||||
k8s_deployment_name: 'test-deployment',
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const deploymentNameElements = screen.getAllByText('test-deployment');
|
||||
expect(deploymentNameElements.length).toBeGreaterThan(0);
|
||||
expect(deploymentNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<DeploymentDetails
|
||||
deployment={mockDeployment}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,116 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import JobDetails from 'container/InfraMonitoringK8s/Jobs/JobDetails/JobDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('JobDetails', () => {
|
||||
const mockJob = {
|
||||
meta: {
|
||||
k8s_job_name: 'test-job',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const jobNameElements = screen.getAllByText('test-job');
|
||||
expect(jobNameElements.length).toBeGreaterThan(0);
|
||||
expect(jobNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<JobDetails job={mockJob} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,136 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import NamespaceDetails from 'container/InfraMonitoringK8s/Namespaces/NamespaceDetails/NamespaceDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('NamespaceDetails', () => {
|
||||
const mockNamespace = {
|
||||
namespaceName: 'test-namespace',
|
||||
meta: {
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NamespaceDetails
|
||||
namespace={mockNamespace}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,116 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import NodeDetails from 'container/InfraMonitoringK8s/Nodes/NodeDetails/NodeDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('NodeDetails', () => {
|
||||
const mockNode = {
|
||||
meta: {
|
||||
k8s_node_name: 'test-node',
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const nodeNameElements = screen.getAllByText('test-node');
|
||||
expect(nodeNameElements.length).toBeGreaterThan(0);
|
||||
expect(nodeNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<NodeDetails node={mockNode} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
frontend/src/container/InfraMonitoringK8s/__tests__/Pods/PodDetails/PodDetails.test.tsx (generated, new file, 122 lines)
@@ -0,0 +1,122 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import PodDetails from 'container/InfraMonitoringK8s/Pods/PodDetails/PodDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('PodDetails', () => {
|
||||
const mockPod = {
|
||||
podName: 'test-pod',
|
||||
meta: {
|
||||
k8s_cluster_name: 'test-cluster',
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
k8s_node_name: 'test-node',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const clusterNameElements = screen.getAllByText('test-cluster');
|
||||
expect(clusterNameElements.length).toBeGreaterThan(0);
|
||||
expect(clusterNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const nodeNameElements = screen.getAllByText('test-node');
|
||||
expect(nodeNameElements.length).toBeGreaterThan(0);
|
||||
expect(nodeNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<PodDetails pod={mockPod} isModalTimeSelection onClose={mockOnClose} />
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,136 @@
|
||||
/* eslint-disable import/first */
|
||||
// eslint-disable-next-line import/order
|
||||
import setupCommonMocks from '../../commonMocks';
|
||||
|
||||
setupCommonMocks();
|
||||
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import StatefulSetDetails from 'container/InfraMonitoringK8s/StatefulSets/StatefulSetDetails/StatefulSetDetails';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
import { Provider } from 'react-redux';
|
||||
import { MemoryRouter } from 'react-router-dom';
|
||||
import store from 'store';
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
describe('StatefulSetDetails', () => {
|
||||
const mockStatefulSet = {
|
||||
meta: {
|
||||
k8s_namespace_name: 'test-namespace',
|
||||
k8s_statefulset_name: 'test-stateful-set',
|
||||
},
|
||||
} as any;
|
||||
const mockOnClose = jest.fn();
|
||||
|
||||
it('should render modal with relevant metadata', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const statefulSetNameElements = screen.getAllByText('test-stateful-set');
|
||||
expect(statefulSetNameElements.length).toBeGreaterThan(0);
|
||||
expect(statefulSetNameElements[0]).toBeInTheDocument();
|
||||
|
||||
const namespaceNameElements = screen.getAllByText('test-namespace');
|
||||
expect(namespaceNameElements.length).toBeGreaterThan(0);
|
||||
expect(namespaceNameElements[0]).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render modal with 4 tabs', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByText('Metrics');
|
||||
expect(metricsTab).toBeInTheDocument();
|
||||
|
||||
const eventsTab = screen.getByText('Events');
|
||||
expect(eventsTab).toBeInTheDocument();
|
||||
|
||||
const logsTab = screen.getByText('Logs');
|
||||
expect(logsTab).toBeInTheDocument();
|
||||
|
||||
const tracesTab = screen.getByText('Traces');
|
||||
expect(tracesTab).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('default tab should be metrics', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricsTab = screen.getByRole('radio', { name: 'Metrics' });
|
||||
expect(metricsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should switch to events tab when events tab is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const eventsTab = screen.getByRole('radio', { name: 'Events' });
|
||||
expect(eventsTab).not.toBeChecked();
|
||||
fireEvent.click(eventsTab);
|
||||
expect(eventsTab).toBeChecked();
|
||||
});
|
||||
|
||||
it('should close modal when close button is clicked', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<MemoryRouter>
|
||||
<StatefulSetDetails
|
||||
statefulSet={mockStatefulSet}
|
||||
isModalTimeSelection
|
||||
onClose={mockOnClose}
|
||||
/>
|
||||
</MemoryRouter>
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const closeButton = screen.getByRole('button', { name: 'Close' });
|
||||
fireEvent.click(closeButton);
|
||||
expect(mockOnClose).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,73 @@
/* eslint-disable import/first */
// eslint-disable-next-line import/order
import setupCommonMocks from '../../commonMocks';

setupCommonMocks();

import { fireEvent, render, screen } from '@testing-library/react';
import VolumeDetails from 'container/InfraMonitoringK8s/Volumes/VolumeDetails/VolumeDetails';
import { QueryClient, QueryClientProvider } from 'react-query';
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import store from 'store';

const queryClient = new QueryClient();

describe('VolumeDetails', () => {
	const mockVolume = {
		persistentVolumeClaimName: 'test-volume',
		meta: {
			k8s_cluster_name: 'test-cluster',
			k8s_namespace_name: 'test-namespace',
		},
	} as any;
	const mockOnClose = jest.fn();

	it('should render modal with relevant metadata', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<VolumeDetails
							volume={mockVolume}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const volumeNameElements = screen.getAllByText('test-volume');
		expect(volumeNameElements.length).toBeGreaterThan(0);
		expect(volumeNameElements[0]).toBeInTheDocument();

		const clusterNameElements = screen.getAllByText('test-cluster');
		expect(clusterNameElements.length).toBeGreaterThan(0);
		expect(clusterNameElements[0]).toBeInTheDocument();

		const namespaceNameElements = screen.getAllByText('test-namespace');
		expect(namespaceNameElements.length).toBeGreaterThan(0);
		expect(namespaceNameElements[0]).toBeInTheDocument();
	});

	it('should close modal when close button is clicked', () => {
		render(
			<QueryClientProvider client={queryClient}>
				<Provider store={store}>
					<MemoryRouter>
						<VolumeDetails
							volume={mockVolume}
							isModalTimeSelection
							onClose={mockOnClose}
						/>
					</MemoryRouter>
				</Provider>
			</QueryClientProvider>,
		);

		const closeButton = screen.getByRole('button', { name: 'Close' });
		fireEvent.click(closeButton);
		expect(mockOnClose).toHaveBeenCalled();
	});
});
@@ -0,0 +1,121 @@
import * as appContextHooks from 'providers/App/App';
import * as timezoneHooks from 'providers/Timezone';
import { LicenseEvent } from 'types/api/licensesV3/getActive';

const setupCommonMocks = (): void => {
	const createMockObserver = (): {
		observe: jest.Mock;
		unobserve: jest.Mock;
		disconnect: jest.Mock;
	} => ({
		observe: jest.fn(),
		unobserve: jest.fn(),
		disconnect: jest.fn(),
	});

	global.IntersectionObserver = jest.fn().mockImplementation(createMockObserver);
	global.ResizeObserver = jest.fn().mockImplementation(createMockObserver);

	jest.mock('react-redux', () => ({
		...jest.requireActual('react-redux'),
		useSelector: jest.fn(() => ({
			globalTime: {
				selectedTime: {
					startTime: 1713734400000,
					endTime: 1713738000000,
				},
				maxTime: 1713738000000,
				minTime: 1713734400000,
			},
		})),
	}));

	jest.mock('uplot', () => ({
		paths: {
			spline: jest.fn(),
			bars: jest.fn(),
		},
		default: jest.fn(() => ({
			paths: {
				spline: jest.fn(),
				bars: jest.fn(),
			},
		})),
	}));

	jest.mock('react-router-dom-v5-compat', () => ({
		...jest.requireActual('react-router-dom-v5-compat'),
		useSearchParams: jest.fn().mockReturnValue([
			{
				get: jest.fn(),
				entries: jest.fn(() => []),
				set: jest.fn(),
			},
			jest.fn(),
		]),
		useNavigationType: (): any => 'PUSH',
	}));

	jest.mock('hooks/useUrlQuery', () => ({
		__esModule: true,
		default: jest.fn(() => ({
			set: jest.fn(),
			delete: jest.fn(),
			get: jest.fn(),
			has: jest.fn(),
			entries: jest.fn(() => []),
			append: jest.fn(),
			toString: jest.fn(() => ''),
		})),
	}));

	jest.mock('lib/getMinMax', () => ({
		__esModule: true,
		default: jest.fn().mockImplementation(() => ({
			minTime: 1713734400000,
			maxTime: 1713738000000,
		})),
		isValidTimeFormat: jest.fn().mockReturnValue(true),
	}));

	jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
		user: {
			role: 'admin',
		},
		activeLicenseV3: {
			event_queue: {
				created_at: '0',
				event: LicenseEvent.NO_EVENT,
				scheduled_at: '0',
				status: '',
				updated_at: '0',
			},
			license: {
				license_key: 'test-license-key',
				license_type: 'trial',
				org_id: 'test-org-id',
				plan_id: 'test-plan-id',
				plan_name: 'test-plan-name',
				plan_type: 'trial',
				plan_version: 'test-plan-version',
			},
		},
	} as any);

	jest.spyOn(timezoneHooks, 'useTimezone').mockReturnValue({
		timezone: {
			offset: 0,
		},
		browserTimezone: {
			offset: 0,
		},
	} as any);

	jest.mock('hooks/useSafeNavigate', () => ({
		useSafeNavigate: (): any => ({
			safeNavigate: jest.fn(),
		}),
	}));
};

export default setupCommonMocks;
@@ -0,0 +1,92 @@
.meter-explorer-breakdown {
	display: flex;
	flex-direction: column;

	.meter-explorer-date-time {
		display: flex;
		min-height: 30px;
		justify-content: end;
		border-bottom: 1px solid var(--bg-slate-500);
		padding: 10px 16px;
	}

	.meter-explorer-graphs {
		display: flex;
		flex-direction: column;
		padding: 20px;
		gap: 36px;

		.meter-column-graph {
			.row-card {
				background-color: var(--bg-ink-400);
				padding-left: 10px;
				height: 32px;
				display: flex;
				justify-content: center;
				align-items: center;

				.section-title {
					color: var(--bg-vanilla-400);
					font-family: Inter;
					font-size: 14px;
					font-style: normal;
					font-weight: 500;
					line-height: 20px;
					letter-spacing: -0.07px;
				}
			}

			.graph-description {
				padding: 10px;
				display: flex;
				justify-content: center;
				align-items: center;
			}

			.meter-page-grid {
				display: grid;
				grid-template-columns: repeat(2, 1fr);
				align-items: flex-start;
				gap: 10px;
				.meter-graph {
					height: 400px;
					padding: 10px;
					width: 100%;
					box-sizing: border-box;
				}
			}
		}

		.total {
			.meter-column-graph {
				.meter-page-grid {
					grid-template-columns: repeat(3, 1fr);

					.meter-graph {
						height: 200px;
					}
				}
			}
		}
	}
}

.lightMode {
	.meter-explorer-breakdown {
		.meter-explorer-date-time {
			border-bottom: none;
		}

		.meter-explorer-graphs {
			.meter-column-graph {
				.row-card {
					background-color: var(--bg-vanilla-300);

					.section-title {
						color: var(--bg-ink-400);
					}
				}
			}
		}
	}
}
frontend/src/container/MeterExplorer/Breakdown/BreakDown.tsx (new file, 200 lines)
@@ -0,0 +1,200 @@
|
||||
import './BreakDown.styles.scss';
|
||||
|
||||
import { Typography } from 'antd';
|
||||
// import useFilterConfig from 'components/QuickFilters/hooks/useFilterConfig';
|
||||
// import { SignalType } from 'components/QuickFilters/types';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import GridCard from 'container/GridCardLayout/GridCard';
|
||||
import { Card, CardContainer } from 'container/GridCardLayout/styles';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
// import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { useCallback } from 'react';
|
||||
import { useDispatch } from 'react-redux';
|
||||
import { useHistory, useLocation } from 'react-router-dom';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
// import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
import {
|
||||
getLogCountWidgetData,
|
||||
getLogSizeWidgetData,
|
||||
getMetricCountWidgetData,
|
||||
getSpanCountWidgetData,
|
||||
getSpanSizeWidgetData,
|
||||
getTotalLogSizeWidgetData,
|
||||
getTotalMetricDatapointCountWidgetData,
|
||||
getTotalTraceSizeWidgetData,
|
||||
} from './graphs';
|
||||
|
||||
type MetricSection = {
|
||||
id: string;
|
||||
title: string;
|
||||
graphs: Widgets[];
|
||||
};
|
||||
|
||||
const sections: MetricSection[] = [
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Total',
|
||||
graphs: [
|
||||
getTotalLogSizeWidgetData(),
|
||||
getTotalTraceSizeWidgetData(),
|
||||
getTotalMetricDatapointCountWidgetData(),
|
||||
],
|
||||
},
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Logs',
|
||||
graphs: [getLogCountWidgetData(), getLogSizeWidgetData()],
|
||||
},
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Traces',
|
||||
graphs: [getSpanCountWidgetData(), getSpanSizeWidgetData()],
|
||||
},
|
||||
{
|
||||
id: uuid(),
|
||||
title: 'Metrics',
|
||||
graphs: [getMetricCountWidgetData()],
|
||||
},
|
||||
];
|
||||
|
||||
function Section(section: MetricSection): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const { title, graphs } = section;
|
||||
const history = useHistory();
|
||||
const { pathname } = useLocation();
|
||||
const dispatch = useDispatch();
|
||||
const urlQuery = useUrlQuery();
|
||||
|
||||
const onDragSelect = useCallback(
|
||||
(start: number, end: number) => {
|
||||
const startTimestamp = Math.trunc(start);
|
||||
const endTimestamp = Math.trunc(end);
|
||||
|
||||
urlQuery.set(QueryParams.startTime, startTimestamp.toString());
|
||||
urlQuery.set(QueryParams.endTime, endTimestamp.toString());
|
||||
const generatedUrl = `${pathname}?${urlQuery.toString()}`;
|
||||
history.push(generatedUrl);
|
||||
|
||||
if (startTimestamp !== endTimestamp) {
|
||||
dispatch(UpdateTimeInterval('custom', [startTimestamp, endTimestamp]));
|
||||
}
|
||||
},
|
||||
[dispatch, history, pathname, urlQuery],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="meter-column-graph">
|
||||
<CardContainer className="row-card" isDarkMode={isDarkMode}>
|
||||
<Typography.Text className="section-title">{title}</Typography.Text>
|
||||
</CardContainer>
|
||||
<div className="meter-page-grid">
|
||||
{graphs.map((widget) => (
|
||||
<Card
|
||||
key={widget?.id}
|
||||
isDarkMode={isDarkMode}
|
||||
$panelType={PANEL_TYPES.BAR}
|
||||
className="meter-graph"
|
||||
>
|
||||
<GridCard widget={widget} onDragSelect={onDragSelect} version="v5" />
|
||||
</Card>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// function FilterDropdown({ attrKey }: { attrKey: string }): JSX.Element {
|
||||
// const {
|
||||
// data: keyValueSuggestions,
|
||||
// isLoading: isLoadingKeyValueSuggestions,
|
||||
// } = useGetQueryKeyValueSuggestions({
|
||||
// key: attrKey,
|
||||
// signal: DataSource.METRICS,
|
||||
// signalSource: 'meter',
|
||||
// options: {
|
||||
// keepPreviousData: true,
|
||||
// },
|
||||
// });
|
||||
|
||||
// const responseData = keyValueSuggestions?.data as any;
|
||||
// const values = responseData?.data?.values || {};
|
||||
// const stringValues = values.stringValues || [];
|
||||
// const numberValues = values.numberValues || [];
|
||||
|
||||
// const stringOptions = stringValues.filter(
|
||||
// (value: string | null | undefined): value is string =>
|
||||
// value !== null && value !== undefined && value !== '',
|
||||
// );
|
||||
|
||||
// const numberOptions = numberValues
|
||||
// .filter(
|
||||
// (value: number | null | undefined): value is number =>
|
||||
// value !== null && value !== undefined,
|
||||
// )
|
||||
// .map((value: number) => value.toString());
|
||||
|
||||
// const vals = [...stringOptions, ...numberOptions];
|
||||
|
||||
// return (
|
||||
// <div className="filter-dropdown">
|
||||
// <Typography.Text>{attrKey}</Typography.Text>
|
||||
// <Select
|
||||
// loading={isLoadingKeyValueSuggestions}
|
||||
// options={vals?.map((suggestion: any) => ({
|
||||
// label: suggestion,
|
||||
// value: suggestion,
|
||||
// }))}
|
||||
// placeholder={`Select ${attrKey}`}
|
||||
// />
|
||||
// </div>
|
||||
// );
|
||||
// }
|
||||
|
||||
function BreakDown(): JSX.Element {
|
||||
// const { customFilters } = useFilterConfig({
|
||||
// signal: SignalType.METER_EXPLORER,
|
||||
// config: [],
|
||||
// });
|
||||
|
||||
return (
|
||||
<div className="meter-explorer-breakdown">
|
||||
<section className="meter-explorer-date-time">
|
||||
{/* {customFilters.map((filter) => (
|
||||
<FilterDropdown key={filter.key} attrKey={filter.key} />
|
||||
))} */}
|
||||
<DateTimeSelectionV2 showAutoRefresh={false} />
|
||||
</section>
|
||||
<section className="meter-explorer-graphs">
|
||||
<section className="total">
|
||||
<Section
|
||||
id={sections[0].id}
|
||||
title={sections[0].title}
|
||||
graphs={sections[0].graphs}
|
||||
/>
|
||||
</section>
|
||||
{sections.map((section, idx) => {
|
||||
if (idx === 0) {
	return null;
}
|
||||
|
||||
return (
|
||||
<Section
|
||||
key={section.id}
|
||||
id={section.id}
|
||||
title={section.title}
|
||||
graphs={section.graphs}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</section>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default BreakDown;
|
||||
frontend/src/container/MeterExplorer/Breakdown/graphs.ts (new file, 390 lines)
@@ -0,0 +1,390 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { GetWidgetQueryBuilderProps } from 'container/MetricsApplication/types';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import {
|
||||
IBuilderFormula,
|
||||
IBuilderQuery,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
|
||||
interface GetWidgetQueryProps {
|
||||
title: string;
|
||||
description: string;
|
||||
queryData: IBuilderQuery[];
|
||||
queryFormulas?: IBuilderFormula[];
|
||||
panelTypes?: PANEL_TYPES;
|
||||
yAxisUnit?: string;
|
||||
columnUnits?: Record<string, string>;
|
||||
}
|
||||
|
||||
interface GetWidgetQueryPropsReturn extends GetWidgetQueryBuilderProps {
|
||||
description?: string;
|
||||
nullZeroValues: string;
|
||||
columnUnits?: Record<string, string>;
|
||||
}
|
||||
|
||||
export const getWidgetQueryBuilder = ({
|
||||
query,
|
||||
title = '',
|
||||
panelTypes,
|
||||
yAxisUnit = '',
|
||||
fillSpans = false,
|
||||
id,
|
||||
nullZeroValues,
|
||||
description,
|
||||
}: GetWidgetQueryPropsReturn): Widgets => ({
|
||||
description: description || '',
|
||||
id: id || uuid(),
|
||||
isStacked: false,
|
||||
nullZeroValues: nullZeroValues || '',
|
||||
opacity: '1',
|
||||
panelTypes,
|
||||
query,
|
||||
timePreferance: 'GLOBAL_TIME',
|
||||
title,
|
||||
yAxisUnit,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
selectedLogFields: [],
|
||||
selectedTracesFields: [],
|
||||
fillSpans,
|
||||
});
|
||||
|
||||
export function getWidgetQuery(
|
||||
props: GetWidgetQueryProps,
|
||||
): GetWidgetQueryPropsReturn {
|
||||
const { title, description, panelTypes, yAxisUnit, columnUnits } = props;
|
||||
return {
|
||||
title,
|
||||
yAxisUnit: yAxisUnit || 'none',
|
||||
panelTypes: panelTypes || PANEL_TYPES.TIME_SERIES,
|
||||
fillSpans: false,
|
||||
description,
|
||||
nullZeroValues: 'zero',
|
||||
columnUnits,
|
||||
query: {
|
||||
queryType: EQueryType.QUERY_BUILDER,
|
||||
promql: [],
|
||||
builder: {
|
||||
queryData: props.queryData,
|
||||
queryFormulas: (props.queryFormulas as IBuilderFormula[]) || [],
|
||||
},
|
||||
clickhouse_sql: [],
|
||||
id: uuid(),
|
||||
},
|
||||
};
|
||||
}
|
||||
export const getTotalLogSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.log.size',
|
||||
id: 'signoz.meter.log.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'sum',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Total size of log records ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.VALUE,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getTotalTraceSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.span.size',
|
||||
id: 'signoz.meter.span.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'sum',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Total size of spans ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.VALUE,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getTotalMetricDatapointCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.metric.datapoint.count',
|
||||
id: 'signoz.meter.metric.datapoint.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'sum',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Total metric datapoints ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.VALUE,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getLogCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.log.count',
|
||||
id: 'signoz.meter.log.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Count of log records ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getLogSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.log.size',
|
||||
id: 'signoz.meter.log.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'size',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Size of log records ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getSpanCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.span.count',
|
||||
id: 'signoz.meter.span.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Count of spans ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getSpanSizeWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.span.size',
|
||||
id: 'signoz.meter.span.size--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'size',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Size of spans ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'bytes',
|
||||
}),
|
||||
);
|
||||
|
||||
export const getMetricCountWidgetData = (): Widgets =>
|
||||
getWidgetQueryBuilder(
|
||||
getWidgetQuery({
|
||||
queryData: [
|
||||
{
|
||||
aggregateAttribute: {
|
||||
dataType: DataTypes.Float64,
|
||||
key: 'signoz.meter.metric.datapoint.count',
|
||||
id: 'signoz.meter.metric.datapoint.count--float64--Sum--true',
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
type: 'Sum',
|
||||
},
|
||||
aggregateOperator: 'increase',
|
||||
dataSource: DataSource.METRICS,
|
||||
source: 'meter',
|
||||
disabled: false,
|
||||
expression: 'A',
|
||||
filters: { items: [], op: 'AND' },
|
||||
functions: [],
|
||||
groupBy: [],
|
||||
having: [],
|
||||
legend: 'count',
|
||||
limit: null,
|
||||
orderBy: [],
|
||||
queryName: 'A',
|
||||
reduceTo: 'avg',
|
||||
spaceAggregation: 'sum',
|
||||
stepInterval: 60,
|
||||
timeAggregation: 'increase',
|
||||
},
|
||||
],
|
||||
title: 'Count of metric datapoints ingested',
|
||||
description: '',
|
||||
panelTypes: PANEL_TYPES.BAR,
|
||||
yAxisUnit: 'short',
|
||||
}),
|
||||
);
|
||||
@@ -43,7 +43,7 @@ function Explorer(): JSX.Element {
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
initialQueryMeterWithType,
|
||||
PANEL_TYPES.TIME_SERIES,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'meter' as 'meter' | '',
|
||||
),
|
||||
@@ -54,7 +54,7 @@ function Explorer(): JSX.Element {
|
||||
() =>
|
||||
updateAllQueriesOperators(
|
||||
currentQuery || initialQueryMeterWithType,
|
||||
PANEL_TYPES.TIME_SERIES,
|
||||
PANEL_TYPES.BAR,
|
||||
DataSource.METRICS,
|
||||
'meter' as 'meter' | '',
|
||||
),
|
||||
@@ -75,7 +75,7 @@ function Explorer(): JSX.Element {
|
||||
|
||||
const dashboardEditView = generateExportToDashboardLink({
|
||||
query: queryToExport || exportDefaultQuery,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
panelType: PANEL_TYPES.BAR,
|
||||
dashboardId: dashboard.id,
|
||||
widgetId,
|
||||
});
|
||||
|
||||
@@ -69,7 +69,7 @@ function TimeSeries(): JSX.Element {
|
||||
GetMetricQueryRange(
|
||||
{
|
||||
query: payload,
|
||||
graphType: PANEL_TYPES.TIME_SERIES,
|
||||
graphType: PANEL_TYPES.BAR,
|
||||
selectedTime: 'GLOBAL_TIME',
|
||||
globalSelectedInterval: globalSelectedTime,
|
||||
params: {
|
||||
@@ -131,6 +131,7 @@ function TimeSeries(): JSX.Element {
|
||||
data={datapoint}
|
||||
dataSource={DataSource.METRICS}
|
||||
yAxisUnit={yAxisUnit}
|
||||
panelType={PANEL_TYPES.BAR}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
|
||||
@@ -33,7 +33,7 @@ function ExpandedView({
|
||||
options,
|
||||
spaceAggregationSeriesMap,
|
||||
step,
|
||||
metricInspectionOptions,
|
||||
appliedMetricInspectionOptions,
|
||||
timeAggregatedSeriesMap,
|
||||
}: ExpandedViewProps): JSX.Element {
|
||||
const [
|
||||
@@ -44,17 +44,17 @@ function ExpandedView({
|
||||
useEffect(() => {
|
||||
logEvent(MetricsExplorerEvents.InspectPointClicked, {
|
||||
[MetricsExplorerEventKeys.Modal]: 'inspect',
|
||||
[MetricsExplorerEventKeys.Filters]: metricInspectionOptions.filters,
|
||||
[MetricsExplorerEventKeys.Filters]: appliedMetricInspectionOptions.filters,
|
||||
[MetricsExplorerEventKeys.TimeAggregationInterval]:
|
||||
metricInspectionOptions.timeAggregationInterval,
|
||||
appliedMetricInspectionOptions.timeAggregationInterval,
|
||||
[MetricsExplorerEventKeys.TimeAggregationOption]:
|
||||
metricInspectionOptions.timeAggregationOption,
|
||||
appliedMetricInspectionOptions.timeAggregationOption,
|
||||
[MetricsExplorerEventKeys.SpaceAggregationOption]:
|
||||
metricInspectionOptions.spaceAggregationOption,
|
||||
appliedMetricInspectionOptions.spaceAggregationOption,
|
||||
[MetricsExplorerEventKeys.SpaceAggregationLabels]:
|
||||
metricInspectionOptions.spaceAggregationLabels,
|
||||
appliedMetricInspectionOptions.spaceAggregationLabels,
|
||||
});
|
||||
}, [metricInspectionOptions]);
|
||||
}, [appliedMetricInspectionOptions]);
|
||||
|
||||
useEffect(() => {
|
||||
if (step !== InspectionStep.COMPLETED) {
|
||||
@@ -167,7 +167,7 @@ function ExpandedView({
|
||||
<Typography.Text strong>
|
||||
{`${absoluteValue} is the ${
|
||||
SPACE_AGGREGATION_OPTIONS_FOR_EXPANDED_VIEW[
|
||||
metricInspectionOptions.spaceAggregationOption ??
|
||||
appliedMetricInspectionOptions.spaceAggregationOption ??
|
||||
SpaceAggregationOptions.SUM_BY
|
||||
]
|
||||
} of`}
|
||||
@@ -240,7 +240,7 @@ function ExpandedView({
|
||||
)?.value ?? options?.value
|
||||
} is the ${
|
||||
TIME_AGGREGATION_OPTIONS[
|
||||
metricInspectionOptions.timeAggregationOption ??
|
||||
appliedMetricInspectionOptions.timeAggregationOption ??
|
||||
TimeAggregationOptions.SUM
|
||||
]
|
||||
} of`
|
||||
@@ -299,7 +299,7 @@ function ExpandedView({
|
||||
<Typography.Text strong>
|
||||
{`${absoluteValue} is the ${
|
||||
TIME_AGGREGATION_OPTIONS[
|
||||
metricInspectionOptions.timeAggregationOption ??
|
||||
appliedMetricInspectionOptions.timeAggregationOption ??
|
||||
TimeAggregationOptions.SUM
|
||||
]
|
||||
} of`}
|
||||
|
||||
@@ -29,7 +29,7 @@ function GraphView({
|
||||
popoverOptions,
|
||||
setShowExpandedView,
|
||||
setExpandedViewOptions,
|
||||
metricInspectionOptions,
|
||||
appliedMetricInspectionOptions,
|
||||
isInspectMetricsRefetching,
|
||||
}: GraphViewProps): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
@@ -233,7 +233,7 @@ function GraphView({
|
||||
inspectMetricsTimeSeries={inspectMetricsTimeSeries}
|
||||
setShowExpandedView={setShowExpandedView}
|
||||
setExpandedViewOptions={setExpandedViewOptions}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
appliedMetricInspectionOptions={appliedMetricInspectionOptions}
|
||||
isInspectMetricsRefetching={isInspectMetricsRefetching}
|
||||
/>
|
||||
)}
|
||||
@@ -255,7 +255,7 @@ function GraphView({
|
||||
<HoverPopover
|
||||
options={hoverPopoverOptions}
|
||||
step={inspectionStep}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
appliedMetricInspectionOptions={appliedMetricInspectionOptions}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -122,6 +122,10 @@
|
||||
gap: 4px;
|
||||
|
||||
.inspect-metrics-query-builder-header {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
|
||||
.query-builder-button-label {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
|
||||
@@ -11,6 +11,7 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import { Compass } from 'lucide-react';
|
||||
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
|
||||
import { useCallback, useEffect, useMemo, useState } from 'react';
|
||||
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import { MetricsExplorerEventKeys, MetricsExplorerEvents } from '../events';
|
||||
@@ -24,6 +25,7 @@ import {
|
||||
MetricInspectionAction,
|
||||
} from './types';
|
||||
import { useInspectMetrics } from './useInspectMetrics';
|
||||
import { useMetricName } from './utils';
|
||||
|
||||
function Inspect({
|
||||
metricName: defaultMetricName,
|
||||
@@ -31,7 +33,12 @@ function Inspect({
|
||||
onClose,
|
||||
}: InspectProps): JSX.Element {
|
||||
const isDarkMode = useIsDarkMode();
|
||||
const [metricName, setMetricName] = useState<string | null>(defaultMetricName);
|
||||
const {
|
||||
currentMetricName,
|
||||
setCurrentMetricName,
|
||||
appliedMetricName,
|
||||
setAppliedMetricName,
|
||||
} = useMetricName(defaultMetricName);
|
||||
const [
|
||||
popoverOptions,
|
||||
setPopoverOptions,
|
||||
@@ -42,9 +49,12 @@ function Inspect({
|
||||
] = useState<GraphPopoverOptions | null>(null);
|
||||
const [showExpandedView, setShowExpandedView] = useState(false);
|
||||
|
||||
const { data: metricDetailsData } = useGetMetricDetails(metricName ?? '', {
|
||||
enabled: !!metricName,
|
||||
});
|
||||
const { data: metricDetailsData } = useGetMetricDetails(
|
||||
appliedMetricName ?? '',
|
||||
{
|
||||
enabled: !!appliedMetricName,
|
||||
},
|
||||
);
|
||||
|
||||
const { currentQuery } = useQueryBuilder();
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
@@ -97,25 +107,16 @@ function Inspect({
|
||||
aggregatedTimeSeries,
|
||||
timeAggregatedSeriesMap,
|
||||
reset,
|
||||
} = useInspectMetrics(metricName);
|
||||
} = useInspectMetrics(appliedMetricName);
|
||||
|
||||
const handleDispatchMetricInspectionOptions = useCallback(
|
||||
(action: MetricInspectionAction): void => {
|
||||
dispatchMetricInspectionOptions(action);
|
||||
logEvent(MetricsExplorerEvents.InspectQueryChanged, {
|
||||
[MetricsExplorerEventKeys.Modal]: 'inspect',
|
||||
[MetricsExplorerEventKeys.Filters]: metricInspectionOptions.filters,
|
||||
[MetricsExplorerEventKeys.TimeAggregationInterval]:
|
||||
metricInspectionOptions.timeAggregationInterval,
|
||||
[MetricsExplorerEventKeys.TimeAggregationOption]:
|
||||
metricInspectionOptions.timeAggregationOption,
|
||||
[MetricsExplorerEventKeys.SpaceAggregationOption]:
|
||||
metricInspectionOptions.spaceAggregationOption,
|
||||
[MetricsExplorerEventKeys.SpaceAggregationLabels]:
|
||||
metricInspectionOptions.spaceAggregationLabels,
|
||||
});
|
||||
},
|
||||
[dispatchMetricInspectionOptions, metricInspectionOptions],
|
||||
[dispatchMetricInspectionOptions],
|
||||
);
|
||||
|
||||
const selectedMetricType = useMemo(
|
||||
@@ -128,18 +129,30 @@ function Inspect({
|
||||
[metricDetailsData],
|
||||
);
|
||||
|
||||
const aggregateAttribute = useMemo(
|
||||
() => ({
|
||||
key: currentMetricName ?? '',
|
||||
dataType: DataTypes.String,
|
||||
type: selectedMetricType as string,
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: `${currentMetricName}--${DataTypes.String}--${selectedMetricType}--true`,
|
||||
}),
|
||||
[currentMetricName, selectedMetricType],
|
||||
);
|
||||
|
||||
const [currentQueryData, setCurrentQueryData] = useState<IBuilderQuery>({
|
||||
...searchQuery,
|
||||
aggregateAttribute,
|
||||
});
|
||||
|
||||
const resetInspection = useCallback(() => {
|
||||
setShowExpandedView(false);
|
||||
setPopoverOptions(null);
|
||||
setExpandedViewOptions(null);
|
||||
setCurrentQueryData(searchQuery as IBuilderQuery);
|
||||
reset();
|
||||
}, [reset]);
|
||||
|
||||
// Reset inspection when the selected metric changes
|
||||
useEffect(() => {
|
||||
resetInspection();
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [metricName]);
|
||||
}, [reset, searchQuery]);
|
||||
|
||||
// Hide expanded view whenever inspection step changes
|
||||
useEffect(() => {
|
||||
@@ -193,7 +206,7 @@ function Inspect({
|
||||
inspectMetricsTimeSeries={aggregatedTimeSeries}
|
||||
formattedInspectMetricsTimeSeries={formattedInspectMetricsTimeSeries}
|
||||
resetInspection={resetInspection}
|
||||
metricName={metricName}
|
||||
metricName={appliedMetricName}
|
||||
metricUnit={selectedMetricUnit}
|
||||
metricType={selectedMetricType}
|
||||
spaceAggregationSeriesMap={spaceAggregationSeriesMap}
|
||||
@@ -203,19 +216,20 @@ function Inspect({
|
||||
showExpandedView={showExpandedView}
|
||||
setExpandedViewOptions={setExpandedViewOptions}
|
||||
popoverOptions={popoverOptions}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
appliedMetricInspectionOptions={metricInspectionOptions.appliedOptions}
|
||||
isInspectMetricsRefetching={isInspectMetricsRefetching}
|
||||
/>
|
||||
<QueryBuilder
|
||||
metricName={metricName}
|
||||
metricType={selectedMetricType}
|
||||
setMetricName={setMetricName}
|
||||
currentMetricName={currentMetricName}
|
||||
setCurrentMetricName={setCurrentMetricName}
|
||||
setAppliedMetricName={setAppliedMetricName}
|
||||
spaceAggregationLabels={spaceAggregationLabels}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
currentMetricInspectionOptions={metricInspectionOptions.currentOptions}
|
||||
dispatchMetricInspectionOptions={handleDispatchMetricInspectionOptions}
|
||||
inspectionStep={inspectionStep}
|
||||
inspectMetricsTimeSeries={inspectMetricsTimeSeries}
|
||||
searchQuery={searchQuery as IBuilderQuery}
|
||||
currentQuery={currentQueryData}
|
||||
setCurrentQuery={setCurrentQueryData}
|
||||
/>
|
||||
</div>
|
||||
<div className="inspect-metrics-content-second-col">
|
||||
@@ -228,7 +242,7 @@ function Inspect({
|
||||
options={expandedViewOptions}
|
||||
spaceAggregationSeriesMap={spaceAggregationSeriesMap}
|
||||
step={inspectionStep}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
appliedMetricInspectionOptions={metricInspectionOptions.appliedOptions}
|
||||
timeAggregatedSeriesMap={timeAggregatedSeriesMap}
|
||||
/>
|
||||
)}
|
||||
@@ -244,17 +258,21 @@ function Inspect({
|
||||
aggregatedTimeSeries,
|
||||
formattedInspectMetricsTimeSeries,
|
||||
resetInspection,
|
||||
metricName,
|
||||
appliedMetricName,
|
||||
selectedMetricUnit,
|
||||
selectedMetricType,
|
||||
spaceAggregationSeriesMap,
|
||||
inspectionStep,
|
||||
showExpandedView,
|
||||
popoverOptions,
|
||||
metricInspectionOptions,
|
||||
metricInspectionOptions.appliedOptions,
|
||||
metricInspectionOptions.currentOptions,
|
||||
currentMetricName,
|
||||
setCurrentMetricName,
|
||||
setAppliedMetricName,
|
||||
spaceAggregationLabels,
|
||||
handleDispatchMetricInspectionOptions,
|
||||
searchQuery,
|
||||
currentQueryData,
|
||||
expandedViewOptions,
|
||||
timeAggregatedSeriesMap,
|
||||
]);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { Button, Card } from 'antd';
|
||||
import { Atom } from 'lucide-react';
|
||||
import { Atom, Play } from 'lucide-react';
|
||||
import { useCallback } from 'react';
|
||||
|
||||
import { QueryBuilderProps } from './types';
|
||||
import {
|
||||
@@ -10,16 +11,24 @@ import {
|
||||
} from './utils';
|
||||
|
||||
function QueryBuilder({
|
||||
metricName,
|
||||
setMetricName,
|
||||
currentMetricName,
|
||||
setCurrentMetricName,
|
||||
setAppliedMetricName,
|
||||
spaceAggregationLabels,
|
||||
metricInspectionOptions,
|
||||
currentMetricInspectionOptions,
|
||||
dispatchMetricInspectionOptions,
|
||||
inspectionStep,
|
||||
inspectMetricsTimeSeries,
|
||||
searchQuery,
|
||||
metricType,
|
||||
currentQuery,
|
||||
setCurrentQuery,
|
||||
}: QueryBuilderProps): JSX.Element {
|
||||
const applyInspectionOptions = useCallback(() => {
|
||||
setAppliedMetricName(currentMetricName ?? '');
|
||||
dispatchMetricInspectionOptions({
|
||||
type: 'APPLY_INSPECTION_OPTIONS',
|
||||
});
|
||||
}, [currentMetricName, setAppliedMetricName, dispatchMetricInspectionOptions]);
|
||||
|
||||
return (
|
||||
<div className="inspect-metrics-query-builder">
|
||||
<div className="inspect-metrics-query-builder-header">
|
||||
@@ -31,25 +40,36 @@ function QueryBuilder({
|
||||
>
|
||||
Query Builder
|
||||
</Button>
|
||||
<Button
|
||||
type="primary"
|
||||
className="stage-run-query"
|
||||
icon={<Play size={14} />}
|
||||
onClick={applyInspectionOptions}
|
||||
data-testid="apply-query-button"
|
||||
>
|
||||
Stage & Run Query
|
||||
</Button>
|
||||
</div>
|
||||
<Card className="inspect-metrics-query-builder-content">
|
||||
<MetricNameSearch metricName={metricName} setMetricName={setMetricName} />
|
||||
<MetricNameSearch
|
||||
currentMetricName={currentMetricName}
|
||||
setCurrentMetricName={setCurrentMetricName}
|
||||
/>
|
||||
<MetricFilters
|
||||
dispatchMetricInspectionOptions={dispatchMetricInspectionOptions}
|
||||
searchQuery={searchQuery}
|
||||
metricName={metricName}
|
||||
metricType={metricType || null}
|
||||
currentQuery={currentQuery}
|
||||
setCurrentQuery={setCurrentQuery}
|
||||
/>
|
||||
<MetricTimeAggregation
|
||||
inspectionStep={inspectionStep}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
currentMetricInspectionOptions={currentMetricInspectionOptions}
|
||||
dispatchMetricInspectionOptions={dispatchMetricInspectionOptions}
|
||||
inspectMetricsTimeSeries={inspectMetricsTimeSeries}
|
||||
/>
|
||||
<MetricSpaceAggregation
|
||||
inspectionStep={inspectionStep}
|
||||
spaceAggregationLabels={spaceAggregationLabels}
|
||||
metricInspectionOptions={metricInspectionOptions}
|
||||
currentMetricInspectionOptions={currentMetricInspectionOptions}
|
||||
dispatchMetricInspectionOptions={dispatchMetricInspectionOptions}
|
||||
/>
|
||||
</Card>
|
||||
|
||||
@@ -11,13 +11,13 @@ function TableView({
|
||||
setShowExpandedView,
|
||||
setExpandedViewOptions,
|
||||
isInspectMetricsRefetching,
|
||||
metricInspectionOptions,
|
||||
appliedMetricInspectionOptions,
|
||||
}: TableViewProps): JSX.Element {
|
||||
const isSpaceAggregatedWithoutLabel = useMemo(
|
||||
() =>
|
||||
!!metricInspectionOptions.spaceAggregationOption &&
|
||||
metricInspectionOptions.spaceAggregationLabels.length === 0,
|
||||
[metricInspectionOptions],
|
||||
!!appliedMetricInspectionOptions.spaceAggregationOption &&
|
||||
appliedMetricInspectionOptions.spaceAggregationLabels.length === 0,
|
||||
[appliedMetricInspectionOptions],
|
||||
);
|
||||
const labelKeys = useMemo(() => {
|
||||
if (isSpaceAggregatedWithoutLabel) {
|
||||
|
||||
@@ -10,7 +10,7 @@ import ExpandedView from '../ExpandedView';
|
||||
import {
|
||||
GraphPopoverData,
|
||||
InspectionStep,
|
||||
MetricInspectionOptions,
|
||||
InspectOptions,
|
||||
SpaceAggregationOptions,
|
||||
TimeAggregationOptions,
|
||||
} from '../types';
|
||||
@@ -62,7 +62,7 @@ describe('ExpandedView', () => {
|
||||
],
|
||||
]);
|
||||
|
||||
const mockMetricInspectionOptions: MetricInspectionOptions = {
|
||||
const mockMetricInspectionOptions: InspectOptions = {
|
||||
timeAggregationOption: TimeAggregationOptions.MAX,
|
||||
timeAggregationInterval: 60,
|
||||
spaceAggregationOption: SpaceAggregationOptions.MAX_BY,
|
||||
@@ -79,7 +79,7 @@ describe('ExpandedView', () => {
|
||||
options={mockOptions}
|
||||
spaceAggregationSeriesMap={mockSpaceAggregationSeriesMap}
|
||||
step={InspectionStep.TIME_AGGREGATION}
|
||||
metricInspectionOptions={mockMetricInspectionOptions}
|
||||
appliedMetricInspectionOptions={mockMetricInspectionOptions}
|
||||
timeAggregatedSeriesMap={mockTimeAggregatedSeriesMap}
|
||||
/>,
|
||||
);
|
||||
@@ -96,7 +96,7 @@ describe('ExpandedView', () => {
|
||||
options={mockOptions}
|
||||
spaceAggregationSeriesMap={mockSpaceAggregationSeriesMap}
|
||||
step={InspectionStep.SPACE_AGGREGATION}
|
||||
metricInspectionOptions={{
|
||||
appliedMetricInspectionOptions={{
|
||||
...mockMetricInspectionOptions,
|
||||
timeAggregationInterval: TIME_AGGREGATION_INTERVAL,
|
||||
}}
|
||||
@@ -127,7 +127,7 @@ describe('ExpandedView', () => {
|
||||
options={mockOptions}
|
||||
spaceAggregationSeriesMap={mockSpaceAggregationSeriesMap}
|
||||
step={InspectionStep.COMPLETED}
|
||||
metricInspectionOptions={mockMetricInspectionOptions}
|
||||
appliedMetricInspectionOptions={mockMetricInspectionOptions}
|
||||
timeAggregatedSeriesMap={mockTimeAggregatedSeriesMap}
|
||||
/>,
|
||||
);
|
||||
@@ -153,7 +153,7 @@ describe('ExpandedView', () => {
|
||||
options={mockOptions}
|
||||
spaceAggregationSeriesMap={mockSpaceAggregationSeriesMap}
|
||||
step={InspectionStep.TIME_AGGREGATION}
|
||||
metricInspectionOptions={mockMetricInspectionOptions}
|
||||
appliedMetricInspectionOptions={mockMetricInspectionOptions}
|
||||
timeAggregatedSeriesMap={mockTimeAggregatedSeriesMap}
|
||||
/>,
|
||||
);
|
||||
|
||||
@@ -60,7 +60,7 @@ describe('GraphView', () => {
|
||||
setExpandedViewOptions: jest.fn(),
|
||||
resetInspection: jest.fn(),
|
||||
showExpandedView: false,
|
||||
metricInspectionOptions: {
|
||||
appliedMetricInspectionOptions: {
|
||||
timeAggregationInterval: 60,
|
||||
spaceAggregationOption: SpaceAggregationOptions.MAX_BY,
|
||||
spaceAggregationLabels: ['host_name'],
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
/* eslint-disable react/jsx-props-no-spreading */
|
||||
import { render, screen } from '@testing-library/react';
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import * as appContextHooks from 'providers/App/App';
|
||||
import { QueryClient, QueryClientProvider } from 'react-query';
|
||||
@@ -22,6 +22,27 @@ jest.mock('react-router-dom', () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
jest.mock('container/QueryBuilder/filters', () => ({
|
||||
AggregatorFilter: ({ onSelect, onChange, defaultValue }: any): JSX.Element => (
|
||||
<div data-testid="mock-aggregator-filter">
|
||||
<input
|
||||
data-testid="metric-name-input"
|
||||
defaultValue={defaultValue}
|
||||
onChange={(e: React.ChangeEvent<HTMLInputElement>): void =>
|
||||
onChange({ key: e.target.value })
|
||||
}
|
||||
/>
|
||||
<button
|
||||
type="button"
|
||||
data-testid="select-metric-button"
|
||||
onClick={(): void => onSelect({ key: 'test_metric_2' })}
|
||||
>
|
||||
Select Metric
|
||||
</button>
|
||||
</div>
|
||||
),
|
||||
}));
|
||||
|
||||
jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
user: {
|
||||
role: 'admin',
|
||||
@@ -48,12 +69,16 @@ jest.spyOn(appContextHooks, 'useAppContext').mockReturnValue({
|
||||
|
||||
const queryClient = new QueryClient();
|
||||
|
||||
const mockSetCurrentMetricName = jest.fn();
|
||||
const mockSetAppliedMetricName = jest.fn();
|
||||
|
||||
describe('QueryBuilder', () => {
|
||||
const defaultProps = {
|
||||
metricName: 'test_metric',
|
||||
setMetricName: jest.fn(),
|
||||
currentMetricName: 'test_metric',
|
||||
setCurrentMetricName: mockSetCurrentMetricName,
|
||||
setAppliedMetricName: mockSetAppliedMetricName,
|
||||
spaceAggregationLabels: ['label1', 'label2'],
|
||||
metricInspectionOptions: {
|
||||
currentMetricInspectionOptions: {
|
||||
timeAggregationInterval: 60,
|
||||
timeAggregationOption: TimeAggregationOptions.AVG,
|
||||
spaceAggregationLabels: [],
|
||||
@@ -67,12 +92,13 @@ describe('QueryBuilder', () => {
|
||||
metricType: MetricType.SUM,
|
||||
inspectionStep: InspectionStep.TIME_AGGREGATION,
|
||||
inspectMetricsTimeSeries: [],
|
||||
searchQuery: {
|
||||
currentQuery: {
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'and',
|
||||
},
|
||||
} as any,
|
||||
setCurrentQuery: jest.fn(),
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -133,4 +159,57 @@ describe('QueryBuilder', () => {
|
||||
);
|
||||
expect(screen.getByTestId('metric-space-aggregation')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should call setCurrentMetricName when metric name is selected', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<QueryBuilder {...defaultProps} />
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const metricNameSearch = screen.getByTestId('metric-name-search');
|
||||
expect(metricNameSearch).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText('From')).toBeInTheDocument();
|
||||
|
||||
const selectButton = screen.getByTestId('select-metric-button');
|
||||
fireEvent.click(selectButton);
|
||||
|
||||
expect(mockSetCurrentMetricName).toHaveBeenCalledWith('test_metric_2');
|
||||
});
|
||||
|
||||
it('should call setAppliedMetricName when query is applied', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<QueryBuilder {...defaultProps} />
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const applyQueryButton = screen.getByTestId('apply-query-button');
|
||||
fireEvent.click(applyQueryButton);
|
||||
|
||||
expect(mockSetCurrentMetricName).toHaveBeenCalledTimes(0);
|
||||
expect(mockSetAppliedMetricName).toHaveBeenCalledWith('test_metric');
|
||||
});
|
||||
|
||||
it('should apply inspect options when query is applied', () => {
|
||||
render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<Provider store={store}>
|
||||
<QueryBuilder {...defaultProps} />
|
||||
</Provider>
|
||||
</QueryClientProvider>,
|
||||
);
|
||||
|
||||
const applyQueryButton = screen.getByTestId('apply-query-button');
|
||||
fireEvent.click(applyQueryButton);
|
||||
|
||||
expect(defaultProps.dispatchMetricInspectionOptions).toHaveBeenCalledWith({
|
||||
type: 'APPLY_INSPECTION_OPTIONS',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -49,7 +49,7 @@ describe('TableView', () => {
|
||||
inspectMetricsTimeSeries: mockTimeSeries,
|
||||
setShowExpandedView: jest.fn(),
|
||||
setExpandedViewOptions: jest.fn(),
|
||||
metricInspectionOptions: {
|
||||
appliedMetricInspectionOptions: {
|
||||
timeAggregationInterval: 60,
|
||||
timeAggregationOption: TimeAggregationOptions.MAX,
|
||||
spaceAggregationOption: SpaceAggregationOptions.MAX_BY,
|
||||
|
||||
@@ -72,13 +72,25 @@ export const SPACE_AGGREGATION_OPTIONS_FOR_EXPANDED_VIEW: Record<
|
||||
};
|
||||
|
||||
export const INITIAL_INSPECT_METRICS_OPTIONS: MetricInspectionOptions = {
|
||||
timeAggregationOption: undefined,
|
||||
timeAggregationInterval: undefined,
|
||||
spaceAggregationOption: undefined,
|
||||
spaceAggregationLabels: [],
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
currentOptions: {
|
||||
timeAggregationOption: undefined,
|
||||
timeAggregationInterval: undefined,
|
||||
spaceAggregationOption: undefined,
|
||||
spaceAggregationLabels: [],
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
},
|
||||
appliedOptions: {
|
||||
timeAggregationOption: undefined,
|
||||
timeAggregationInterval: undefined,
|
||||
spaceAggregationOption: undefined,
|
||||
spaceAggregationLabels: [],
|
||||
filters: {
|
||||
items: [],
|
||||
op: 'AND',
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
@@ -43,36 +43,36 @@ export interface GraphViewProps {
|
||||
showExpandedView: boolean;
|
||||
setShowExpandedView: (showExpandedView: boolean) => void;
|
||||
setExpandedViewOptions: (options: GraphPopoverOptions | null) => void;
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
appliedMetricInspectionOptions: InspectOptions;
|
||||
isInspectMetricsRefetching: boolean;
|
||||
}
|
||||
|
||||
export interface QueryBuilderProps {
|
||||
metricName: string | null;
|
||||
setMetricName: (metricName: string) => void;
|
||||
metricType: MetricType | undefined;
|
||||
currentMetricName: string | null;
|
||||
setCurrentMetricName: (metricName: string) => void;
|
||||
setAppliedMetricName: (metricName: string) => void;
|
||||
spaceAggregationLabels: string[];
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
currentMetricInspectionOptions: InspectOptions;
|
||||
dispatchMetricInspectionOptions: (action: MetricInspectionAction) => void;
|
||||
inspectionStep: InspectionStep;
|
||||
inspectMetricsTimeSeries: InspectMetricsSeries[];
|
||||
searchQuery: IBuilderQuery;
|
||||
currentQuery: IBuilderQuery;
|
||||
setCurrentQuery: (query: IBuilderQuery) => void;
|
||||
}
|
||||
|
||||
export interface MetricNameSearchProps {
|
||||
metricName: string | null;
|
||||
setMetricName: (metricName: string) => void;
|
||||
currentMetricName: string | null;
|
||||
setCurrentMetricName: (metricName: string) => void;
|
||||
}
|
||||
|
||||
export interface MetricFiltersProps {
|
||||
searchQuery: IBuilderQuery;
|
||||
dispatchMetricInspectionOptions: (action: MetricInspectionAction) => void;
|
||||
metricName: string | null;
|
||||
metricType: MetricType | null;
|
||||
currentQuery: IBuilderQuery;
|
||||
setCurrentQuery: (query: IBuilderQuery) => void;
|
||||
}
|
||||
|
||||
export interface MetricTimeAggregationProps {
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
currentMetricInspectionOptions: InspectOptions;
|
||||
dispatchMetricInspectionOptions: (action: MetricInspectionAction) => void;
|
||||
inspectionStep: InspectionStep;
|
||||
inspectMetricsTimeSeries: InspectMetricsSeries[];
|
||||
@@ -80,7 +80,7 @@ export interface MetricTimeAggregationProps {
|
||||
|
||||
export interface MetricSpaceAggregationProps {
|
||||
spaceAggregationLabels: string[];
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
currentMetricInspectionOptions: InspectOptions;
|
||||
dispatchMetricInspectionOptions: (action: MetricInspectionAction) => void;
|
||||
inspectionStep: InspectionStep;
|
||||
}
|
||||
@@ -101,7 +101,7 @@ export enum SpaceAggregationOptions {
|
||||
AVG_BY = 'avg_by',
|
||||
}
|
||||
|
||||
export interface MetricInspectionOptions {
|
||||
export interface InspectOptions {
|
||||
timeAggregationOption: TimeAggregationOptions | undefined;
|
||||
timeAggregationInterval: number | undefined;
|
||||
spaceAggregationOption: SpaceAggregationOptions | undefined;
|
||||
@@ -109,13 +109,19 @@ export interface MetricInspectionOptions {
|
||||
filters: TagFilter;
|
||||
}
|
||||
|
||||
export interface MetricInspectionOptions {
|
||||
currentOptions: InspectOptions;
|
||||
appliedOptions: InspectOptions;
|
||||
}
|
||||
|
||||
export type MetricInspectionAction =
|
||||
| { type: 'SET_TIME_AGGREGATION_OPTION'; payload: TimeAggregationOptions }
|
||||
| { type: 'SET_TIME_AGGREGATION_INTERVAL'; payload: number }
|
||||
| { type: 'SET_SPACE_AGGREGATION_OPTION'; payload: SpaceAggregationOptions }
|
||||
| { type: 'SET_SPACE_AGGREGATION_LABELS'; payload: string[] }
|
||||
| { type: 'SET_FILTERS'; payload: TagFilter }
|
||||
| { type: 'RESET_INSPECTION' };
|
||||
| { type: 'RESET_INSPECTION' }
|
||||
| { type: 'APPLY_INSPECTION_OPTIONS' };
|
||||
|
||||
export enum InspectionStep {
|
||||
TIME_AGGREGATION = 1,
|
||||
@@ -156,7 +162,7 @@ export interface ExpandedViewProps {
|
||||
options: GraphPopoverOptions | null;
|
||||
spaceAggregationSeriesMap: Map<string, InspectMetricsSeries[]>;
|
||||
step: InspectionStep;
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
appliedMetricInspectionOptions: InspectOptions;
|
||||
timeAggregatedSeriesMap: Map<number, GraphPopoverData[]>;
|
||||
}
|
||||
|
||||
@@ -165,7 +171,7 @@ export interface TableViewProps {
|
||||
inspectMetricsTimeSeries: InspectMetricsSeries[];
|
||||
setShowExpandedView: (showExpandedView: boolean) => void;
|
||||
setExpandedViewOptions: (options: GraphPopoverOptions | null) => void;
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
appliedMetricInspectionOptions: InspectOptions;
|
||||
isInspectMetricsRefetching: boolean;
|
||||
}
|
||||
|
||||
|
||||
@@ -27,30 +27,55 @@ const metricInspectionReducer = (
|
||||
case 'SET_TIME_AGGREGATION_OPTION':
|
||||
return {
|
||||
...state,
|
||||
timeAggregationOption: action.payload,
|
||||
currentOptions: {
|
||||
...state.currentOptions,
|
||||
timeAggregationOption: action.payload,
|
||||
},
|
||||
};
|
||||
case 'SET_TIME_AGGREGATION_INTERVAL':
|
||||
return {
|
||||
...state,
|
||||
timeAggregationInterval: action.payload,
|
||||
currentOptions: {
|
||||
...state.currentOptions,
|
||||
timeAggregationInterval: action.payload,
|
||||
},
|
||||
};
|
||||
case 'SET_SPACE_AGGREGATION_OPTION':
|
||||
return {
|
||||
...state,
|
||||
spaceAggregationOption: action.payload,
|
||||
currentOptions: {
|
||||
...state.currentOptions,
|
||||
spaceAggregationOption: action.payload,
|
||||
},
|
||||
};
|
||||
case 'SET_SPACE_AGGREGATION_LABELS':
|
||||
return {
|
||||
...state,
|
||||
spaceAggregationLabels: action.payload,
|
||||
currentOptions: {
|
||||
...state.currentOptions,
|
||||
spaceAggregationLabels: action.payload,
|
||||
},
|
||||
};
|
||||
case 'SET_FILTERS':
|
||||
return {
|
||||
...state,
|
||||
filters: action.payload,
|
||||
currentOptions: {
|
||||
...state.currentOptions,
|
||||
filters: action.payload,
|
||||
},
|
||||
};
|
||||
case 'APPLY_INSPECTION_OPTIONS':
|
||||
return {
|
||||
...state,
|
||||
appliedOptions: {
|
||||
...state.appliedOptions,
|
||||
...state.currentOptions,
|
||||
},
|
||||
};
|
||||
case 'RESET_INSPECTION':
|
||||
return { ...INITIAL_INSPECT_METRICS_OPTIONS };
|
||||
return {
|
||||
...INITIAL_INSPECT_METRICS_OPTIONS,
|
||||
};
|
||||
default:
|
||||
return state;
|
||||
}
|
||||
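A hedged usage sketch of how a consumer would drive the reducer above: edits dispatch `SET_*` actions into the draft, and a single `APPLY_INSPECTION_OPTIONS` dispatch promotes them. The payload values here are made up for illustration.

```typescript
// Hypothetical apply handler; option and interval values are illustrative.
type InspectionActionSketch =
	| { type: 'SET_TIME_AGGREGATION_OPTION'; payload: string }
	| { type: 'SET_TIME_AGGREGATION_INTERVAL'; payload: number }
	| { type: 'APPLY_INSPECTION_OPTIONS' };

function stageAndApply(
	dispatch: (action: InspectionActionSketch) => void,
): void {
	dispatch({ type: 'SET_TIME_AGGREGATION_OPTION', payload: 'avg' });
	dispatch({ type: 'SET_TIME_AGGREGATION_INTERVAL', payload: 60 });
	// Nothing re-aggregates until the draft is explicitly applied.
	dispatch({ type: 'APPLY_INSPECTION_OPTIONS' });
}
```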
@@ -84,7 +109,7 @@ export function useInspectMetrics(
|
||||
metricName: metricName ?? '',
|
||||
start,
|
||||
end,
|
||||
filters: metricInspectionOptions.filters,
|
||||
filters: metricInspectionOptions.appliedOptions.filters,
|
||||
},
|
||||
{
|
||||
enabled: !!metricName,
|
||||
@@ -117,13 +142,26 @@ export function useInspectMetrics(
|
||||
);
|
||||
|
||||
// Evaluate inspection step
|
||||
const inspectionStep = useMemo(() => {
|
||||
if (metricInspectionOptions.spaceAggregationOption) {
|
||||
const currentInspectionStep = useMemo(() => {
|
||||
if (metricInspectionOptions.currentOptions.spaceAggregationOption) {
|
||||
return InspectionStep.COMPLETED;
|
||||
}
|
||||
if (
|
||||
metricInspectionOptions.timeAggregationOption &&
|
||||
metricInspectionOptions.timeAggregationInterval
|
||||
metricInspectionOptions.currentOptions.timeAggregationOption &&
|
||||
metricInspectionOptions.currentOptions.timeAggregationInterval
|
||||
) {
|
||||
return InspectionStep.SPACE_AGGREGATION;
|
||||
}
|
||||
return InspectionStep.TIME_AGGREGATION;
|
||||
}, [metricInspectionOptions]);
|
||||
|
||||
const appliedInspectionStep = useMemo(() => {
|
||||
if (metricInspectionOptions.appliedOptions.spaceAggregationOption) {
|
||||
return InspectionStep.COMPLETED;
|
||||
}
|
||||
if (
|
||||
metricInspectionOptions.appliedOptions.timeAggregationOption &&
|
||||
metricInspectionOptions.appliedOptions.timeAggregationInterval
|
||||
) {
|
||||
return InspectionStep.SPACE_AGGREGATION;
|
||||
}
|
||||
@@ -149,23 +187,26 @@ export function useInspectMetrics(
|
||||
|
||||
// Apply time aggregation once required options are set
|
||||
if (
|
||||
inspectionStep >= InspectionStep.SPACE_AGGREGATION &&
|
||||
metricInspectionOptions.timeAggregationOption &&
|
||||
metricInspectionOptions.timeAggregationInterval
|
||||
appliedInspectionStep >= InspectionStep.SPACE_AGGREGATION &&
|
||||
metricInspectionOptions.appliedOptions.timeAggregationOption &&
|
||||
metricInspectionOptions.appliedOptions.timeAggregationInterval
|
||||
) {
|
||||
const {
|
||||
timeAggregatedSeries,
|
||||
timeAggregatedSeriesMap,
|
||||
} = applyTimeAggregation(inspectMetricsTimeSeries, metricInspectionOptions);
|
||||
} = applyTimeAggregation(
|
||||
inspectMetricsTimeSeries,
|
||||
metricInspectionOptions.appliedOptions,
|
||||
);
|
||||
timeSeries = timeAggregatedSeries;
|
||||
setTimeAggregatedSeriesMap(timeAggregatedSeriesMap);
|
||||
setAggregatedTimeSeries(timeSeries);
|
||||
}
|
||||
// Apply space aggregation
|
||||
if (inspectionStep === InspectionStep.COMPLETED) {
|
||||
if (appliedInspectionStep === InspectionStep.COMPLETED) {
|
||||
const { aggregatedSeries, spaceAggregatedSeriesMap } = applySpaceAggregation(
|
||||
timeSeries,
|
||||
metricInspectionOptions,
|
||||
metricInspectionOptions.appliedOptions,
|
||||
);
|
||||
timeSeries = aggregatedSeries;
|
||||
setSpaceAggregatedSeriesMap(spaceAggregatedSeriesMap);
|
||||
@@ -186,7 +227,7 @@ export function useInspectMetrics(
|
||||
|
||||
const rawData = [timestamps, ...timeseriesArray];
|
||||
return rawData.map((series) => new Float64Array(series));
|
||||
}, [inspectMetricsTimeSeries, inspectionStep, metricInspectionOptions]);
|
||||
}, [inspectMetricsTimeSeries, appliedInspectionStep, metricInspectionOptions]);
|
||||
|
||||
const spaceAggregationLabels = useMemo(() => {
|
||||
const labels = new Set<string>();
|
||||
@@ -216,7 +257,7 @@ export function useInspectMetrics(
|
||||
spaceAggregationLabels,
|
||||
metricInspectionOptions,
|
||||
dispatchMetricInspectionOptions,
|
||||
inspectionStep,
|
||||
inspectionStep: currentInspectionStep,
|
||||
isInspectMetricsRefetching,
|
||||
spaceAggregatedSeriesMap,
|
||||
aggregatedTimeSeries,
|
||||
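The hook above now derives two steps from the same rule: one from the draft options (it drives the form), one from the applied options (it drives aggregation of the fetched series). A sketch of that rule with a simplified options shape:

```typescript
enum InspectionStepSketch {
	TIME_AGGREGATION = 1,
	SPACE_AGGREGATION = 2,
	COMPLETED = 3,
}

interface StepInputSketch {
	timeAggregationOption?: string;
	timeAggregationInterval?: number;
	spaceAggregationOption?: string;
}

// Same rule for current and applied options: a space aggregation completes the
// flow, and a full time aggregation unlocks the space-aggregation step.
function deriveInspectionStep(options: StepInputSketch): InspectionStepSketch {
	if (options.spaceAggregationOption) {
		return InspectionStepSketch.COMPLETED;
	}
	if (options.timeAggregationOption && options.timeAggregationInterval) {
		return InspectionStepSketch.SPACE_AGGREGATION;
	}
	return InspectionStepSketch.TIME_AGGREGATION;
}
```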
@@ -4,16 +4,12 @@ import logEvent from 'api/common/logEvent';
|
||||
import { InspectMetricsSeries } from 'api/metricsExplorer/getInspectMetricsDetails';
|
||||
import { MetricType } from 'api/metricsExplorer/getMetricsList';
|
||||
import classNames from 'classnames';
|
||||
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
|
||||
import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { AggregatorFilter } from 'container/QueryBuilder/filters';
|
||||
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
|
||||
import { useQueryOperations } from 'hooks/queryBuilder/useQueryBuilderOperations';
|
||||
import { HardHat } from 'lucide-react';
|
||||
import { useMemo, useState } from 'react';
|
||||
import {
|
||||
BaseAutocompleteData,
|
||||
DataTypes,
|
||||
} from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { BaseAutocompleteData } from 'types/api/queryBuilder/queryAutocompleteResponse';
|
||||
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
@@ -26,8 +22,8 @@ import {
|
||||
GraphPopoverData,
|
||||
GraphPopoverOptions,
|
||||
InspectionStep,
|
||||
InspectOptions,
|
||||
MetricFiltersProps,
|
||||
MetricInspectionOptions,
|
||||
MetricNameSearchProps,
|
||||
MetricSpaceAggregationProps,
|
||||
MetricTimeAggregationProps,
|
||||
@@ -71,13 +67,13 @@ export function getDefaultTimeAggregationInterval(
|
||||
}
|
||||
|
||||
export function MetricNameSearch({
|
||||
metricName,
|
||||
setMetricName,
|
||||
currentMetricName,
|
||||
setCurrentMetricName,
|
||||
}: MetricNameSearchProps): JSX.Element {
|
||||
const [searchText, setSearchText] = useState(metricName);
|
||||
const [searchText, setSearchText] = useState(currentMetricName);
|
||||
|
||||
const handleSetMetricName = (value: BaseAutocompleteData): void => {
|
||||
setMetricName(value.key);
|
||||
setCurrentMetricName(value.key);
|
||||
};
|
||||
|
||||
const handleChange = (value: BaseAutocompleteData): void => {
|
||||
@@ -102,27 +98,31 @@ export function MetricNameSearch({
|
||||
|
||||
export function MetricFilters({
|
||||
dispatchMetricInspectionOptions,
|
||||
searchQuery,
|
||||
metricName,
|
||||
metricType,
|
||||
currentQuery,
|
||||
setCurrentQuery,
|
||||
}: MetricFiltersProps): JSX.Element {
|
||||
const { handleChangeQueryData } = useQueryOperations({
|
||||
index: 0,
|
||||
query: searchQuery,
|
||||
entityVersion: '',
|
||||
});
|
||||
|
||||
const aggregateAttribute = useMemo(
|
||||
() => ({
|
||||
key: metricName ?? '',
|
||||
dataType: DataTypes.String,
|
||||
type: metricType,
|
||||
isColumn: true,
|
||||
isJSON: false,
|
||||
id: `${metricName}--${DataTypes.String}--${metricType}--true`,
|
||||
}),
|
||||
[metricName, metricType],
|
||||
);
|
||||
const handleOnChange = (expression: string): void => {
|
||||
logEvent(MetricsExplorerEvents.FilterApplied, {
|
||||
[MetricsExplorerEventKeys.Modal]: 'inspect',
|
||||
});
|
||||
const tagFilter = {
|
||||
items: convertExpressionToFilters(expression),
|
||||
op: 'AND',
|
||||
};
|
||||
setCurrentQuery({
|
||||
...currentQuery,
|
||||
filters: tagFilter,
|
||||
filter: {
|
||||
...currentQuery.filter,
|
||||
expression,
|
||||
},
|
||||
expression,
|
||||
});
|
||||
dispatchMetricInspectionOptions({
|
||||
type: 'SET_FILTERS',
|
||||
payload: tagFilter,
|
||||
});
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -130,30 +130,19 @@ export function MetricFilters({
|
||||
className="inspect-metrics-input-group metric-filters"
|
||||
>
|
||||
<Typography.Text>Where</Typography.Text>
|
||||
<QueryBuilderSearch
|
||||
query={{
|
||||
...searchQuery,
|
||||
aggregateAttribute,
|
||||
}}
|
||||
onChange={(value): void => {
|
||||
handleChangeQueryData('filters', value);
|
||||
logEvent(MetricsExplorerEvents.FilterApplied, {
|
||||
[MetricsExplorerEventKeys.Modal]: 'inspect',
|
||||
});
|
||||
dispatchMetricInspectionOptions({
|
||||
type: 'SET_FILTERS',
|
||||
payload: value,
|
||||
});
|
||||
}}
|
||||
suffixIcon={<HardHat size={16} />}
|
||||
disableNavigationShortcuts
|
||||
/>
|
||||
{currentQuery && (
|
||||
<QuerySearch
|
||||
queryData={currentQuery}
|
||||
onChange={handleOnChange}
|
||||
dataSource={DataSource.METRICS}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function MetricTimeAggregation({
|
||||
metricInspectionOptions,
|
||||
currentMetricInspectionOptions,
|
||||
dispatchMetricInspectionOptions,
|
||||
inspectionStep,
|
||||
inspectMetricsTimeSeries,
|
||||
@@ -174,14 +163,14 @@ export function MetricTimeAggregation({
|
||||
<div className="inspect-metrics-input-group">
|
||||
<Typography.Text>Align with</Typography.Text>
|
||||
<Select
|
||||
value={metricInspectionOptions.timeAggregationOption}
|
||||
value={currentMetricInspectionOptions.timeAggregationOption}
|
||||
onChange={(value): void => {
|
||||
dispatchMetricInspectionOptions({
|
||||
type: 'SET_TIME_AGGREGATION_OPTION',
|
||||
payload: value,
|
||||
});
|
||||
// set the time aggregation interval to the default value if it is not set
|
||||
if (!metricInspectionOptions.timeAggregationInterval) {
|
||||
if (!currentMetricInspectionOptions.timeAggregationInterval) {
|
||||
dispatchMetricInspectionOptions({
|
||||
type: 'SET_TIME_AGGREGATION_INTERVAL',
|
||||
payload: getDefaultTimeAggregationInterval(
|
||||
@@ -205,7 +194,7 @@ export function MetricTimeAggregation({
|
||||
<Input
|
||||
type="number"
|
||||
className="no-arrows-input"
|
||||
value={metricInspectionOptions.timeAggregationInterval}
|
||||
value={currentMetricInspectionOptions.timeAggregationInterval}
|
||||
placeholder="Select interval..."
|
||||
suffix="seconds"
|
||||
onChange={(e): void => {
|
||||
@@ -224,7 +213,7 @@ export function MetricTimeAggregation({
|
||||
|
||||
export function MetricSpaceAggregation({
|
||||
spaceAggregationLabels,
|
||||
metricInspectionOptions,
|
||||
currentMetricInspectionOptions,
|
||||
dispatchMetricInspectionOptions,
|
||||
inspectionStep,
|
||||
}: MetricSpaceAggregationProps): JSX.Element {
|
||||
@@ -243,7 +232,7 @@ export function MetricSpaceAggregation({
|
||||
<div className="metric-space-aggregation-content">
|
||||
<div className="metric-space-aggregation-content-left">
|
||||
<Select
|
||||
value={metricInspectionOptions.spaceAggregationOption}
|
||||
value={currentMetricInspectionOptions.spaceAggregationOption}
|
||||
placeholder="Select option"
|
||||
onChange={(value): void => {
|
||||
dispatchMetricInspectionOptions({
|
||||
@@ -266,7 +255,7 @@ export function MetricSpaceAggregation({
|
||||
mode="multiple"
|
||||
style={{ width: '100%' }}
|
||||
placeholder="Search for attributes..."
|
||||
value={metricInspectionOptions.spaceAggregationLabels}
|
||||
value={currentMetricInspectionOptions.spaceAggregationLabels}
|
||||
onChange={(value): void => {
|
||||
dispatchMetricInspectionOptions({
|
||||
type: 'SET_SPACE_AGGREGATION_LABELS',
|
||||
@@ -322,7 +311,7 @@ export function applyFilters(
|
||||
|
||||
export function applyTimeAggregation(
|
||||
inspectMetricsTimeSeries: InspectMetricsSeries[],
|
||||
metricInspectionOptions: MetricInspectionOptions,
|
||||
appliedMetricInspectionOptions: InspectOptions,
|
||||
): {
|
||||
timeAggregatedSeries: InspectMetricsSeries[];
|
||||
timeAggregatedSeriesMap: Map<number, GraphPopoverData[]>;
|
||||
@@ -330,7 +319,7 @@ export function applyTimeAggregation(
|
||||
const {
|
||||
timeAggregationOption,
|
||||
timeAggregationInterval,
|
||||
} = metricInspectionOptions;
|
||||
} = appliedMetricInspectionOptions;
|
||||
|
||||
if (!timeAggregationInterval) {
|
||||
return {
|
||||
@@ -415,7 +404,7 @@ export function applyTimeAggregation(
|
||||
|
||||
export function applySpaceAggregation(
|
||||
inspectMetricsTimeSeries: InspectMetricsSeries[],
|
||||
metricInspectionOptions: MetricInspectionOptions,
|
||||
appliedMetricInspectionOptions: InspectOptions,
|
||||
): {
|
||||
aggregatedSeries: InspectMetricsSeries[];
|
||||
spaceAggregatedSeriesMap: Map<string, InspectMetricsSeries[]>;
|
||||
@@ -425,7 +414,7 @@ export function applySpaceAggregation(
|
||||
|
||||
inspectMetricsTimeSeries.forEach((series) => {
|
||||
// Create composite key from selected labels
|
||||
const key = metricInspectionOptions.spaceAggregationLabels
|
||||
const key = appliedMetricInspectionOptions.spaceAggregationLabels
|
||||
.map((label) => `${label}:${series.labels[label]}`)
|
||||
.join(',');
|
||||
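The composite-key grouping above, sketched on its own with a simplified series shape (the real `InspectMetricsSeries` carries more fields):

```typescript
interface SeriesSketch {
	labels: Record<string, string>;
	values: { timestamp: number; value: string }[];
}

// Bucket series by the applied space-aggregation labels; each bucket is later
// reduced into one series by the selected option (sum_by, avg_by, ...).
function groupBySpaceAggregationLabels(
	series: SeriesSketch[],
	spaceAggregationLabels: string[],
): Map<string, SeriesSketch[]> {
	const groups = new Map<string, SeriesSketch[]>();
	series.forEach((s) => {
		const key = spaceAggregationLabels
			.map((label) => `${label}:${s.labels[label]}`)
			.join(',');
		groups.set(key, [...(groups.get(key) ?? []), s]);
	});
	return groups;
}
```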
@@ -460,7 +449,7 @@ export function applySpaceAggregation(
|
||||
([timestamp, values]) => {
|
||||
let aggregatedValue: number;
|
||||
|
||||
switch (metricInspectionOptions.spaceAggregationOption) {
|
||||
switch (appliedMetricInspectionOptions.spaceAggregationOption) {
|
||||
case SpaceAggregationOptions.SUM_BY:
|
||||
aggregatedValue = values.reduce((sum, val) => sum + val, 0);
|
||||
break;
|
||||
@@ -714,11 +703,11 @@ export function getTimeSeriesLabel(
|
||||
export function HoverPopover({
|
||||
options,
|
||||
step,
|
||||
metricInspectionOptions,
|
||||
appliedMetricInspectionOptions,
|
||||
}: {
|
||||
options: GraphPopoverOptions;
|
||||
step: InspectionStep;
|
||||
metricInspectionOptions: MetricInspectionOptions;
|
||||
appliedMetricInspectionOptions: InspectOptions;
|
||||
}): JSX.Element {
|
||||
const closestTimestamp = useMemo(() => {
|
||||
if (!options.timeSeries) {
|
||||
@@ -746,7 +735,7 @@ export function HoverPopover({
|
||||
const title = useMemo(() => {
|
||||
if (
|
||||
step === InspectionStep.COMPLETED &&
|
||||
metricInspectionOptions.spaceAggregationLabels.length === 0
|
||||
appliedMetricInspectionOptions.spaceAggregationLabels.length === 0
|
||||
) {
|
||||
return undefined;
|
||||
}
|
||||
@@ -760,7 +749,7 @@ export function HoverPopover({
|
||||
options.timeSeries,
|
||||
options.timeSeries?.strokeColor,
|
||||
);
|
||||
}, [step, options.timeSeries, metricInspectionOptions]);
|
||||
}, [step, options.timeSeries, appliedMetricInspectionOptions]);
|
||||
|
||||
return (
|
||||
<Card
|
||||
@@ -830,3 +819,26 @@ export function onGraphHover(
|
||||
timeSeries: series,
|
||||
});
|
||||
}
|
||||
|
||||
export function useMetricName(
|
||||
metricName: string | null,
|
||||
): {
|
||||
currentMetricName: string | null;
|
||||
setCurrentMetricName: (metricName: string | null) => void;
|
||||
appliedMetricName: string | null;
|
||||
setAppliedMetricName: (metricName: string | null) => void;
|
||||
} {
|
||||
const [currentMetricName, setCurrentMetricName] = useState<string | null>(
|
||||
metricName,
|
||||
);
|
||||
const [appliedMetricName, setAppliedMetricName] = useState<string | null>(
|
||||
metricName,
|
||||
);
|
||||
|
||||
return {
|
||||
currentMetricName,
|
||||
setCurrentMetricName,
|
||||
appliedMetricName,
|
||||
setAppliedMetricName,
|
||||
};
|
||||
}
|
||||
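A hedged usage sketch for the `useMetricName` hook added above; the import path and the metric name are assumptions for illustration, not taken from this diff:

```tsx
import { useMetricName } from './utils'; // assumed relative path

function MetricNamePickerSketch(): JSX.Element {
	const {
		currentMetricName,
		setCurrentMetricName,
		appliedMetricName,
		setAppliedMetricName,
	} = useMetricName('http_server_duration_bucket'); // illustrative metric

	return (
		<div>
			{/* editing only touches the draft name */}
			<input
				value={currentMetricName ?? ''}
				onChange={(e): void => setCurrentMetricName(e.target.value)}
			/>
			{/* applying promotes the draft to the name used for fetching */}
			<button
				type="button"
				onClick={(): void => setAppliedMetricName(currentMetricName)}
			>
				Apply
			</button>
			<span>{appliedMetricName}</span>
		</div>
	);
}
```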
@@ -1,27 +1,97 @@
|
||||
import { Tooltip } from 'antd';
|
||||
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
|
||||
import { Button, Tooltip } from 'antd';
|
||||
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
|
||||
import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';
|
||||
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
|
||||
import { HardHat, Info } from 'lucide-react';
|
||||
import { cloneDeep } from 'lodash-es';
|
||||
import { Info, Play } from 'lucide-react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import {
|
||||
IBuilderQuery,
|
||||
TagFilter,
|
||||
} from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { DataSource } from 'types/common/queryBuilder';
|
||||
|
||||
import { MetricsSearchProps } from './types';
|
||||
|
||||
function MetricsSearch({ query, onChange }: MetricsSearchProps): JSX.Element {
|
||||
function MetricsSearch({ onChange, query }: MetricsSearchProps): JSX.Element {
|
||||
const [contextQuery, setContextQuery] = useState<IBuilderQuery | undefined>(
|
||||
query,
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
setContextQuery(query);
|
||||
}, [query]);
|
||||
|
||||
const handleRunQuery = (expression: string): void => {
|
||||
let updatedContextQuery = cloneDeep(contextQuery);
|
||||
if (!updatedContextQuery) {
|
||||
return;
|
||||
}
|
||||
|
||||
const newFilters: TagFilter = {
|
||||
items: expression ? convertExpressionToFilters(expression) : [],
|
||||
op: 'AND',
|
||||
};
|
||||
updatedContextQuery = {
|
||||
...updatedContextQuery,
|
||||
filter: {
|
||||
...updatedContextQuery.filter,
|
||||
expression,
|
||||
},
|
||||
filters: {
|
||||
...updatedContextQuery.filters,
|
||||
...newFilters,
|
||||
op: updatedContextQuery.filters?.op ?? 'AND',
|
||||
},
|
||||
};
|
||||
setContextQuery(updatedContextQuery);
|
||||
|
||||
onChange(newFilters);
|
||||
};
|
||||
|
||||
const handleOnChange = (expression: string): void => {
|
||||
let updatedContextQuery = cloneDeep(contextQuery);
|
||||
if (updatedContextQuery) {
|
||||
updatedContextQuery = {
|
||||
...updatedContextQuery,
|
||||
filter: {
|
||||
...updatedContextQuery.filter,
|
||||
expression,
|
||||
},
|
||||
};
|
||||
setContextQuery(updatedContextQuery);
|
||||
}
|
||||
};
|
||||
|
||||
const handleStageAndRunQuery = (): void =>
|
||||
handleRunQuery(contextQuery?.filter?.expression || '');
|
||||
return (
|
||||
<div className="metrics-search-container">
|
||||
<div className="qb-search-container">
|
||||
<div data-testid="qb-search-container" className="qb-search-container">
|
||||
<Tooltip
|
||||
title="Use filters to refine metrics based on attributes. Example: service_name=api - Shows all metrics associated with the API service"
|
||||
placement="right"
|
||||
>
|
||||
<Info size={16} />
|
||||
</Tooltip>
|
||||
<QueryBuilderSearch
|
||||
query={query}
|
||||
onChange={onChange}
|
||||
suffixIcon={<HardHat size={16} />}
|
||||
isMetricsExplorer
|
||||
/>
|
||||
{contextQuery && (
|
||||
<QuerySearch
|
||||
onChange={handleOnChange}
|
||||
dataSource={DataSource.METRICS}
|
||||
queryData={contextQuery}
|
||||
onRun={handleRunQuery}
|
||||
isMetricsExplorer
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
<Button
|
||||
type="primary"
|
||||
onClick={handleStageAndRunQuery}
|
||||
className="stage-run-query"
|
||||
icon={<Play size={14} />}
|
||||
>
|
||||
Stage & Run Query
|
||||
</Button>
|
||||
<div className="metrics-search-options">
|
||||
<DateTimeSelectionV2
|
||||
showAutoRefresh={false}
|
||||
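The `MetricsSearch` component above parses the typed expression with `convertExpressionToFilters` and hands a `TagFilter` to its caller only when the query is staged and run. A minimal sketch of that conversion step, assuming only the two imports already shown in the diff:

```typescript
import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

// An empty expression clears the filter items; otherwise the expression string
// (for example `service_name=api`) is parsed into builder filter items.
function expressionToTagFilter(expression: string): TagFilter {
	return {
		items: expression ? convertExpressionToFilters(expression) : [],
		op: 'AND',
	};
}
```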
@@ -37,6 +37,7 @@
|
||||
|
||||
.metrics-search-container {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 16px;
|
||||
|
||||
.metrics-search-options {
|
||||
|
||||
@@ -3,6 +3,7 @@ import './Summary.styles.scss';
|
||||
|
||||
import * as Sentry from '@sentry/react';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
|
||||
import NoLogs from 'container/NoLogs/NoLogs';
|
||||
@@ -184,6 +185,7 @@ function Summary(): JSX.Element {
|
||||
() => ({
|
||||
...initialQueriesMap.metrics.builder.queryData[0],
|
||||
filters: queryFilters,
|
||||
filter: convertFiltersToExpression(queryFilters),
|
||||
}),
|
||||
[queryFilters],
|
||||
);
|
||||
@@ -290,18 +292,11 @@ function Summary(): JSX.Element {
|
||||
],
|
||||
);
|
||||
|
||||
console.log({
|
||||
isMetricsListDataEmpty,
|
||||
isMetricsTreeMapDataEmpty,
|
||||
treeMapData,
|
||||
sec: treeMapData?.payload?.data[heatmapView],
|
||||
});
|
||||
|
||||
return (
|
||||
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
|
||||
<div className="metrics-explorer-summary-tab">
|
||||
<MetricsSearch query={searchQuery} onChange={handleFilterChange} />
|
||||
{isMetricsLoading || isTreeMapLoading ? (
|
||||
{isMetricsLoading && isTreeMapLoading ? (
|
||||
<MetricsLoading />
|
||||
) : isMetricsListDataEmpty && isMetricsTreeMapDataEmpty ? (
|
||||
<NoLogs dataSource={DataSource.METRICS} />
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react';
|
||||
import { initialQueriesMap } from 'constants/queryBuilder';
|
||||
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
|
||||
|
||||
import MetricsSearch from '../MetricsSearch';
|
||||
|
||||
jest.mock('container/TopNav/DateTimeSelectionV2', () => ({
|
||||
__esModule: true,
|
||||
default: (): JSX.Element => (
|
||||
<div data-testid="date-time-selection">DateTime</div>
|
||||
),
|
||||
}));
|
||||
|
||||
const mockQuery: IBuilderQuery = {
|
||||
...initialQueriesMap.metrics.builder.queryData[0],
|
||||
};
|
||||
const mockOnChange = jest.fn();
|
||||
|
||||
describe('MetricsSearch', () => {
|
||||
it('should render the search bar, run button and date-time selector', () => {
|
||||
render(<MetricsSearch query={mockQuery} onChange={mockOnChange} />);
|
||||
expect(screen.getByText('DateTime')).toBeInTheDocument();
|
||||
expect(screen.getByText('Stage & Run Query')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('qb-search-container')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should call onChange with parsed filters when Stage & Run is clicked and expression is present', () => {
|
||||
render(<MetricsSearch query={mockQuery} onChange={mockOnChange} />);
|
||||
fireEvent.click(screen.getByText('Stage & Run Query'));
|
||||
expect(mockOnChange).toHaveBeenCalledWith({
|
||||
items: [],
|
||||
op: 'AND',
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -266,8 +266,8 @@ export const defaultMoreMenuItems: SidebarItem[] = [
|
||||
itemKey: 'external-apis',
|
||||
},
|
||||
{
|
||||
key: ROUTES.METER_EXPLORER,
|
||||
label: 'Meter Explorer',
|
||||
key: ROUTES.METER,
|
||||
label: 'Cost Meter',
|
||||
icon: <ChartArea size={16} />,
|
||||
isNew: false,
|
||||
isEnabled: false,
|
||||
|
||||
@@ -4,6 +4,7 @@ import logEvent from 'api/common/logEvent';
|
||||
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
|
||||
import Uplot from 'components/Uplot';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
|
||||
import { getLocalStorageGraphVisibilityState } from 'container/GridCardLayout/GridCard/utils';
|
||||
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
|
||||
@@ -54,6 +55,7 @@ function TimeSeriesView({
|
||||
isFilterApplied,
|
||||
dataSource,
|
||||
setWarning,
|
||||
panelType = PANEL_TYPES.TIME_SERIES,
|
||||
}: TimeSeriesViewProps): JSX.Element {
|
||||
const graphRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
@@ -191,6 +193,7 @@ function TimeSeriesView({
|
||||
maxTimeScale,
|
||||
softMax: null,
|
||||
softMin: null,
|
||||
panelType,
|
||||
tzDate: (timestamp: number) =>
|
||||
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value),
|
||||
timezone: timezone.value,
|
||||
@@ -259,6 +262,7 @@ interface TimeSeriesViewProps {
|
||||
isFilterApplied: boolean;
|
||||
dataSource: DataSource;
|
||||
setWarning?: Dispatch<SetStateAction<Warning | undefined>>;
|
||||
panelType?: PANEL_TYPES;
|
||||
}
|
||||
|
||||
TimeSeriesView.defaultProps = {
|
||||
@@ -266,6 +270,7 @@ TimeSeriesView.defaultProps = {
|
||||
yAxisUnit: 'short',
|
||||
error: undefined,
|
||||
setWarning: undefined,
|
||||
panelType: PANEL_TYPES.TIME_SERIES,
|
||||
};
|
||||
|
||||
export default TimeSeriesView;
|
||||
|
||||
@@ -234,7 +234,7 @@ export const routesToSkip = [
|
||||
ROUTES.UN_AUTHORIZED,
|
||||
ROUTES.NOT_FOUND,
|
||||
ROUTES.METER_EXPLORER,
|
||||
ROUTES.METER_EXPLORER_BASE,
|
||||
ROUTES.METER,
|
||||
ROUTES.METER_EXPLORER_VIEWS,
|
||||
ROUTES.SOMETHING_WENT_WRONG,
|
||||
];
|
||||
|
||||
@@ -25,6 +25,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
metricName,
|
||||
signalSource,
|
||||
}: QueryKeyRequestProps,
|
||||
options?: UseQueryOptions<
|
||||
AxiosResponse<QueryKeySuggestionsResponseProps>,
|
||||
@@ -42,6 +43,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
metricName,
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
signalSource,
|
||||
];
|
||||
}, [
|
||||
options?.queryKey,
|
||||
@@ -50,6 +52,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
metricName,
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
signalSource,
|
||||
]);
|
||||
return useQuery<AxiosResponse<QueryKeySuggestionsResponseProps>, AxiosError>({
|
||||
queryKey,
|
||||
@@ -60,6 +63,7 @@ export const useGetQueryKeySuggestions: UseGetQueryKeySuggestions = (
|
||||
metricName,
|
||||
fieldContext,
|
||||
fieldDataType,
|
||||
signalSource,
|
||||
}),
|
||||
...options,
|
||||
});
|
||||
|
||||
@@ -2,19 +2,25 @@ import './MeterExplorer.styles.scss';
|
||||
|
||||
import RouteTab from 'components/RouteTab';
|
||||
import { TabRoutes } from 'components/RouteTab/types';
|
||||
import ROUTES from 'constants/routes';
|
||||
import history from 'lib/history';
|
||||
import { useLocation } from 'react-use';
|
||||
|
||||
import { Explorer, Views } from './constants';
|
||||
import { Explorer, Meter, Views } from './constants';
|
||||
|
||||
function MeterExplorerPage(): JSX.Element {
|
||||
const { pathname } = useLocation();
|
||||
|
||||
const routes: TabRoutes[] = [Explorer, Views];
|
||||
const routes: TabRoutes[] = [Meter, Explorer, Views];
|
||||
|
||||
return (
|
||||
<div className="meter-explorer-page">
|
||||
<RouteTab routes={routes} activeKey={pathname} history={history} />
|
||||
<RouteTab
|
||||
routes={routes}
|
||||
activeKey={pathname}
|
||||
history={history}
|
||||
defaultActiveKey={ROUTES.METER}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { TabRoutes } from 'components/RouteTab/types';
|
||||
import ROUTES from 'constants/routes';
|
||||
import BreakDownPage from 'container/MeterExplorer/Breakdown/BreakDown';
|
||||
import ExplorerPage from 'container/MeterExplorer/Explorer';
|
||||
import { Compass, TowerControl } from 'lucide-react';
|
||||
import SaveView from 'pages/SaveView';
|
||||
@@ -30,3 +31,14 @@ export const Views: TabRoutes = {
|
||||
route: ROUTES.METER_EXPLORER_VIEWS,
|
||||
key: ROUTES.METER_EXPLORER_VIEWS,
|
||||
};
|
||||
|
||||
export const Meter: TabRoutes = {
|
||||
Component: BreakDownPage,
|
||||
name: (
|
||||
<div className="tab-item">
|
||||
<TowerControl size={16} /> Meter
|
||||
</div>
|
||||
),
|
||||
route: ROUTES.METER,
|
||||
key: ROUTES.METER,
|
||||
};
|
||||
|
||||
@@ -59,7 +59,7 @@
|
||||
}
|
||||
|
||||
.signup-page-content {
|
||||
width: 720px;
|
||||
width: 540px;
|
||||
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
@@ -102,7 +102,7 @@
|
||||
flex-direction: column;
|
||||
|
||||
.ant-input {
|
||||
width: 60%;
|
||||
width: 100%;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -10,7 +10,6 @@ import afterLogin from 'AppRoutes/utils';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { useNotifications } from 'hooks/useNotifications';
|
||||
import history from 'lib/history';
|
||||
import { ArrowRight } from 'lucide-react';
|
||||
import { useEffect, useState } from 'react';
|
||||
import { useQuery } from 'react-query';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
@@ -23,7 +22,6 @@ import { FormContainer, Label } from './styles';
|
||||
import { isPasswordNotValidMessage, isPasswordValid } from './utils';
|
||||
|
||||
type FormValues = {
|
||||
firstName: string;
|
||||
email: string;
|
||||
organizationName: string;
|
||||
password: string;
|
||||
@@ -114,10 +112,9 @@ function SignUp(): JSX.Element {
|
||||
|
||||
const signUp = async (values: FormValues): Promise<void> => {
|
||||
try {
|
||||
const { organizationName, password, firstName, email } = values;
|
||||
const { organizationName, password, email } = values;
|
||||
const response = await signUpApi({
|
||||
email,
|
||||
name: firstName,
|
||||
orgDisplayName: organizationName,
|
||||
password,
|
||||
token: params.get('token') || undefined,
|
||||
@@ -142,11 +139,10 @@ function SignUp(): JSX.Element {
|
||||
|
||||
const acceptInvite = async (values: FormValues): Promise<void> => {
|
||||
try {
|
||||
const { password, email, firstName } = values;
|
||||
const { password, email } = values;
|
||||
await accept({
|
||||
password,
|
||||
token: params.get('token') || '',
|
||||
displayName: firstName,
|
||||
});
|
||||
const loginResponse = await loginApi({
|
||||
email,
|
||||
@@ -208,7 +204,6 @@ function SignUp(): JSX.Element {
|
||||
if (!isPasswordValid(values.password)) {
|
||||
logEvent('Account Creation Page - Invalid Password', {
|
||||
email: values.email,
|
||||
name: values.firstName,
|
||||
});
|
||||
setIsPasswordPolicyError(true);
|
||||
setLoading(false);
|
||||
@@ -219,7 +214,6 @@ function SignUp(): JSX.Element {
|
||||
await signUp(values);
|
||||
logEvent('Account Created Successfully', {
|
||||
email: values.email,
|
||||
name: values.firstName,
|
||||
});
|
||||
} else {
|
||||
await acceptInvite(values);
|
||||
@@ -235,11 +229,6 @@ function SignUp(): JSX.Element {
|
||||
})();
|
||||
};
|
||||
|
||||
const getIsNameVisible = (): boolean =>
|
||||
!(form.getFieldValue('firstName') === 0 && !isSignUp);
|
||||
|
||||
const isNameVisible = getIsNameVisible();
|
||||
|
||||
const handleValuesChange: (changedValues: Partial<FormValues>) => void = (
|
||||
changedValues,
|
||||
) => {
|
||||
@@ -260,7 +249,6 @@ function SignUp(): JSX.Element {
|
||||
loading ||
|
||||
!values.email ||
|
||||
(!precheck.sso && (!values.password || !values.confirmPassword)) ||
|
||||
(!isDetailsDisable && !values.firstName) ||
|
||||
confirmPasswordError ||
|
||||
isPasswordPolicyError
|
||||
);
|
||||
@@ -288,8 +276,8 @@ function SignUp(): JSX.Element {
|
||||
>
|
||||
<div className="signup-form-header">
|
||||
<Typography.Paragraph className="signup-form-header-text">
|
||||
Create your account to monitor, trace, and troubleshoot your applications
|
||||
effortlessly.
|
||||
You're almost in. Create a password to start monitoring your
|
||||
applications with SigNoz.
|
||||
</Typography.Paragraph>
|
||||
</div>
|
||||
|
||||
@@ -307,47 +295,22 @@ function SignUp(): JSX.Element {
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
|
||||
{isNameVisible && (
|
||||
<div className="first-name-container">
|
||||
<Label htmlFor="signupFirstName">Name</Label>{' '}
|
||||
<FormContainer.Item noStyle name="firstName">
|
||||
<Input
|
||||
placeholder="Your Name"
|
||||
required
|
||||
id="signupFirstName"
|
||||
disabled={isDetailsDisable && form.getFieldValue('firstName')}
|
||||
/>
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="org-name-container">
|
||||
<Label htmlFor="organizationName">Organization Name</Label>{' '}
|
||||
<FormContainer.Item noStyle name="organizationName">
|
||||
<Input
|
||||
placeholder="Your Company"
|
||||
id="organizationName"
|
||||
disabled={isDetailsDisable}
|
||||
/>
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
|
||||
{!precheck.sso && (
|
||||
<div className="password-section">
|
||||
<>
|
||||
<div className="password-container">
|
||||
<label htmlFor="Password">Password</label>{' '}
|
||||
<Label htmlFor="currentPassword">Password</Label>
|
||||
<FormContainer.Item noStyle name="password">
|
||||
<Input.Password required id="currentPassword" />
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
|
||||
<div className="password-container">
|
||||
<label htmlFor="ConfirmPassword">Confirm Password</label>{' '}
|
||||
<Label htmlFor="confirmPassword">Confirm Password</Label>
|
||||
<FormContainer.Item noStyle name="confirmPassword">
|
||||
<Input.Password required id="confirmPassword" />
|
||||
</FormContainer.Item>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
<div className="password-error-container">
|
||||
@@ -382,9 +345,9 @@ function SignUp(): JSX.Element {
|
||||
loading={loading}
|
||||
disabled={isValidForm()}
|
||||
className="periscope-btn primary next-btn"
|
||||
icon={<ArrowRight size={12} />}
|
||||
block
|
||||
>
|
||||
Sign Up
|
||||
Access My Workspace
|
||||
</Button>
|
||||
</div>
|
||||
</FormContainer>
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
export interface Props {
|
||||
name: string;
|
||||
orgDisplayName: string;
|
||||
email: string;
|
||||
password: string;
|
||||
|
||||
@@ -124,6 +124,6 @@ export const routePermission: Record<keyof typeof ROUTES, ROLES[]> = {
|
||||
API_MONITORING_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
MESSAGING_QUEUES_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER_EXPLORER: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER_EXPLORER_BASE: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
METER_EXPLORER_VIEWS: ['ADMIN', 'EDITOR', 'VIEWER'],
|
||||
};
|
||||
|
||||
@@ -2175,54 +2175,10 @@
|
||||
resolved "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz"
|
||||
integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==
|
||||
|
||||
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.14.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.17.2", "@babel/runtime@^7.17.8", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.19.0", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.4.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
|
||||
version "7.21.0"
|
||||
resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.21.0.tgz"
|
||||
integrity sha512-xwII0//EObnq89Ji5AKYQaRYiW/nZ3llSv29d49IuxPhKbtJoLP+9QUUZ4nVragQVtaVGeZrpB+ZtG/Pdy/POw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.13.11"
|
||||
|
||||
"@babel/runtime@^7.13.10":
|
||||
version "7.23.6"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.6.tgz#c05e610dc228855dc92ef1b53d07389ed8ab521d"
|
||||
integrity sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.14.6":
|
||||
version "7.22.15"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.22.15.tgz#38f46494ccf6cf020bd4eed7124b425e83e523b8"
|
||||
integrity sha512-T0O+aa+4w0u06iNmapipJXMV4HoUir03hpx3/YqXXhu9xim3w+dVphjFWl1OH8NbZHw5Lbm9k45drDkgq2VNNA==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.18.6":
|
||||
version "7.27.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.27.0.tgz#fbee7cf97c709518ecc1f590984481d5460d4762"
|
||||
integrity sha512-VtPOkrdPHZsKc/clNqyi9WUA8TINkZ4cGk63UUE3u4pmB2k+ZMQRDuIOagv8UVd6j7k0T3+RRIb7beKTebNbcw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2":
|
||||
version "7.23.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.2.tgz#062b0ac103261d68a966c4c7baf2ae3e62ec3885"
|
||||
integrity sha512-mM8eg4yl5D6i3lu2QKPuPH4FArvJ8KhTofbE7jwMUv9KX5mBvwPAqnV3MlyBNqdp9RyRKP6Yck8TrfYrPvX3bg==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.3.1":
|
||||
version "7.23.1"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.1.tgz#72741dc4d413338a91dcb044a86f3c0bc402646d"
|
||||
integrity sha512-hC2v6p8ZSI/W0HUzh3V8C5g+NwSKzKPtJwSpTjwl0o297GP9+ZLQSkdvHz46CM3LqyoXxq+5G9komY+eSqSO0g==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
|
||||
"@babel/runtime@^7.7.6":
|
||||
version "7.26.0"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1"
|
||||
integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.14.0"
|
||||
"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.1", "@babel/runtime@^7.10.4", "@babel/runtime@^7.11.1", "@babel/runtime@^7.11.2", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.14.5", "@babel/runtime@^7.14.6", "@babel/runtime@^7.15.4", "@babel/runtime@^7.16.3", "@babel/runtime@^7.16.7", "@babel/runtime@^7.17.2", "@babel/runtime@^7.17.8", "@babel/runtime@^7.18.0", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.6", "@babel/runtime@^7.19.0", "@babel/runtime@^7.20.0", "@babel/runtime@^7.20.7", "@babel/runtime@^7.21.0", "@babel/runtime@^7.22.5", "@babel/runtime@^7.23.2", "@babel/runtime@^7.3.1", "@babel/runtime@^7.4.2", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.2", "@babel/runtime@^7.7.2", "@babel/runtime@^7.7.6", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
|
||||
version "7.28.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.28.2.tgz#2ae5a9d51cc583bd1f5673b3bb70d6d819682473"
|
||||
integrity sha512-KHp2IflsnGywDjBWDkR9iEqiWSpc8GIi0lgTT3mOElT0PP1tG26P4tmFI2YvAdzgq9RGyoHZQEIEdZy6Ec5xCA==
|
||||
|
||||
"@babel/template@^7.18.10", "@babel/template@^7.20.7", "@babel/template@^7.3.3":
|
||||
version "7.20.7"
|
||||
@@ -15291,16 +15247,6 @@ regenerator-runtime@^0.11.0:
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
|
||||
integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==
|
||||
|
||||
regenerator-runtime@^0.13.11:
|
||||
version "0.13.11"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9"
|
||||
integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==
|
||||
|
||||
regenerator-runtime@^0.14.0:
|
||||
version "0.14.0"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45"
|
||||
integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA==
|
||||
|
||||
regenerator-transform@^0.15.1:
|
||||
version "0.15.1"
|
||||
resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.1.tgz"
|
||||
|
||||
@@ -110,6 +110,10 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any
|
||||
|
||||
if req.RequestType == qbtypes.RequestTypeTimeSeries && req.FormatOptions != nil && req.FormatOptions.FillGaps {
|
||||
for name := range typedResults {
|
||||
if req.SkipFillGaps(name) {
|
||||
continue
|
||||
}
|
||||
|
||||
funcs := []qbtypes.Function{{Name: qbtypes.FunctionNameFillZero}}
|
||||
funcs = q.prepareFillZeroArgsWithStep(funcs, req, req.StepIntervalForQuery(name))
|
||||
// empty time series if it doesn't exist
|
||||
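The hunk above makes gap filling per-query opt-out (`SkipFillGaps`) before the fill-zero function is applied with that query's own step. An illustrative sketch of the fill-zero idea, written in TypeScript rather than the Go above, with simplified shapes:

```typescript
// Fill missing buckets with zero between start and end at the step interval;
// existing points are kept as-is.
function fillZeroGaps(
	points: Map<number, number>, // unix seconds -> value
	startSec: number,
	endSec: number,
	stepSec: number,
): Array<{ timestamp: number; value: number }> {
	const filled: Array<{ timestamp: number; value: number }> = [];
	for (let ts = startSec; ts <= endSec; ts += stepSec) {
		filled.push({ timestamp: ts, value: points.get(ts) ?? 0 });
	}
	return filled;
}
```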
@@ -23,6 +23,10 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
var (
|
||||
intervalWarn = "Query %s is requesting aggregation interval %v seconds, which is smaller than the minimum allowed interval of %v seconds for selected time range. Using the minimum instead"
|
||||
)
|
||||
|
||||
type querier struct {
|
||||
logger *slog.Logger
|
||||
telemetryStore telemetrystore.TelemetryStore
|
||||
@@ -121,6 +125,8 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
PanelType: req.RequestType.StringValue(),
|
||||
}
|
||||
|
||||
intervalWarnings := []string{}
|
||||
|
||||
// First pass: collect all metric names that need temporality
|
||||
metricNames := make([]string, 0)
|
||||
for idx, query := range req.CompositeQuery.Queries {
|
||||
@@ -147,9 +153,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
|
||||
@@ -162,9 +170,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepInterval(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepInterval(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
|
||||
@@ -181,9 +191,11 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
}
|
||||
}
|
||||
if spec.StepInterval.Seconds() < float64(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)) {
|
||||
spec.StepInterval = qbtypes.Step{
|
||||
newStep := qbtypes.Step{
|
||||
Duration: time.Second * time.Duration(querybuilder.MinAllowedStepIntervalForMetric(req.Start, req.End)),
|
||||
}
|
||||
intervalWarnings = append(intervalWarnings, fmt.Sprintf(intervalWarn, spec.Name, spec.StepInterval.Seconds(), newStep.Duration.Seconds()))
|
||||
spec.StepInterval = newStep
|
||||
}
|
||||
}
|
||||
req.CompositeQuery.Queries[idx].Spec = spec
|
||||
@@ -290,6 +302,16 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
|
||||
qbResp, qbErr := q.run(ctx, orgID, queries, req, steps, event)
|
||||
if qbResp != nil {
|
||||
qbResp.QBEvent = event
|
||||
if len(intervalWarnings) != 0 && req.RequestType == qbtypes.RequestTypeTimeSeries {
|
||||
if qbResp.Warning == nil {
|
||||
qbResp.Warning = &qbtypes.QueryWarnData{
|
||||
Warnings: make([]qbtypes.QueryWarnDataAdditional, len(intervalWarnings)),
|
||||
}
|
||||
for idx := range intervalWarnings {
|
||||
qbResp.Warning.Warnings[idx] = qbtypes.QueryWarnDataAdditional{Message: intervalWarnings[idx]}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return qbResp, qbErr
|
||||
}
|
||||
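The querier changes above clamp any requested step below the minimum allowed for the time range, collect one warning per clamped query, and attach the warnings to the response only for time-series requests. A sketch of the clamping rule, again in TypeScript for illustration rather than the Go implementation:

```typescript
interface ClampedStep {
	stepSeconds: number;
	warning?: string;
}

// Keep the requested step when it is large enough, otherwise use the minimum
// and record a human-readable warning for the response.
function clampStepInterval(
	queryName: string,
	requestedSeconds: number,
	minAllowedSeconds: number,
): ClampedStep {
	if (requestedSeconds >= minAllowedSeconds) {
		return { stepSeconds: requestedSeconds };
	}
	return {
		stepSeconds: minAllowedSeconds,
		warning:
			`Query ${queryName} is requesting aggregation interval ${requestedSeconds} seconds, ` +
			`which is smaller than the minimum allowed interval of ${minAllowedSeconds} seconds ` +
			`for selected time range. Using the minimum instead`,
	};
}
```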
@@ -385,7 +385,7 @@ func (r *ClickHouseReader) buildResourceSubQuery(tags []model.TagQueryParam, svc
|
||||
return resourceSubQuery, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetServicesOG(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
|
||||
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
|
||||
|
||||
if r.indexTable == "" {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
|
||||
@@ -428,7 +428,7 @@ func (r *ClickHouseReader) GetServicesOG(ctx context.Context, queryParams *model
|
||||
|
||||
query := fmt.Sprintf(
|
||||
`SELECT
|
||||
toFloat64(quantileExact(0.99)(duration_nano)) as p99,
|
||||
quantile(0.99)(duration_nano) as p99,
|
||||
avg(duration_nano) as avgDuration,
|
||||
count(*) as numCalls
|
||||
FROM %s.%s
|
||||
@@ -510,274 +510,6 @@ func (r *ClickHouseReader) GetServicesOG(ctx context.Context, queryParams *model
|
||||
return &serviceItems, nil
|
||||
}
|
||||
|
||||
func (r *ClickHouseReader) GetServices(ctx context.Context, queryParams *model.GetServicesParams) (*[]model.ServiceItem, *model.ApiError) {
|
||||
if r.indexTable == "" {
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: ErrNoIndexTable}
|
||||
}
|
||||
|
||||
topLevelOps, apiErr := r.GetTopLevelOperations(ctx, *queryParams.Start, *queryParams.End, nil)
|
||||
if apiErr != nil {
|
||||
return nil, apiErr
|
||||
}
|
||||
// Build parallel arrays for arrayZip approach
|
||||
var ops []string
|
||||
var svcs []string
|
||||
serviceOperationsMap := make(map[string][]string)
|
||||
|
||||
for svc, opsList := range *topLevelOps {
|
||||
// Cap operations to 1500 per service (same as original logic)
|
||||
cappedOps := opsList[:int(math.Min(1500, float64(len(opsList))))]
|
||||
serviceOperationsMap[svc] = cappedOps
|
||||
|
||||
// Add to parallel arrays
|
||||
for _, op := range cappedOps {
|
||||
ops = append(ops, op)
|
||||
svcs = append(svcs, svc)
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf("Operation pairs count: %d\n", len(ops))
|
||||
|
||||
// Build resource subquery for all services, but only include our target services
|
||||
targetServices := make([]string, 0, len(*topLevelOps))
|
||||
for svc := range *topLevelOps {
|
||||
targetServices = append(targetServices, svc)
|
||||
}
|
||||
resourceSubQuery, err := r.buildResourceSubQueryForServices(queryParams.Tags, targetServices, *queryParams.Start, *queryParams.End)
|
||||
if err != nil {
|
||||
zap.L().Error("Error building resource subquery", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
// Build the optimized single query using arrayZip for tuple creation
|
||||
query := fmt.Sprintf(`
|
||||
SELECT
|
||||
resource_string_service$$name AS serviceName,
|
||||
toFloat64(quantileExact(0.99)(duration_nano)) AS p99,
|
||||
avg(duration_nano) AS avgDuration,
|
||||
count(*) AS numCalls,
|
||||
countIf(statusCode = 2) AS numErrors
|
||||
FROM %s.%s
|
||||
WHERE (name, resource_string_service$$name) IN arrayZip(@ops, @svcs)
|
||||
AND timestamp >= @start
|
||||
AND timestamp <= @end
|
||||
AND ts_bucket_start >= @start_bucket
|
||||
AND ts_bucket_start <= @end_bucket
|
||||
AND (resource_fingerprint GLOBAL IN %s)
|
||||
GROUP BY serviceName
|
||||
ORDER BY numCalls DESC`,
|
||||
r.TraceDB, r.traceTableName, resourceSubQuery,
|
||||
)
|
||||
|
||||
args := []interface{}{
|
||||
clickhouse.Named("start", strconv.FormatInt(queryParams.Start.UnixNano(), 10)),
|
||||
clickhouse.Named("end", strconv.FormatInt(queryParams.End.UnixNano(), 10)),
|
||||
clickhouse.Named("start_bucket", strconv.FormatInt(queryParams.Start.Unix()-1800, 10)),
|
||||
clickhouse.Named("end_bucket", strconv.FormatInt(queryParams.End.Unix(), 10)),
|
||||
// Important: wrap slices with clickhouse.Array for IN/array params
|
||||
clickhouse.Named("ops", ops),
|
||||
clickhouse.Named("svcs", svcs),
|
||||
}
|
||||
|
||||
fmt.Printf("Query: %s\n", query)
|
||||
|
||||
// Execute the single optimized query
|
||||
rows, err := r.db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
zap.L().Error("Error executing optimized services query", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
// Process results
|
||||
serviceItems := []model.ServiceItem{}
|
||||
|
||||
for rows.Next() {
|
||||
var serviceItem model.ServiceItem
|
||||
err := rows.ScanStruct(&serviceItem)
|
||||
if err != nil {
|
||||
zap.L().Error("Error scanning service item", zap.Error(err))
|
||||
continue
|
||||
}
|
||||
|
||||
// Skip services with zero calls (match original behavior)
|
||||
if serviceItem.NumCalls == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Add data warning for this service
|
||||
if ops, exists := serviceOperationsMap[serviceItem.ServiceName]; exists {
|
||||
serviceItem.DataWarning = model.DataWarning{
|
||||
TopLevelOps: ops,
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate derived fields
|
||||
serviceItem.CallRate = float64(serviceItem.NumCalls) / float64(queryParams.Period)
|
||||
if serviceItem.NumCalls > 0 {
|
||||
serviceItem.ErrorRate = float64(serviceItem.NumErrors) * 100 / float64(serviceItem.NumCalls)
|
||||
}
|
||||
|
||||
serviceItems = append(serviceItems, serviceItem)
|
||||
}
|
||||
|
||||
if err = rows.Err(); err != nil {
|
||||
zap.L().Error("Error iterating over service results", zap.Error(err))
|
||||
return nil, &model.ApiError{Typ: model.ErrorExec, Err: err}
|
||||
}
|
||||
|
||||
// Fetch results from the original GetServicesOG for comparison
|
||||
ogResults, ogErr := r.GetServicesOG(ctx, queryParams)
|
||||
if ogErr != nil {
|
||||
zap.L().Error("Error fetching OG service results", zap.Error(ogErr))
|
||||
} else {
|
||||
// Compare the optimized results with OG results
|
||||
ogMap := make(map[string]model.ServiceItem)
|
||||
for _, ogItem := range *ogResults {
|
||||
ogMap[ogItem.ServiceName] = ogItem
|
||||
}
|
||||
|
||||
for _, optItem := range serviceItems {
|
||||
if ogItem, exists := ogMap[optItem.ServiceName]; exists {
|
||||
// Compare key fields (NumCalls, NumErrors, etc.)
|
||||
if optItem.NumCalls != ogItem.NumCalls ||
|
||||
optItem.NumErrors != ogItem.NumErrors ||
|
||||
int64(optItem.Percentile99) != int64(ogItem.Percentile99) ||
|
||||
int64(optItem.AvgDuration) != int64(ogItem.AvgDuration) {
|
||||
fmt.Printf(
|
||||
"[Discrepancy] Service: %s | optNumCalls: %d, ogNumCalls: %d | optNumErrors: %d, ogNumErrors: %d | optP99: %.2f, ogP99: %.2f | optAvgDuration: %.2f, ogAvgDuration: %.2f\n",
|
||||
optItem.ServiceName,
|
||||
optItem.NumCalls, ogItem.NumCalls,
|
||||
optItem.NumErrors, ogItem.NumErrors,
|
||||
optItem.Percentile99, ogItem.Percentile99,
|
||||
optItem.AvgDuration, ogItem.AvgDuration,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
zap.L().Warn("Service present in optimized results but missing in OG results",
|
||||
zap.String("service", optItem.ServiceName))
|
||||
}
|
||||
}
|
||||
|
||||
// Check for services present in OG but missing in optimized
|
||||
optMap := make(map[string]struct{})
|
||||
for _, optItem := range serviceItems {
|
||||
optMap[optItem.ServiceName] = struct{}{}
|
||||
}
|
||||
for _, ogItem := range *ogResults {
|
||||
if _, exists := optMap[ogItem.ServiceName]; !exists {
|
||||
fmt.Printf("Service present in OG results but missing in optimized results: %s\n", ogItem.ServiceName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return &serviceItems, nil
|
||||
}
|
||||
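For reference, the experimental `GetServices` shown above — which this hunk appears to drop in favour of the original implementation — matched `(name, resource_string_service$$name)` pairs in a single query via `arrayZip(@ops, @svcs)`. A TypeScript illustration of how the parallel `ops`/`svcs` arrays line up, with the 1500-per-service cap following the comment in the Go code:

```typescript
// Flatten service -> top-level-operations into two parallel arrays so that
// ops[i] always belongs to svcs[i]; capped per service like the code above.
function buildOperationPairs(
	topLevelOps: Record<string, string[]>,
	capPerService = 1500,
): { ops: string[]; svcs: string[] } {
	const ops: string[] = [];
	const svcs: string[] = [];
	Object.entries(topLevelOps).forEach(([service, operations]) => {
		operations.slice(0, capPerService).forEach((operation) => {
			ops.push(operation);
			svcs.push(service);
		});
	});
	return { ops, svcs };
}
```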
// buildResourceSubQueryForServices builds a resource subquery that includes only specific services
|
||||
// This maintains service context while optimizing for multiple services in a single query
|
||||
func (r *ClickHouseReader) buildResourceSubQueryForServices(tags []model.TagQueryParam, targetServices []string, start, end time.Time) (string, error) {
|
||||
if len(targetServices) == 0 {
|
||||
return "", fmt.Errorf("no target services provided")
|
||||
}
|
||||
|
||||
if len(tags) == 0 {
|
||||
// For exact parity with per-service behavior, build via resource builder with only service filter
|
||||
filterSet := v3.FilterSet{}
|
||||
filterSet.Items = append(filterSet.Items, v3.FilterItem{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "service.name",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeResource,
|
||||
},
|
||||
Operator: v3.FilterOperatorIn,
|
||||
Value: targetServices,
|
||||
})
|
||||
|
||||
resourceSubQuery, err := resource.BuildResourceSubQuery(
|
||||
r.TraceDB,
|
||||
r.traceResourceTableV3,
|
||||
start.Unix()-1800,
|
||||
end.Unix(),
|
||||
&filterSet,
|
||||
[]v3.AttributeKey{},
|
||||
v3.AttributeKey{},
|
||||
false)
|
||||
if err != nil {
|
||||
zap.L().Error("Error building resource subquery for services", zap.Error(err))
|
||||
return "", err
|
||||
}
|
||||
return resourceSubQuery, nil
|
||||
}
|
||||
|
||||
// Convert tags to filter set
|
||||
filterSet := v3.FilterSet{}
|
||||
for _, tag := range tags {
|
||||
// Skip the collector id as we don't add it to traces
|
||||
if tag.Key == "signoz.collector.id" {
|
||||
continue
|
||||
}
|
||||
|
||||
var it v3.FilterItem
|
||||
it.Key = v3.AttributeKey{
|
||||
Key: tag.Key,
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeResource,
|
||||
}
|
||||
|
||||
switch tag.Operator {
|
||||
case model.NotInOperator:
|
||||
it.Operator = v3.FilterOperatorNotIn
|
||||
it.Value = tag.StringValues
|
||||
case model.InOperator:
|
||||
it.Operator = v3.FilterOperatorIn
|
||||
it.Value = tag.StringValues
|
||||
default:
|
||||
return "", fmt.Errorf("operator %s not supported", tag.Operator)
|
||||
}
|
||||
|
||||
filterSet.Items = append(filterSet.Items, it)
|
||||
}
|
||||
|
||||
// Add service filter to limit to our target services
|
||||
filterSet.Items = append(filterSet.Items, v3.FilterItem{
|
||||
Key: v3.AttributeKey{
|
||||
Key: "service.name",
|
||||
DataType: v3.AttributeKeyDataTypeString,
|
||||
Type: v3.AttributeKeyTypeResource,
|
||||
},
|
||||
Operator: v3.FilterOperatorIn,
|
||||
Value: targetServices,
|
||||
})
|
||||
|
||||
// Build resource subquery with service-specific filtering
|
||||
resourceSubQuery, err := resource.BuildResourceSubQuery(
|
||||
r.TraceDB,
|
||||
r.traceResourceTableV3,
|
||||
start.Unix()-1800,
|
||||
end.Unix(),
|
||||
&filterSet,
|
||||
[]v3.AttributeKey{},
|
||||
v3.AttributeKey{},
|
||||
false)
|
||||
if err != nil {
|
||||
zap.L().Error("Error building resource subquery for services", zap.Error(err))
|
||||
return "", err
|
||||
}
|
||||
return resourceSubQuery, nil
|
||||
}
|
||||
|
||||
// buildServiceInClause creates a properly quoted IN clause for service names
|
||||
func (r *ClickHouseReader) buildServiceInClause(services []string) string {
|
||||
var quotedServices []string
|
||||
for _, svc := range services {
|
||||
// Escape single quotes and wrap in quotes
|
||||
escapedSvc := strings.ReplaceAll(svc, "'", "\\'")
|
||||
quotedServices = append(quotedServices, fmt.Sprintf("'%s'", escapedSvc))
|
||||
}
|
||||
return strings.Join(quotedServices, ", ")
|
||||
}
|
||||
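`buildServiceInClause` from the same block quotes and escapes service names before joining them into an IN list; the same idea in TypeScript, for illustration only:

```typescript
// Escape single quotes and wrap each value in quotes, then join with commas,
// e.g. ["api", "o'neil"] -> "'api', 'o\'neil'".
function buildServiceInClause(services: string[]): string {
	return services.map((svc) => `'${svc.replace(/'/g, "\\'")}'`).join(', ');
}
```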
func getStatusFilters(query string, statusParams []string, excludeMap map[string]struct{}) string {
// status can only be two and if both are selected than they are equivalent to none selected
if _, ok := excludeMap["status"]; ok {
@@ -797,6 +529,7 @@ func getStatusFilters(query string, statusParams []string, excludeMap map[string
}
return query
}

func createTagQueryFromTagQueryParams(queryParams []model.TagQueryParam) []model.TagQuery {
tags := []model.TagQuery{}
for _, tag := range queryParams {
@@ -953,6 +686,7 @@ func addExistsOperator(item model.TagQuery, tagMapType string, not bool) (string
}
return fmt.Sprintf(" AND %s (%s)", notStr, strings.Join(tagOperatorPair, " OR ")), args
}

func (r *ClickHouseReader) GetEntryPointOperations(ctx context.Context, queryParams *model.GetTopOperationsParams) (*[]model.TopOperationsItem, error) {
// Step 1: Get top operations for the given service
topOps, err := r.GetTopOperations(ctx, queryParams)
@@ -1021,9 +755,9 @@ func (r *ClickHouseReader) GetTopOperations(ctx context.Context, queryParams *mo

query := fmt.Sprintf(`
SELECT
toFloat64(quantileExact(0.5)(durationNano)) as p50,
toFloat64(quantileExact(0.95)(durationNano)) as p95,
toFloat64(quantileExact(0.99)(durationNano)) as p99,
quantile(0.5)(durationNano) as p50,
quantile(0.95)(durationNano) as p95,
quantile(0.99)(durationNano) as p99,
COUNT(*) as numCalls,
countIf(status_code=2) as errorCount,
name
@@ -1505,11 +1239,11 @@ func (r *ClickHouseReader) GetDependencyGraph(ctx context.Context, queryParams *
SELECT
src as parent,
dest as child,
toFloat64(result[1]) AS p50,
toFloat64(result[2]) AS p75,
toFloat64(result[3]) AS p90,
toFloat64(result[4]) AS p95,
toFloat64(result[5]) AS p99,
result[1] AS p50,
result[2] AS p75,
result[3] AS p90,
result[4] AS p95,
result[5] AS p99,
sum(total_count) as callCount,
sum(total_count)/ @duration AS callRate,
sum(error_count)/sum(total_count) * 100 as errorRate
@@ -1541,6 +1275,7 @@ func getLocalTableName(tableName string) string {
return tableNameSplit[0] + "." + strings.Split(tableNameSplit[1], "distributed_")[1]

}

func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
// uuid is used as transaction id
uuidWithHyphen := uuid.New()
@@ -1681,6 +1416,7 @@ func (r *ClickHouseReader) setTTLLogs(ctx context.Context, orgID string, params
}(ttlPayload)
return &model.SetTTLResponseItem{Message: "move ttl has been successfully set up"}, nil
}

func (r *ClickHouseReader) setTTLTraces(ctx context.Context, orgID string, params *model.TTLParams) (*model.SetTTLResponseItem, *model.ApiError) {
// uuid is used as transaction id
uuidWithHyphen := uuid.New()
@@ -2321,6 +2057,7 @@ func (r *ClickHouseReader) ListErrors(ctx context.Context, queryParams *model.Li

return &getErrorResponses, nil
}

func (r *ClickHouseReader) CountErrors(ctx context.Context, queryParams *model.CountErrorsParams) (uint64, *model.ApiError) {

var errorCount uint64
@@ -2432,6 +2169,7 @@ func (r *ClickHouseReader) GetNextPrevErrorIDs(ctx context.Context, queryParams
return &getNextPrevErrorIDsResponse, nil

}

func (r *ClickHouseReader) getNextErrorID(ctx context.Context, queryParams *model.GetErrorParams) (string, time.Time, *model.ApiError) {

var getNextErrorIDReponse []model.NextPrevErrorIDsDBResponse
@@ -3092,6 +2830,7 @@ func (r *ClickHouseReader) GetMetricAttributeKeys(ctx context.Context, req *v3.F

return &response, nil
}

func (r *ClickHouseReader) GetMeterAttributeKeys(ctx context.Context, req *v3.FilterAttributeKeyRequest) (*v3.FilterAttributeKeyResponse, error) {
var query string
var err error
@@ -3166,6 +2905,7 @@ func (r *ClickHouseReader) GetMetricAttributeValues(ctx context.Context, req *v3

return &attributeValues, nil
}

func (r *ClickHouseReader) GetMetricMetadata(ctx context.Context, orgID valuer.UUID, metricName, serviceName string) (*v3.MetricMetadataResponse, error) {

unixMilli := common.PastDayRoundOff()
@@ -3837,6 +3577,7 @@ func readRow(vars []interface{}, columnNames []string, countOfNumberCols int) ([
}
return groupBy, groupAttributes, groupAttributesArray, nil
}

func readRowsForTimeSeriesResult(rows driver.Rows, vars []interface{}, columnNames []string, countOfNumberCols int) ([]*v3.Series, error) {
// when groupBy is applied, each combination of cartesian product
// of attribute values is a separate series. Each item in seriesToPoints
@@ -4632,6 +4373,7 @@ func (r *ClickHouseReader) ReadRuleStateHistoryByRuleID(

return timeline, nil
}

func (r *ClickHouseReader) ReadRuleStateHistoryTopContributorsByRuleID(
ctx context.Context, ruleID string, params *model.QueryRuleStateHistory) ([]model.RuleStateHistoryContributor, error) {
query := fmt.Sprintf(`SELECT
@@ -5220,6 +4962,7 @@ func (r *ClickHouseReader) GetActiveTimeSeriesForMetricName(ctx context.Context,
}
return timeSeries, nil
}

func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.UUID, req *metrics_explorer.SummaryListMetricsRequest) (*metrics_explorer.SummaryListMetricsResponse, *model.ApiError) {
var args []interface{}

@@ -5437,6 +5180,7 @@ func (r *ClickHouseReader) ListSummaryMetrics(ctx context.Context, orgID valuer.

return &response, nil
}

func (r *ClickHouseReader) GetMetricsTimeSeriesPercentage(ctx context.Context, req *metrics_explorer.TreeMapMetricsRequest) (*[]metrics_explorer.TreeMapResponseItem, *model.ApiError) {
var args []interface{}

@@ -6016,6 +5760,7 @@ func (r *ClickHouseReader) GetInspectMetrics(ctx context.Context, req *metrics_e
Series: &seriesList,
}, nil
}

func (r *ClickHouseReader) GetInspectMetricsFingerprints(ctx context.Context, attributes []string, req *metrics_explorer.InspectMetricsRequest) ([]string, *model.ApiError) {
// Build dynamic key selections and JSON extracts
var jsonExtracts []string
@@ -6188,6 +5933,7 @@ func (r *ClickHouseReader) CheckForLabelsInMetric(ctx context.Context, metricNam
}
return hasLE, nil
}

func (r *ClickHouseReader) GetUpdatedMetricsMetadata(ctx context.Context, orgID valuer.UUID, metricNames ...string) (map[string]*model.UpdateMetricsMetadata, *model.ApiError) {
cachedMetadata := make(map[string]*model.UpdateMetricsMetadata)
var missingMetrics []string

@@ -220,7 +220,7 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i := 0; i < len(args)-1; i++ {
origVal := args[i].String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(origVal)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", origVal)
}
@@ -238,9 +238,9 @@ func (v *exprVisitor) VisitFunctionExpr(fn *chparser.FunctionExpr) error {
for i, arg := range args {
orig := arg.String()
fieldKey := telemetrytypes.GetFieldKeyFromKeyText(orig)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType)
expr, exprArgs, err := CollisionHandledFinalExpr(context.Background(), &fieldKey, v.fieldMapper, v.conditionBuilder, v.fieldKeys, dataType, v.jsonBodyPrefix, v.jsonKeyToKey)
if err != nil {
return errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to get table field name for %q", orig)
return err
}
v.chArgs = append(v.chArgs, exprArgs...)
newCol := expr

@@ -23,6 +23,8 @@ func CollisionHandledFinalExpr(
cb qbtypes.ConditionBuilder,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
requiredDataType telemetrytypes.FieldDataType,
jsonBodyPrefix string,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) (string, []any, error) {

if requiredDataType != telemetrytypes.FieldDataTypeString &&
@@ -100,7 +102,15 @@ func CollisionHandledFinalExpr(
if err != nil {
return "", nil, err
}
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(field, dummyValue, colName)

if strings.HasPrefix(field.Name, jsonBodyPrefix) && jsonBodyPrefix != "" && jsonKeyToKey != nil {
// TODO(nitya): enable group by on body column?
return "", nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "Group by/Aggregation isn't available for the body column")
// colName, _ = jsonKeyToKey(context.Background(), field, qbtypes.FilterOperatorUnknown, dummyValue)
} else {
colName, _ = telemetrytypes.DataTypeCollisionHandledFieldName(field, dummyValue, colName)
}

stmts = append(stmts, colName)
}

@@ -43,13 +43,16 @@ func QueryStringToKeysSelectors(query string) []*telemetrytypes.FieldKeySelector
FieldDataType: key.FieldDataType,
})

if key.FieldContext != telemetrytypes.FieldContextUnspecified {
if key.FieldContext == telemetrytypes.FieldContextLog ||
key.FieldContext == telemetrytypes.FieldContextSpan ||
key.FieldContext == telemetrytypes.FieldContextMetric ||
key.FieldContext == telemetrytypes.FieldContextTrace {
// span.kind in metrics or metric.max_count in span etc.. should get the search on span.kind
// see note in where_clause_visitor.go in VisitKey(...)
keys = append(keys, &telemetrytypes.FieldKeySelector{
Name: key.FieldContext.StringValue() + "." + key.Name,
Signal: key.Signal,
FieldContext: key.FieldContext,
FieldContext: telemetrytypes.FieldContextAttribute, // do not keep the original context because this is attribute
FieldDataType: key.FieldDataType,
})
}

@@ -32,12 +32,6 @@ func TestQueryToKeys(t *testing.T) {
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
{
Name: "resource.service.name",
Signal: telemetrytypes.SignalUnspecified,
FieldContext: telemetrytypes.FieldContextResource,
FieldDataType: telemetrytypes.FieldDataTypeUnspecified,
},
},
},
{

@@ -303,7 +303,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}
@@ -449,7 +449,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
var allGroupByArgs []any

for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonBodyPrefix, b.jsonKeyToKey)
if err != nil {
return nil, err
}

@@ -355,3 +355,77 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
})
}
}

func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
cases := []struct {
name string
requestType qbtypes.RequestType
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
expected qbtypes.Statement
expectedErrContains string
}{
{
name: "Time series with limit and body group by",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
Signal: telemetrytypes.SignalLogs,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.LogAggregation{
{
Expression: "count()",
},
},
Filter: &qbtypes.Filter{
Expression: "service.name = 'cartservice'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "body.status",
},
},
},
},
expectedErrContains: "Group by/Aggregation isn't available for the body column",
},
}

fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()

aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, "", nil)

resourceFilterStmtBuilder := resourceFilterStmtBuilder()

statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
BodyJSONStringSearchPrefix,
GetBodyJSONKey,
)

for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErrContains != "" {
require.Error(t, err)
require.Contains(t, err.Error(), c.expectedErrContains)
} else {
require.NoError(t, err)
require.Equal(t, c.expected.Query, q.Query)
require.Equal(t, c.expected.Args, q.Args)
require.Equal(t, c.expected.Warnings, q.Warnings)
}
})
}
}

@@ -202,7 +202,8 @@ func (t *telemetryMetaStore) getTracesKeys(ctx context.Context, fieldKeySelector
conds = append(conds, sb.And(fieldKeyConds...))
limit += fieldKeySelector.Limit
}
sb.Where(sb.Or(conds...))
// the span_attribute_keys has historically pushed the top level column as attributes
sb.Where(sb.Or(conds...)).Where("isColumn = false")
sb.GroupBy("tagKey", "tagType", "dataType")
if limit == 0 {
limit = 1000
@@ -403,7 +404,7 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
sb := sqlbuilder.Select(
"name AS tag_key",
fmt.Sprintf("'%s' AS tag_type", fieldContext.TagType()),
"datatype AS tag_data_type",
"lower(datatype) AS tag_data_type", // in logs, we had some historical data with capital and small case
fmt.Sprintf(`%d AS priority`, getPriorityForContext(fieldContext)),
).From(tblName)

@@ -75,8 +75,9 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(

if b.metricsStatementBuilder.CanShortCircuitDelta(query) {
// spatial_aggregation_cte directly for certain delta queries
frag, args := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables)
if frag != "" {
if frag, args, err := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -107,7 +108,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (string, []any) {
) (string, []any, error) {
var filterWhere *querybuilder.PreparedWhereClause
var err error
stepSec := int64(query.StepInterval.Seconds())
@@ -121,7 +122,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", []any{}
return "", []any{}, err
}
sb.SelectMore(col)
}
@@ -149,7 +150,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
Variables: variables,
})
if err != nil {
return "", []any{}
return "", []any{}, err
}
}
if filterWhere != nil {
@@ -163,7 +164,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}

func (b *meterQueryStatementBuilder) buildTemporalAggregationCTE(

@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"log/slog"
"os"

"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
@@ -17,6 +18,48 @@ import (
const (
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`

RateWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Assume linear growth to next point
(leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected
per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
ELSE
-- Normal case: calculate rate
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
(ts - lagInFrame(ts, 1) OVER rate_window)
END`

IncreaseWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Calculate the interpolated increase for this interval
((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)) *
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected: the increase is the current value
per_series_value
ELSE
-- Normal case: calculate increase
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
END`
)

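For illustration only: the CASE expressions above are easier to follow in plain code. The sketch below mirrors the RateWithInterpolation semantics over an in-memory series — the first point is interpolated from the one that follows it, a drop in the counter is treated as a reset, and everything else is a plain delta over elapsed time. The types and sample data are assumptions for the sketch, not SigNoz code, and timestamps are assumed to be strictly increasing.

package main

import "fmt"

type point struct {
	ts  float64 // seconds since epoch
	val float64 // cumulative counter value
}

func ratesWithInterpolation(series []point) []float64 {
	rates := make([]float64, len(series))
	for i, p := range series {
		switch {
		case i == 0:
			// First row: interpolate using the next value, assuming linear growth.
			if len(series) > 1 {
				next := series[1]
				rates[i] = (next.val - p.val) / (next.ts - p.ts)
			}
		case p.val-series[i-1].val < 0:
			// Counter reset detected: the whole current value counts as the increase.
			rates[i] = p.val / (p.ts - series[i-1].ts)
		default:
			// Normal case: delta divided by elapsed time.
			rates[i] = (p.val - series[i-1].val) / (p.ts - series[i-1].ts)
		}
	}
	return rates
}

func main() {
	// A counter that resets between t=90 and t=120.
	series := []point{{0, 10}, {30, 40}, {60, 70}, {90, 100}, {120, 5}}
	fmt.Println(ratesWithInterpolation(series)) // [1 1 1 1 0.16666666666666666]
}
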
type MetricQueryStatementBuilder struct {
@@ -71,6 +114,7 @@ func GetKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation])
keySelectors[idx].MetricContext = &telemetrytypes.MetricContext{
MetricName: query.Aggregations[0].MetricName,
}
keySelectors[idx].Source = query.Source
}
return keySelectors
}
@@ -444,6 +488,9 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
if os.Getenv("INTERPOLATION_ENABLED") == "true" {
rateExpr = RateWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
@@ -456,6 +503,9 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
if os.Getenv("INTERPOLATION_ENABLED") == "true" {
incExpr = IncreaseWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {

@@ -490,7 +490,7 @@ func (b *traceQueryStatementBuilder) buildTimeSeriesQuery(
// Keep original column expressions so we can build the tuple
fieldNames := make([]string, 0, len(query.GroupBy))
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}
@@ -632,7 +632,7 @@ func (b *traceQueryStatementBuilder) buildScalarQuery(

var allGroupByArgs []any
for _, gb := range query.GroupBy {
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString)
expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, "", nil)
if err != nil {
return nil, err
}

@@ -39,6 +39,8 @@ func (m *alertMigrateV5) Migrate(ctx context.Context, ruleData map[string]any) b
return false
}

m.logger.InfoContext(ctx, "migrating alert", "alert_name", ruleData["alert"])

ruleCondition, ok := ruleData["condition"].(map[string]any)
if !ok {
m.logger.WarnContext(ctx, "didn't find condition")

@@ -347,7 +347,7 @@ func (mc *migrateCommon) createAggregations(ctx context.Context, queryData map[s
aggregateAttr, hasAttr := queryData["aggregateAttribute"].(map[string]any)
dataSource, _ := queryData["dataSource"].(string)

if aggregateOp == "noop" {
if aggregateOp == "noop" && dataSource != "metrics" {
return false
}

@@ -696,8 +696,16 @@ func (mc *migrateCommon) buildCondition(ctx context.Context, key, operator strin
case "<=":
return fmt.Sprintf("%s <= %s", key, formattedValue)
case "in", "IN":
if !strings.HasPrefix(formattedValue, "[") && !mc.isVariable(formattedValue) {
mc.logger.WarnContext(ctx, "multi-value operator in found with single value", "key", key, "formatted_value", formattedValue)
return fmt.Sprintf("%s = %s", key, formattedValue)
}
return fmt.Sprintf("%s IN %s", key, formattedValue)
case "nin", "NOT IN":
if !strings.HasPrefix(formattedValue, "[") && !mc.isVariable(formattedValue) {
mc.logger.WarnContext(ctx, "multi-value operator not in found with single value", "key", key, "formatted_value", formattedValue)
return fmt.Sprintf("%s != %s", key, formattedValue)
}
return fmt.Sprintf("%s NOT IN %s", key, formattedValue)
case "like", "LIKE":
return fmt.Sprintf("%s LIKE %s", key, formattedValue)
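For illustration only: the hunk above makes the multi-value operators tolerant of a single value. The stand-in sketch below shows that fallback in isolation — it is not the migrateCommon method, and the isVariable check is reduced to a boolean parameter.

package main

import (
	"fmt"
	"strings"
)

// buildInCondition degrades "in" to "=" when the formatted value is neither a
// list literal nor a dashboard/alert variable.
func buildInCondition(key, formattedValue string, isVariable bool) string {
	if !strings.HasPrefix(formattedValue, "[") && !isVariable {
		return fmt.Sprintf("%s = %s", key, formattedValue)
	}
	return fmt.Sprintf("%s IN %s", key, formattedValue)
}

func main() {
	fmt.Println(buildInCondition("service.name", "'frontend'", false))           // service.name = 'frontend'
	fmt.Println(buildInCondition("service.name", "['frontend', 'cart']", false)) // service.name IN ['frontend', 'cart']
	fmt.Println(buildInCondition("service.name", "$services", true))             // service.name IN $services
}
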
@@ -892,6 +900,7 @@ func (mc *migrateCommon) isVariable(s string) bool {
s = strings.TrimSpace(s)

patterns := []string{
`^\{.*\}$`, // {var} or {.var}
`^\{\{.*\}\}$`, // {{var}} or {{.var}}
`^\$.*$`, // $var or $service.name
`^\[\[.*\]\]$`, // [[var]] or [[.var]]
@@ -919,6 +928,11 @@ func (mc *migrateCommon) normalizeVariable(ctx context.Context, s string) string
varName = strings.TrimPrefix(varName, ".")
// this is probably going to be problem if user has $ as start of key
varName = strings.TrimPrefix(varName, "$")
} else if strings.HasPrefix(s, "{") && strings.HasSuffix(s, "}") { // {var} or {.var}
varName = strings.TrimPrefix(strings.TrimSuffix(s, "}"), "{")
varName = strings.TrimPrefix(varName, ".")
// this is probably going to be problem if user has $ as start of key
varName = strings.TrimPrefix(varName, "$")
} else if strings.HasPrefix(s, "[[") && strings.HasSuffix(s, "]]") {
// [[var]] or [[.var]]
varName = strings.TrimPrefix(strings.TrimSuffix(s, "]]"), "[[")

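For illustration only: the four regular expressions listed in the isVariable hunk above can be exercised in isolation. The patterns below are copied from the hunk; the helper name is a stand-in for the sketch, not the repo's.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

var variablePatterns = []string{
	`^\{.*\}$`,     // {var} or {.var}
	`^\{\{.*\}\}$`, // {{var}} or {{.var}}
	`^\$.*$`,       // $var or $service.name
	`^\[\[.*\]\]$`, // [[var]] or [[.var]]
}

func looksLikeVariable(s string) bool {
	s = strings.TrimSpace(s)
	for _, p := range variablePatterns {
		if regexp.MustCompile(p).MatchString(s) {
			return true
		}
	}
	return false
}

func main() {
	for _, v := range []string{"{{.service}}", "$env", "[[namespace]]", "frontend"} {
		fmt.Printf("%-14s -> %v\n", v, looksLikeVariable(v))
	}
	// {{.service}}   -> true
	// $env           -> true
	// [[namespace]]  -> true
	// frontend       -> false
}
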
@@ -37,6 +37,8 @@ func (m *dashboardMigrateV5) Migrate(ctx context.Context, dashboardData map[stri
return false
}

m.logger.InfoContext(ctx, "migrating dashboard", "dashboard_name", dashboardData["title"])

// if there is a white space in variable, replace it
if variables, ok := dashboardData["variables"].(map[string]any); ok {
for _, variable := range variables {
@@ -74,6 +76,13 @@ func (migration *dashboardMigrateV5) updateWidget(ctx context.Context, widget ma
return false
}

if qType, ok := query["queryType"]; ok {
if qType == "promql" || qType == "clickhouse_sql" {
migration.logger.InfoContext(ctx, "nothing to migrate for query type", "query_type", qType)
return false
}
}

builder, ok := query["builder"].(map[string]any)
if !ok {
return false

@@ -319,6 +319,23 @@ func (r *QueryRangeRequest) IsAnomalyRequest() (*QueryBuilderQuery[MetricAggrega
return &q, hasAnomaly
}

// We do not support fill gaps for these queries. Maybe support in future?
func (r *QueryRangeRequest) SkipFillGaps(name string) bool {
for _, query := range r.CompositeQuery.Queries {
switch spec := query.Spec.(type) {
case PromQuery:
if spec.Name == name {
return true
}
case ClickHouseQuery:
if spec.Name == name {
return true
}
}
}
return false
}

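For illustration only: a compressed sketch of how the new SkipFillGaps behaves. The stand-in types below are assumptions for the sketch — the real qbtypes.PromQuery, ClickHouseQuery and CompositeQuery structs carry more fields — but the control flow matches the hunk: gap filling is skipped whenever the named query is a PromQL or ClickHouse SQL query.

package main

import "fmt"

type PromQuery struct{ Name string }
type ClickHouseQuery struct{ Name string }
type envelope struct{ Spec any }

func skipFillGaps(queries []envelope, name string) bool {
	for _, q := range queries {
		switch spec := q.Spec.(type) {
		case PromQuery:
			if spec.Name == name {
				return true
			}
		case ClickHouseQuery:
			if spec.Name == name {
				return true
			}
		}
	}
	return false
}

func main() {
	qs := []envelope{{Spec: PromQuery{Name: "A"}}, {Spec: ClickHouseQuery{Name: "B"}}}
	fmt.Println(skipFillGaps(qs, "A"), skipFillGaps(qs, "C")) // true false
}
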
// UnmarshalJSON implements custom JSON unmarshaling to disallow unknown fields
func (r *QueryRangeRequest) UnmarshalJSON(data []byte) error {
// Define a type alias to avoid infinite recursion

@@ -20,7 +20,7 @@ def zookeeper(
def create() -> types.TestContainerDocker:
version = request.config.getoption("--zookeeper-version")

container = DockerContainer(image=f"bitnami/zookeeper:{version}")
container = DockerContainer(image=f"signoz/zookeeper:{version}")
container.with_env("ALLOW_ANONYMOUS_LOGIN", "yes")
container.with_exposed_ports(2181)
container.with_network(network=network)