Mirror of https://github.com/SigNoz/signoz.git (synced 2026-03-13 16:52:07 +00:00)

Compare commits: `nv/6204...issue_4203` (11 commits)

| SHA1 |
|---|
| 28375c8c1e |
| dcc8173c79 |
| 4b4ef5ce58 |
| 5b8d5fbfd3 |
| 0271be11e6 |
| 92d220c4d9 |
| 0ed8169bad |
| ed553fb02e |
| 47daba3c17 |
| 2b3310809a |
| 542a648cc3 |
.github/CODEOWNERS (vendored)

```diff
@@ -127,12 +127,15 @@
 /frontend/src/pages/DashboardsListPage/ @SigNoz/pulse-frontend
 /frontend/src/container/ListOfDashboard/ @SigNoz/pulse-frontend

 # Dashboard Widget Page
 /frontend/src/pages/DashboardWidget/ @SigNoz/pulse-frontend
 /frontend/src/container/NewWidget/ @SigNoz/pulse-frontend

 ## Dashboard Page

 /frontend/src/pages/DashboardPage/ @SigNoz/pulse-frontend
 /frontend/src/container/DashboardContainer/ @SigNoz/pulse-frontend
 /frontend/src/container/GridCardLayout/ @SigNoz/pulse-frontend
 /frontend/src/container/NewWidget/ @SigNoz/pulse-frontend

 ## Public Dashboard Page
```
```diff
@@ -1768,19 +1768,19 @@ components:
         createdAt:
           format: date-time
           type: string
-        expires_at:
+        expiresAt:
           minimum: 0
           type: integer
         id:
           type: string
         key:
           type: string
-        last_used:
+        lastObservedAt:
           format: date-time
           type: string
         name:
           type: string
-        service_account_id:
+        serviceAccountId:
           type: string
         updatedAt:
           format: date-time
@@ -1788,9 +1788,9 @@ components:
       required:
         - id
         - key
-        - expires_at
-        - last_used
-        - service_account_id
+        - expiresAt
+        - lastObservedAt
+        - serviceAccountId
       type: object
     ServiceaccounttypesGettableFactorAPIKeyWithKey:
       properties:
@@ -1804,14 +1804,14 @@ components:
       type: object
     ServiceaccounttypesPostableFactorAPIKey:
       properties:
-        expires_at:
+        expiresAt:
           minimum: 0
           type: integer
         name:
           type: string
       required:
         - name
-        - expires_at
+        - expiresAt
       type: object
     ServiceaccounttypesPostableServiceAccount:
      properties:
@@ -1833,13 +1833,16 @@ components:
         createdAt:
           format: date-time
           type: string
+        deletedAt:
+          format: date-time
+          type: string
         email:
           type: string
         id:
           type: string
         name:
           type: string
-        orgID:
+        orgId:
           type: string
         roles:
           items:
@@ -1856,18 +1859,19 @@ components:
         - email
         - roles
         - status
-        - orgID
+        - orgId
+        - deletedAt
       type: object
     ServiceaccounttypesUpdatableFactorAPIKey:
       properties:
-        expires_at:
+        expiresAt:
           minimum: 0
           type: integer
         name:
           type: string
       required:
         - name
-        - expires_at
+        - expiresAt
       type: object
     ServiceaccounttypesUpdatableServiceAccount:
       properties:
```
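Net effect of these schema edits: the service-account API-key fields move from snake_case to camelCase (`expires_at` → `expiresAt`, `last_used` → `lastObservedAt`, `service_account_id` → `serviceAccountId`, `orgID` → `orgId`) and service accounts gain a required `deletedAt` timestamp. A hedged sketch of what a client sees under the new contract; the endpoint URL below is a placeholder, since only the schemas appear in this diff:

```ts
// Field names come from the schemas above; the URL is illustrative only.
interface PostableFactorAPIKey {
	name: string;
	expiresAt: number; // integer, minimum 0 (previously `expires_at`)
}

interface GettableFactorAPIKey {
	id: string;
	key: string;
	name: string;
	expiresAt: number;
	lastObservedAt: string; // ISO date-time (previously `last_used`)
	serviceAccountId: string; // previously `service_account_id`
}

async function createServiceAccountApiKey(
	baseUrl: string,
	body: PostableFactorAPIKey,
): Promise<GettableFactorAPIKey> {
	// Hypothetical path; the diff defines the payload shapes, not the route.
	const res = await fetch(`${baseUrl}/api/v1/service-accounts/keys`, {
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		body: JSON.stringify(body),
	});
	if (!res.ok) {
		throw new Error(`request failed: ${res.status}`);
	}
	return (await res.json()) as GettableFactorAPIKey;
}
```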
```diff
@@ -2,39 +2,45 @@ module base

 type organisation
   relations
-    define read: [user, role#assignee]
-    define update: [user, role#assignee]
+    define read: [user, serviceaccount, role#assignee]
+    define update: [user, serviceaccount, role#assignee]

 type user
   relations
-    define read: [user, role#assignee]
-    define update: [user, role#assignee]
-    define delete: [user, role#assignee]
+    define read: [user, serviceaccount, role#assignee]
+    define update: [user, serviceaccount, role#assignee]
+    define delete: [user, serviceaccount, role#assignee]
+
+type serviceaccount
+  relations
+    define read: [user, serviceaccount, role#assignee]
+    define update: [user, serviceaccount, role#assignee]
+    define delete: [user, serviceaccount, role#assignee]

 type anonymous

 type role
   relations
-    define assignee: [user, anonymous]
+    define assignee: [user, serviceaccount, anonymous]

-    define read: [user, role#assignee]
-    define update: [user, role#assignee]
-    define delete: [user, role#assignee]
+    define read: [user, serviceaccount, role#assignee]
+    define update: [user, serviceaccount, role#assignee]
+    define delete: [user, serviceaccount, role#assignee]

 type metaresources
   relations
-    define create: [user, role#assignee]
-    define list: [user, role#assignee]
+    define create: [user, serviceaccount, role#assignee]
+    define list: [user, serviceaccount, role#assignee]

 type metaresource
   relations
-    define read: [user, anonymous, role#assignee]
-    define update: [user, role#assignee]
-    define delete: [user, role#assignee]
+    define read: [user, serviceaccount, anonymous, role#assignee]
+    define update: [user, serviceaccount, role#assignee]
+    define delete: [user, serviceaccount, role#assignee]

-    define block: [user, role#assignee]
+    define block: [user, serviceaccount, role#assignee]


 type telemetryresource
   relations
-    define read: [user, role#assignee]
+    define read: [user, serviceaccount, role#assignee]
```
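Every edit in this model widens an existing relation to also accept the new `serviceaccount` subject type. The DSL reads as an OpenFGA-style authorization model; as a rough sketch only (the API URL, store ID, and tuple names below are placeholders, not from this diff), a check for a service account with the official JS SDK could look like:

```ts
import { OpenFgaClient } from '@openfga/sdk';

// Placeholders throughout: apiUrl, storeId, and the subject/object names
// are illustrative, not taken from the SigNoz codebase.
async function canServiceAccountRead(): Promise<boolean> {
	const fga = new OpenFgaClient({
		apiUrl: 'http://localhost:8080',
		storeId: '01H0EXAMPLESTOREID',
	});

	// After this model change, a subject like `serviceaccount:deploy-bot`
	// can satisfy `read` on an organisation, which previously accepted
	// only `user` and `role#assignee` subjects.
	const { allowed } = await fga.check({
		user: 'serviceaccount:deploy-bot',
		relation: 'read',
		object: 'organisation:acme',
	});
	return allowed ?? false;
}
```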
```diff
@@ -128,6 +128,7 @@ function PrivateRoute({ children }: PrivateRouteProps): JSX.Element {
     isAdmin &&
     (path === ROUTES.SETTINGS ||
       path === ROUTES.ORG_SETTINGS ||
+      path === ROUTES.MEMBERS_SETTINGS ||
       path === ROUTES.BILLING ||
       path === ROUTES.MY_SETTINGS);
```
```diff
@@ -29,7 +29,6 @@ import posthog from 'posthog-js';
 import { useAppContext } from 'providers/App/App';
 import { IUser } from 'providers/App/types';
 import { CmdKProvider } from 'providers/cmdKProvider';
-import { DashboardProvider } from 'providers/Dashboard/Dashboard';
 import { ErrorModalProvider } from 'providers/ErrorModalProvider';
 import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
 import { QueryBuilderProvider } from 'providers/QueryBuilder';
@@ -321,6 +320,19 @@ function App(): JSX.Element {
       // Session Replay
       replaysSessionSampleRate: 0.1, // This sets the sample rate at 10%. You may want to change it to 100% while in development and then sample at a lower rate in production.
       replaysOnErrorSampleRate: 1.0, // If you're not already sampling the entire session, change the sample rate to 100% when sampling sessions where errors occur.
+      beforeSend(event) {
+        const sessionReplayUrl = posthog.get_session_replay_url?.({
+          withTimestamp: true,
+        });
+        if (sessionReplayUrl) {
+          // eslint-disable-next-line no-param-reassign
+          event.contexts = {
+            ...event.contexts,
+            posthog: { session_replay_url: sessionReplayUrl },
+          };
+        }
+        return event;
+      },
     });

     setIsSentryInitialized(true);
@@ -371,28 +383,26 @@ function App(): JSX.Element {
     <PrivateRoute>
       <ResourceProvider>
         <QueryBuilderProvider>
-          <DashboardProvider>
-            <KeyboardHotkeysProvider>
-              <AppLayout>
-                <PreferenceContextProvider>
-                  <Suspense fallback={<Spinner size="large" tip="Loading..." />}>
-                    <Switch>
-                      {routes.map(({ path, component, exact }) => (
-                        <Route
-                          key={`${path}`}
-                          exact={exact}
-                          path={path}
-                          component={component}
-                        />
-                      ))}
-                      <Route exact path="/" component={Home} />
-                      <Route path="*" component={NotFound} />
-                    </Switch>
-                  </Suspense>
-                </PreferenceContextProvider>
-              </AppLayout>
-            </KeyboardHotkeysProvider>
-          </DashboardProvider>
+          <KeyboardHotkeysProvider>
+            <AppLayout>
+              <PreferenceContextProvider>
+                <Suspense fallback={<Spinner size="large" tip="Loading..." />}>
+                  <Switch>
+                    {routes.map(({ path, component, exact }) => (
+                      <Route
+                        key={`${path}`}
+                        exact={exact}
+                        path={path}
+                        component={component}
+                      />
+                    ))}
+                    <Route exact path="/" component={Home} />
+                    <Route path="*" component={NotFound} />
+                  </Switch>
+                </Suspense>
+              </PreferenceContextProvider>
+            </AppLayout>
+          </KeyboardHotkeysProvider>
         </QueryBuilderProvider>
       </ResourceProvider>
     </PrivateRoute>
```
```diff
@@ -2100,7 +2100,7 @@ export interface ServiceaccounttypesFactorAPIKeyDTO {
    * @type integer
    * @minimum 0
    */
-  expires_at: number;
+  expiresAt: number;
   /**
    * @type string
    */
@@ -2113,7 +2113,7 @@ export interface ServiceaccounttypesFactorAPIKeyDTO {
    * @type string
    * @format date-time
    */
-  last_used: Date;
+  lastObservedAt: Date;
   /**
    * @type string
    */
@@ -2121,7 +2121,7 @@ export interface ServiceaccounttypesFactorAPIKeyDTO {
   /**
    * @type string
    */
-  service_account_id: string;
+  serviceAccountId: string;
   /**
    * @type string
    * @format date-time
@@ -2145,7 +2145,7 @@ export interface ServiceaccounttypesPostableFactorAPIKeyDTO {
    * @type integer
    * @minimum 0
    */
-  expires_at: number;
+  expiresAt: number;
   /**
    * @type string
    */
@@ -2173,6 +2173,11 @@ export interface ServiceaccounttypesServiceAccountDTO {
    * @format date-time
    */
   createdAt?: Date;
+  /**
+   * @type string
+   * @format date-time
+   */
+  deletedAt: Date;
   /**
    * @type string
    */
@@ -2188,7 +2193,7 @@ export interface ServiceaccounttypesServiceAccountDTO {
   /**
    * @type string
    */
-  orgID: string;
+  orgId: string;
   /**
    * @type array
    */
@@ -2209,7 +2214,7 @@ export interface ServiceaccounttypesUpdatableFactorAPIKeyDTO {
    * @type integer
    * @minimum 0
    */
-  expires_at: number;
+  expiresAt: number;
   /**
    * @type string
    */
```
```diff
@@ -1,31 +1,6 @@
 .custom-time-picker {
   display: flex;
-  flex-direction: row;
-  align-items: center;
-  gap: 4px;
-
-  .zoom-out-btn {
-    display: flex;
-    align-items: center;
-    justify-content: center;
-    flex-shrink: 0;
-    color: var(--foreground);
-    border: 1px solid var(--border);
-    border-radius: 2px;
-    box-shadow: none;
-    padding: 10px;
-    height: 33px;
-
-    &:hover:not(:disabled) {
-      color: var(--bg-vanilla-100);
-      background: var(--primary);
-    }
-
-    &:disabled {
-      opacity: 0.5;
-      cursor: not-allowed;
-    }
-  }
+  flex-direction: column;

   .timeSelection-input {
     &:hover {
```
```diff
@@ -16,15 +16,6 @@ jest.mock('react-router-dom', () => {
   };
 });

-jest.mock('react-redux', () => ({
-  ...jest.requireActual('react-redux'),
-  useDispatch: jest.fn(() => jest.fn()),
-  useSelector: jest.fn(() => ({
-    minTime: 0,
-    maxTime: Date.now(),
-  })),
-}));
-
 jest.mock('providers/Timezone', () => {
   const actual = jest.requireActual('providers/Timezone');

```
```diff
@@ -7,11 +7,9 @@ import {
   useState,
 } from 'react';
-import { useLocation } from 'react-router-dom';
 import { Button } from '@signozhq/button';
 import { Input, InputRef, Popover, Tooltip } from 'antd';
 import cx from 'classnames';
 import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
-import { QueryParams } from 'constants/query';
 import { DateTimeRangeType } from 'container/TopNav/CustomDateTimeModal';
 import {
   FixedDurationSuggestionOptions,
@@ -19,11 +17,9 @@ import {
   RelativeDurationSuggestionOptions,
 } from 'container/TopNav/DateTimeSelectionV2/constants';
 import dayjs from 'dayjs';
-import { useZoomOut } from 'hooks/useZoomOut';
 import { isValidShortHandDateTimeFormat } from 'lib/getMinMax';
-import { isZoomOutDisabled } from 'lib/zoomOutUtils';
 import { defaultTo, isFunction, noop } from 'lodash-es';
-import { ChevronDown, ChevronUp, ZoomOut } from 'lucide-react';
+import { ChevronDown, ChevronUp } from 'lucide-react';
 import { useTimezone } from 'providers/Timezone';
 import { getTimeDifference, validateEpochRange } from 'utils/epochUtils';
 import { popupContainer } from 'utils/selectPopupContainer';
@@ -70,8 +66,6 @@ interface CustomTimePickerProps {
   showRecentlyUsed?: boolean;
-  minTime: number;
-  maxTime: number;
   /** When true, zoom-out button is hidden (e.g. in drawer/modal time selection) */
   isModalTimeSelection?: boolean;
 }

 function CustomTimePicker({
@@ -94,7 +88,6 @@ function CustomTimePicker({
   showRecentlyUsed = true,
-  minTime,
-  maxTime,
   isModalTimeSelection = false,
 }: CustomTimePickerProps): JSX.Element {
   const [
     selectedTimePlaceholderValue,
@@ -123,14 +116,6 @@ function CustomTimePicker({

   const [isOpenedFromFooter, setIsOpenedFromFooter] = useState(false);

-  const durationMs = (maxTime - minTime) / 1e6;
-  const zoomOutDisabled = showLiveLogs || isZoomOutDisabled(durationMs);
-
-  const handleZoomOut = useZoomOut({
-    isDisabled: zoomOutDisabled,
-    urlParamsToDelete: [QueryParams.activeLogId],
-  });
-
   // function to get selected time in Last 1m, Last 2h, Last 3d, Last 4w format
   // 1m, 2h, 3d, 4w -> Last 1 minute, Last 2 hours, Last 3 days, Last 4 weeks
   const getSelectedTimeRangeLabelInRelativeFormat = (
@@ -646,23 +631,6 @@ function CustomTimePicker({
         />
       </Popover>
     </Tooltip>
-    {!showLiveLogs && !isModalTimeSelection && (
-      <Tooltip
-        title={
-          zoomOutDisabled ? 'Zoom out time range is limited to 1 month' : 'Zoom out'
-        }
-      >
-        <span>
-          <Button
-            className="zoom-out-btn"
-            onClick={handleZoomOut}
-            disabled={zoomOutDisabled}
-            data-testid="zoom-out-btn"
-            prefixIcon={<ZoomOut size={14} />}
-          />
-        </span>
-      </Tooltip>
-    )}
   </div>
 );
}
```
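The removed code derived the selected range in milliseconds from the global store's nanosecond epoch values (`(maxTime - minTime) / 1e6`) and disabled zoom-out once the range reached roughly one month, per the tooltip text above and the test file deleted below. `lib/zoomOutUtils` itself is not shown in this comparison; a minimal sketch consistent with those call sites, with the 30-day threshold as an assumption:

```ts
// Sketch only: the real lib/zoomOutUtils is not part of this diff.
// minTime/maxTime in the global store are epoch nanoseconds, hence / 1e6.
const NS_PER_MS = 1e6;
const MS_PER_MONTH = 30 * 24 * 60 * 60 * 1000; // assumed cap; the deleted test disables at 31 days

export function rangeDurationMs(minTimeNs: number, maxTimeNs: number): number {
	return (maxTimeNs - minTimeNs) / NS_PER_MS;
}

export function isZoomOutDisabled(durationMs: number): boolean {
	return durationMs >= MS_PER_MONTH;
}
```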
```diff
@@ -1,169 +0,0 @@
-import { render, screen } from '@testing-library/react';
-import userEvent from '@testing-library/user-event';
-import { QueryParams } from 'constants/query';
-import { GlobalReducer } from 'types/reducer/globalTime';
-
-import CustomTimePicker from '../CustomTimePicker';
-
-const MS_PER_MIN = 60 * 1000;
-const NOW_MS = 1705312800000;
-
-const mockDispatch = jest.fn();
-const mockSafeNavigate = jest.fn();
-const mockUrlQueryDelete = jest.fn();
-const mockUrlQuerySet = jest.fn();
-
-interface MockAppState {
-  globalTime: Pick<GlobalReducer, 'minTime' | 'maxTime'>;
-}
-
-jest.mock('react-redux', () => ({
-  useDispatch: (): jest.Mock => mockDispatch,
-  useSelector: (selector: (state: MockAppState) => unknown): unknown => {
-    const mockState: MockAppState = {
-      globalTime: {
-        minTime: (NOW_MS - 15 * MS_PER_MIN) * 1e6,
-        maxTime: NOW_MS * 1e6,
-      },
-    };
-    return selector(mockState);
-  },
-}));
-
-jest.mock('hooks/useSafeNavigate', () => ({
-  useSafeNavigate: (): { safeNavigate: jest.Mock } => ({
-    safeNavigate: mockSafeNavigate,
-  }),
-}));
-
-interface MockUrlQuery {
-  delete: typeof mockUrlQueryDelete;
-  set: typeof mockUrlQuerySet;
-  get: () => null;
-  toString: () => string;
-}
-
-jest.mock('hooks/useUrlQuery', () => ({
-  __esModule: true,
-  default: (): MockUrlQuery => ({
-    delete: mockUrlQueryDelete,
-    set: mockUrlQuerySet,
-    get: (): null => null,
-    toString: (): string => 'relativeTime=45m',
-  }),
-}));
-
-jest.mock('providers/Timezone', () => ({
-  useTimezone: (): { timezone: { value: string; offset: string } } => ({
-    timezone: { value: 'UTC', offset: 'UTC' },
-  }),
-}));
-
-jest.mock('react-router-dom', () => ({
-  useLocation: (): { pathname: string } => ({ pathname: '/logs-explorer' }),
-}));
-
-const MS_PER_DAY = 24 * 60 * 60 * 1000;
-const now = Date.now();
-const defaultProps = {
-  onSelect: jest.fn(),
-  onError: jest.fn(),
-  selectedValue: '15m',
-  selectedTime: '15m',
-  onValidCustomDateChange: jest.fn(),
-  open: false,
-  setOpen: jest.fn(),
-  items: [
-    { value: '15m', label: 'Last 15 minutes' },
-    { value: '1h', label: 'Last 1 hour' },
-  ],
-  minTime: (now - 15 * 60 * 1000) * 1e6,
-  maxTime: now * 1e6,
-};
-
-describe('CustomTimePicker - zoom out button', () => {
-  beforeEach(() => {
-    jest.clearAllMocks();
-    jest.spyOn(Date, 'now').mockReturnValue(NOW_MS);
-  });
-
-  afterEach(() => {
-    jest.restoreAllMocks();
-  });
-
-  it('should render zoom out button when showLiveLogs is false', () => {
-    render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
-
-    expect(screen.getByTestId('zoom-out-btn')).toBeInTheDocument();
-  });
-
-  it('should not render zoom out button when showLiveLogs is true', () => {
-    render(<CustomTimePicker {...defaultProps} showLiveLogs={true} />);
-
-    expect(screen.queryByTestId('zoom-out-btn')).not.toBeInTheDocument();
-  });
-
-  it('should not render zoom out button when isModalTimeSelection is true', () => {
-    render(
-      <CustomTimePicker
-        {...defaultProps}
-        showLiveLogs={false}
-        isModalTimeSelection={true}
-      />,
-    );
-
-    expect(screen.queryByTestId('zoom-out-btn')).not.toBeInTheDocument();
-  });
-
-  it('should call handleZoomOut when zoom out button is clicked', async () => {
-    render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
-
-    const zoomOutBtn = screen.getByTestId('zoom-out-btn');
-    await userEvent.click(zoomOutBtn);
-
-    expect(mockDispatch).toHaveBeenCalled();
-    expect(mockUrlQuerySet).toHaveBeenCalledWith(QueryParams.relativeTime, '45m');
-    expect(mockSafeNavigate).toHaveBeenCalledWith(
-      expect.stringMatching(/\/logs-explorer\?relativeTime=45m/),
-    );
-  });
-
-  it('should use real ladder logic: 15m range zooms to 45m preset and updates URL', async () => {
-    render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
-
-    const zoomOutBtn = screen.getByTestId('zoom-out-btn');
-    await userEvent.click(zoomOutBtn);
-
-    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.startTime);
-    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.endTime);
-    expect(mockUrlQuerySet).toHaveBeenCalledWith(QueryParams.relativeTime, '45m');
-    expect(mockSafeNavigate).toHaveBeenCalledWith(
-      expect.stringMatching(/\/logs-explorer\?relativeTime=45m/),
-    );
-    expect(mockDispatch).toHaveBeenCalled();
-  });
-
-  it('should delete activeLogId when zoom out is clicked', async () => {
-    render(<CustomTimePicker {...defaultProps} showLiveLogs={false} />);
-
-    const zoomOutBtn = screen.getByTestId('zoom-out-btn');
-    await userEvent.click(zoomOutBtn);
-
-    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.activeLogId);
-  });
-
-  it('should disable zoom button when time range is >= 1 month', () => {
-    const now = Date.now();
-    render(
-      <CustomTimePicker
-        {...defaultProps}
-        minTime={(now - 31 * MS_PER_DAY) * 1e6}
-        maxTime={now * 1e6}
-        showLiveLogs={false}
-      />,
-    );
-
-    const zoomOutBtn = screen.getByTestId('zoom-out-btn');
-    expect(zoomOutBtn).toBeDisabled();
-  });
-});
```
```diff
@@ -30,14 +30,15 @@ export default function CustomDomainEditModal({
   onClearError,
   onSubmit,
 }: CustomDomainEditModalProps): JSX.Element {
-  const [value, setValue] = useState(customDomainSubdomain ?? '');
+  const initialSubdomain = customDomainSubdomain ?? '';
+  const [value, setValue] = useState(initialSubdomain);
   const [validationError, setValidationError] = useState<string | null>(null);

   useEffect(() => {
     if (isOpen) {
-      setValue(customDomainSubdomain ?? '');
+      setValue(initialSubdomain);
     }
-  }, [isOpen, customDomainSubdomain]);
+  }, [isOpen, initialSubdomain]);

   const handleClose = (): void => {
     setValidationError(null);
@@ -58,6 +59,11 @@
   };

   const handleSubmit = (): void => {
+    if (value === initialSubdomain) {
+      setValidationError('Input is unchanged');
+      return;
+    }
+
     if (!value) {
       setValidationError('This field is required');
       return;
@@ -84,7 +90,7 @@

   const hasError = Boolean(errorMessage);

-  const statusIcon = ((): JSX.Element => {
+  const statusIcon = ((): JSX.Element | null => {
     if (isLoading) {
       return (
         <LoaderCircle size={16} className="animate-spin edit-modal-status-icon" />
@@ -95,7 +101,9 @@
       return <CircleAlert size={16} color={Color.BG_CHERRY_500} />;
     }

-    return <CircleCheck size={16} color={Color.BG_FOREST_500} />;
+    return value && value.length >= 3 ? (
+      <CircleCheck size={16} color={Color.BG_FOREST_500} />
+    ) : null;
   })();

   return (
@@ -189,7 +197,7 @@
           color="primary"
           className="edit-modal-apply-btn"
           onClick={handleSubmit}
-          disabled={isLoading}
+          disabled={isLoading || value === initialSubdomain}
           loading={isLoading}
         >
           Apply Changes
```
```diff
@@ -81,6 +81,10 @@
     padding-left: 26px;
   }

+  .custom-domain-card-meta-row.workspace-name-hidden {
+    padding-left: 0;
+  }
+
   .custom-domain-card-meta-timezone {
     display: inline-flex;
     align-items: center;
@@ -117,32 +121,6 @@
     background: var(--l2-border);
     margin: 0;
   }
-
-  .custom-domain-card-bottom {
-    display: flex;
-    align-items: center;
-    gap: var(--spacing-5);
-    padding: var(--padding-3);
-  }
-
-  .custom-domain-card-license {
-    color: var(--l1-foreground);
-    font-size: var(--paragraph-base-400-font-size);
-    line-height: var(--line-height-20);
-    letter-spacing: -0.07px;
-  }
-
-  .custom-domain-plan-badge {
-    display: inline-flex;
-    align-items: center;
-    padding: 0 2px;
-    border-radius: 2px;
-    background: var(--l2-background);
-    color: var(--l2-foreground);
-    font-family: 'SF Mono', 'Fira Code', monospace;
-    font-size: var(--paragraph-base-400-font-size);
-    line-height: var(--line-height-20);
-  }
 }

 .workspace-url-trigger {
```
```diff
@@ -69,8 +69,9 @@ function DomainUpdateToast({
 }

 export default function CustomDomainSettings(): JSX.Element {
-  const { org, activeLicense } = useAppContext();
+  const { org } = useAppContext();
   const { timezone } = useTimezone();

   const [isEditModalOpen, setIsEditModalOpen] = useState(false);
   const [isPollingEnabled, setIsPollingEnabled] = useState(false);
   const [hosts, setHosts] = useState<ZeustypesHostDTO[] | null>(null);
@@ -175,7 +176,8 @@ export default function CustomDomainSettings(): JSX.Element {
     [hosts, activeHost],
   );

-  const planName = activeLicense?.plan?.name;
+  const workspaceName =
+    org?.[0]?.displayName || customDomainSubdomain || activeHost?.name;

   if (isLoadingHosts) {
     return (
@@ -191,106 +193,98 @@

   return (
     <>
       <div className="custom-domain-card">
-        <div className="custom-domain-card-top">
-          <div className="custom-domain-card-info">
+        <div className="custom-domain-card-top">
+          <div className="custom-domain-card-info">
+            {!!workspaceName && (
             <div className="custom-domain-card-name-row">
               <span className="beacon" />
-              <span className="custom-domain-card-org-name">
-                {org?.[0]?.displayName ? org?.[0]?.displayName : customDomainSubdomain}
-              </span>
+              <span className="custom-domain-card-org-name">{workspaceName}</span>
             </div>
+            )}

-          <div className="custom-domain-card-meta-row">
-            <Dropdown
-              trigger={['click']}
-              dropdownRender={(): JSX.Element => (
-                <div className="workspace-url-dropdown">
-                  <span className="workspace-url-dropdown-header">
-                    All Workspace URLs
-                  </span>
-                  <div className="workspace-url-dropdown-divider" />
-                  {sortedHosts.map((host) => {
-                    const isActive = host.name === activeHost?.name;
-                    return (
-                      <a
-                        key={host.name}
-                        href={host.url}
-                        target="_blank"
-                        rel="noopener noreferrer"
-                        className={`workspace-url-dropdown-item${
-                          isActive ? ' workspace-url-dropdown-item--active' : ''
-                        }`}
-                      >
-                        <span className="workspace-url-dropdown-item-label">
-                          {stripProtocol(host.url ?? '')}
-                        </span>
-                        {isActive ? (
-                          <Check size={14} className="workspace-url-dropdown-item-check" />
-                        ) : (
-                          <ExternalLink
-                            size={12}
-                            className="workspace-url-dropdown-item-external"
-                          />
-                        )}
-                      </a>
-                    );
-                  })}
-                </div>
-              )}
-            >
-              <Button
-                type="button"
-                size="xs"
-                className="workspace-url-trigger"
-                disabled={isFetchingHosts}
-              >
-                <Link2 size={12} />
-                <span>{stripProtocol(activeHost?.url ?? '')}</span>
-                <ChevronDown size={12} />
-              </Button>
-            </Dropdown>
-            <span className="custom-domain-card-meta-timezone">
-              <Clock size={11} />
-              {timezone.offset}
-            </span>
-          </div>
-        </div>

-        <Button
-          variant="solid"
-          size="sm"
-          className="custom-domain-edit-button"
-          prefixIcon={<FilePenLine size={12} />}
-          disabled={isFetchingHosts || isPollingEnabled}
-          onClick={(): void => setIsEditModalOpen(true)}
+            <div
+              className={`custom-domain-card-meta-row ${
+                !workspaceName ? 'workspace-name-hidden' : ''
+              }`}
             >
-          Edit workspace link
-        </Button>
+              <Dropdown
+                trigger={['click']}
+                dropdownRender={(): JSX.Element => (
+                  <div className="workspace-url-dropdown">
+                    <span className="workspace-url-dropdown-header">
+                      All Workspace URLs
+                    </span>
+                    <div className="workspace-url-dropdown-divider" />
+                    {sortedHosts.map((host) => {
+                      const isActive = host.name === activeHost?.name;
+                      return (
+                        <a
+                          key={host.name}
+                          href={host.url}
+                          target="_blank"
+                          rel="noopener noreferrer"
+                          className={`workspace-url-dropdown-item${
+                            isActive ? ' workspace-url-dropdown-item--active' : ''
+                          }`}
+                        >
+                          <span className="workspace-url-dropdown-item-label">
+                            {stripProtocol(host.url ?? '')}
+                          </span>
+                          {isActive ? (
+                            <Check size={14} className="workspace-url-dropdown-item-check" />
+                          ) : (
+                            <ExternalLink
+                              size={12}
+                              className="workspace-url-dropdown-item-external"
+                            />
+                          )}
+                        </a>
+                      );
+                    })}
+                  </div>
+                )}
+              >
+                <Button
+                  type="button"
+                  size="xs"
+                  className="workspace-url-trigger"
+                  disabled={isFetchingHosts}
+                >
+                  <Link2 size={12} />
+                  <span>{stripProtocol(activeHost?.url ?? '')}</span>
+                  <ChevronDown size={12} />
+                </Button>
+              </Dropdown>
+              <span className="custom-domain-card-meta-timezone">
+                <Clock size={11} />
+                {timezone.offset}
+              </span>
+            </div>
+          </div>

-      {isPollingEnabled && (
-        <Callout
-          type="info"
-          showIcon
-          className="custom-domain-callout"
-          size="small"
-          icon={<SolidAlertCircle size={13} color="primary" />}
-          message={`Updating your URL to ⎯ ${customDomainSubdomain}.${dnsSuffix}. This may take a few mins.`}
-        />
-      )}

-      <div className="custom-domain-card-divider" />

-      <div className="custom-domain-card-bottom">
-        <span className="beacon" />
-        <span className="custom-domain-card-license">
-          {planName && <code className="custom-domain-plan-badge">{planName}</code>}{' '}
-          license is currently active
-        </span>
-      </div>
+          <Button
+            variant="solid"
+            size="sm"
+            className="custom-domain-edit-button"
+            prefixIcon={<FilePenLine size={12} />}
+            disabled={isFetchingHosts || isPollingEnabled}
+            onClick={(): void => setIsEditModalOpen(true)}
+          >
+            Edit workspace link
+          </Button>
         </div>

+        {isPollingEnabled && (
+          <Callout
+            type="info"
+            showIcon
+            className="custom-domain-callout"
+            size="small"
+            icon={<SolidAlertCircle size={13} color="primary" />}
+            message={`Updating your URL to ⎯ ${customDomainSubdomain}.${dnsSuffix}. This may take a few mins.`}
+          />
+        )}

       <CustomDomainEditModal
         isOpen={isEditModalOpen}
         onClose={(): void => setIsEditModalOpen(false)}
```
```diff
@@ -239,4 +239,87 @@ describe('CustomDomainSettings', () => {
     const { container } = render(toastRenderer('test-id'));
     expect(container).toHaveTextContent(/myteam\.test\.cloud/i);
   });
+
+  describe('Workspace Name rendering', () => {
+    it('renders org displayName when available from appContext', async () => {
+      server.use(
+        rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
+          res(ctx.status(200), ctx.json(mockHostsResponse)),
+        ),
+      );
+
+      render(<CustomDomainSettings />, undefined, {
+        appContextOverrides: {
+          org: [{ id: 'xyz', displayName: 'My Org Name', createdAt: 0 }],
+        },
+      });
+
+      expect(await screen.findByText('My Org Name')).toBeInTheDocument();
+    });
+
+    it('falls back to customDomainSubdomain when org displayName is missing', async () => {
+      server.use(
+        rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
+          res(ctx.status(200), ctx.json(mockHostsResponse)),
+        ),
+      );
+
+      render(<CustomDomainSettings />, undefined, {
+        appContextOverrides: { org: [] },
+      });
+
+      expect(await screen.findByText('custom-host')).toBeInTheDocument();
+    });
+
+    it('falls back to activeHost.name when neither org name nor custom domain exists', async () => {
+      const onlyDefaultHostResponse = {
+        ...mockHostsResponse,
+        data: {
+          ...mockHostsResponse.data,
+          hosts: mockHostsResponse.data.hosts
+            ? [mockHostsResponse.data.hosts[0]]
+            : [],
+        },
+      };
+
+      server.use(
+        rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
+          res(ctx.status(200), ctx.json(onlyDefaultHostResponse)),
+        ),
+      );
+
+      render(<CustomDomainSettings />, undefined, {
+        appContextOverrides: { org: [] },
+      });
+
+      // 'accepted-starfish' is the default host's name
+      expect(await screen.findByText('accepted-starfish')).toBeInTheDocument();
+    });
+
+    it('does not render the card name row if workspaceName is totally falsy', async () => {
+      const emptyHostsResponse = {
+        ...mockHostsResponse,
+        data: {
+          ...mockHostsResponse.data,
+          hosts: [],
+        },
+      };
+
+      server.use(
+        rest.get(ZEUS_HOSTS_ENDPOINT, (_, res, ctx) =>
+          res(ctx.status(200), ctx.json(emptyHostsResponse)),
+        ),
+      );
+
+      const { container } = render(<CustomDomainSettings />, undefined, {
+        appContextOverrides: { org: [] },
+      });
+
+      await screen.findByRole('button', { name: /edit workspace link/i });
+
+      expect(
+        container.querySelector('.custom-domain-card-name-row'),
+      ).not.toBeInTheDocument();
+    });
+  });
 });
```
```diff
@@ -34,11 +34,6 @@ const mockSafeNavigate = jest.fn();
 jest.mock('react-router-dom', () => ({
   ...jest.requireActual('react-router-dom'),
   useLocation: jest.fn(),
-  useRouteMatch: jest.fn().mockReturnValue({
-    params: {
-      dashboardId: 4,
-    },
-  }),
 }));

 jest.mock(
@@ -69,7 +64,7 @@ describe('Dashboard landing page actions header tests', () => {
     (useLocation as jest.Mock).mockReturnValue(mockLocation);
     const { getByTestId } = render(
       <MemoryRouter initialEntries={[DASHBOARD_PATH]}>
-        <DashboardProvider>
+        <DashboardProvider dashboardId="4">
           <DashboardDescription
             handle={{
               active: false,
@@ -110,7 +105,7 @@ describe('Dashboard landing page actions header tests', () => {
     );
     const { getByTestId } = render(
       <MemoryRouter initialEntries={[DASHBOARD_PATH]}>
-        <DashboardProvider>
+        <DashboardProvider dashboardId="4">
           <DashboardDescription
             handle={{
               active: false,
@@ -149,7 +144,7 @@ describe('Dashboard landing page actions header tests', () => {

     const { getByText } = render(
       <MemoryRouter initialEntries={[DASHBOARD_PATH]}>
-        <DashboardProvider>
+        <DashboardProvider dashboardId="4">
           <DashboardDescription
             handle={{
               active: false,
@@ -199,8 +194,6 @@ describe('Dashboard landing page actions header tests', () => {
       setLayouts: jest.fn(),
       setSelectedDashboard: jest.fn(),
       updatedTimeRef: { current: null },
-      toScrollWidgetId: '',
-      setToScrollWidgetId: jest.fn(),
       updateLocalStorageDashboardVariables: jest.fn(),
       dashboardQueryRangeCalled: false,
       setDashboardQueryRangeCalled: jest.fn(),
```
```diff
@@ -9,7 +9,6 @@ import {
 } from 'hooks/dashboard/useDashboardVariables';
 import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
 import { updateDashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
 import {
   enqueueDescendantsOfVariable,
@@ -30,7 +29,7 @@ function DashboardVariableSelection(): JSX.Element | null {
     updateLocalStorageDashboardVariables,
   } = useDashboard();

-  const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
+  const { updateUrlVariable } = useVariablesFromUrl();

   const { dashboardVariables } = useDashboardVariables();
   const dashboardId = useDashboardVariablesSelector(
@@ -50,15 +49,6 @@ function DashboardVariableSelection(): JSX.Element | null {
     (state) => state.globalTime,
   );

-  useEffect(() => {
-    // Initialize variables with default values if not in URL
-    initializeDefaultVariables(
-      dashboardVariables,
-      getUrlVariables,
-      updateUrlVariable,
-    );
-  }, [getUrlVariables, updateUrlVariable, dashboardVariables]);
-
   // Memoize the order key to avoid unnecessary triggers
   const variableOrderKey = useMemo(() => {
     const queryVariableOrderKey = dependencyData?.order?.join(',') ?? '';
```
```diff
@@ -1,9 +1,9 @@
 import { renderHook } from '@testing-library/react';
-import { useDashboard } from 'providers/Dashboard/Dashboard';
+import { useScrollToWidgetIdStore } from 'providers/Dashboard/helpers/scrollToWidgetIdHelper';

 import { useScrollWidgetIntoView } from '../useScrollWidgetIntoView';

-jest.mock('providers/Dashboard/Dashboard');
+jest.mock('providers/Dashboard/helpers/scrollToWidgetIdHelper');

 type MockHTMLElement = {
   scrollIntoView: jest.Mock;
@@ -18,25 +18,35 @@ function createMockElement(): MockHTMLElement {
 }

 describe('useScrollWidgetIntoView', () => {
-  const mockedUseDashboard = useDashboard as jest.MockedFunction<
-    typeof useDashboard
+  const mockedUseScrollToWidgetIdStore = useScrollToWidgetIdStore as jest.MockedFunction<
+    typeof useScrollToWidgetIdStore
   >;

+  let mockElement: MockHTMLElement;
+  let ref: React.RefObject<HTMLDivElement>;
+  let setToScrollWidgetId: jest.Mock;
+
+  function mockStore(toScrollWidgetId: string): void {
+    const storeState = { toScrollWidgetId, setToScrollWidgetId };
+    mockedUseScrollToWidgetIdStore.mockImplementation(
+      (selector) =>
+        selector(
+          (storeState as unknown) as Parameters<typeof selector>[0],
+        ) as ReturnType<typeof useScrollToWidgetIdStore>,
+    );
+  }
+
   beforeEach(() => {
     jest.clearAllMocks();
+    mockElement = createMockElement();
+    ref = ({
+      current: mockElement,
+    } as unknown) as React.RefObject<HTMLDivElement>;
+    setToScrollWidgetId = jest.fn();
   });

   it('scrolls into view and focuses when toScrollWidgetId matches widget id', () => {
-    const setToScrollWidgetId = jest.fn();
-    const mockElement = createMockElement();
-    const ref = ({
-      current: mockElement,
-    } as unknown) as React.RefObject<HTMLDivElement>;
-
-    mockedUseDashboard.mockReturnValue(({
-      toScrollWidgetId: 'widget-id',
-      setToScrollWidgetId,
-    } as unknown) as ReturnType<typeof useDashboard>);
+    mockStore('widget-id');

     renderHook(() => useScrollWidgetIntoView('widget-id', ref));

@@ -49,16 +59,7 @@ describe('useScrollWidgetIntoView', () => {
   });

   it('does nothing when toScrollWidgetId does not match widget id', () => {
-    const setToScrollWidgetId = jest.fn();
-    const mockElement = createMockElement();
-    const ref = ({
-      current: mockElement,
-    } as unknown) as React.RefObject<HTMLDivElement>;
-
-    mockedUseDashboard.mockReturnValue(({
-      toScrollWidgetId: 'other-widget',
-      setToScrollWidgetId,
-    } as unknown) as ReturnType<typeof useDashboard>);
+    mockStore('other-widget');

     renderHook(() => useScrollWidgetIntoView('widget-id', ref));
```
```diff
@@ -1,5 +1,5 @@
 import { RefObject, useEffect } from 'react';
-import { useDashboard } from 'providers/Dashboard/Dashboard';
+import { useScrollToWidgetIdStore } from 'providers/Dashboard/helpers/scrollToWidgetIdHelper';

 /**
  * Scrolls the given widget container into view when the dashboard
@@ -11,7 +11,10 @@ export function useScrollWidgetIntoView<T extends HTMLElement>(
   widgetId: string,
   widgetContainerRef: RefObject<T>,
 ): void {
-  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
+  const toScrollWidgetId = useScrollToWidgetIdStore((s) => s.toScrollWidgetId);
+  const setToScrollWidgetId = useScrollToWidgetIdStore(
+    (s) => s.setToScrollWidgetId,
+  );

   useEffect(() => {
     if (toScrollWidgetId === widgetId) {
```
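The hook now reads `toScrollWidgetId` from a selector-based store instead of the dashboard context. The store module itself is not part of this comparison; the call shape (`useScrollToWidgetIdStore((s) => s.toScrollWidgetId)`) suggests a small zustand-style store, roughly:

```ts
import { create } from 'zustand';

// Sketch of providers/Dashboard/helpers/scrollToWidgetIdHelper,
// inferred from the selector-based call sites; not shown in this diff.
interface ScrollToWidgetIdState {
	toScrollWidgetId: string;
	setToScrollWidgetId: (widgetId: string) => void;
}

export const useScrollToWidgetIdStore = create<ScrollToWidgetIdState>()(
	(set) => ({
		toScrollWidgetId: '',
		setToScrollWidgetId: (widgetId): void => set({ toScrollWidgetId: widgetId }),
	}),
);
```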
```diff
@@ -1,5 +1,4 @@
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
-import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
 import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useResizeObserver } from 'hooks/useDimensions';
@@ -34,8 +33,6 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
   const isDarkMode = useIsDarkMode();
   const { timezone } = useTimezone();

-  useScrollWidgetIntoView(widget.id, graphRef);
-
   useEffect((): void => {
     const { startTime, endTime } = getTimeRange(queryResponse);
```
```diff
@@ -1,5 +1,4 @@
 import { useMemo, useRef } from 'react';
-import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
 import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useResizeObserver } from 'hooks/useDimensions';
@@ -32,8 +31,6 @@ function HistogramPanel(props: PanelWrapperProps): JSX.Element {
   const isDarkMode = useIsDarkMode();
   const { timezone } = useTimezone();

-  useScrollWidgetIntoView(widget.id, graphRef);
-
   const config = useMemo(() => {
     return prepareHistogramPanelConfig({
       widget,
```
```diff
@@ -2,7 +2,6 @@ import { useEffect, useMemo, useRef, useState } from 'react';
 import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
 import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
 import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
-import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
 import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useResizeObserver } from 'hooks/useDimensions';
@@ -33,8 +32,6 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
   const isDarkMode = useIsDarkMode();
   const { timezone } = useTimezone();

-  useScrollWidgetIntoView(widget.id, graphRef);
-
   useEffect((): void => {
     const { startTime, endTime } = getTimeRange(queryResponse);
```
```diff
@@ -10,6 +10,7 @@ import setRetentionApi from 'api/settings/setRetention';
 import setRetentionApiV2 from 'api/settings/setRetentionV2';
 import TextToolTip from 'components/TextToolTip';
 import CustomDomainSettings from 'container/CustomDomainSettings';
+import LicenseKeyRow from 'container/GeneralSettings/LicenseKeyRow/LicenseKeyRow';
 import GeneralSettingsCloud from 'container/GeneralSettingsCloud';
 import useComponentPermission from 'hooks/useComponentPermission';
 import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
@@ -81,7 +82,7 @@ function GeneralSettings({
     logsTtlValuesPayload,
   );

-  const { user } = useAppContext();
+  const { user, activeLicense } = useAppContext();

   const [setRetentionPermission] = useComponentPermission(
     ['set_retention_period'],
@@ -680,7 +681,15 @@ function GeneralSettings({
           </span>
         </div>

-        {showCustomDomainSettings && <CustomDomainSettings />}
+        {(showCustomDomainSettings || activeLicense?.key) && (
+          <div className="custom-domain-card">
+            {showCustomDomainSettings && <CustomDomainSettings />}
+            {showCustomDomainSettings && activeLicense?.key && (
+              <div className="custom-domain-card-divider" />
+            )}
+            {activeLicense?.key && <LicenseKeyRow />}
+          </div>
+        )}

         <div className="retention-controls-container">
           <div className="retention-controls-header">
```
```diff
@@ -0,0 +1,65 @@
+.license-key-row {
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  padding: var(--padding-2) var(--padding-3);
+  gap: var(--spacing-5);
+
+  &__left {
+    display: inline-flex;
+    align-items: center;
+    gap: 12px;
+    color: var(--l2-foreground);
+
+    svg {
+      flex-shrink: 0;
+    }
+  }
+
+  &__label {
+    color: var(--l2-foreground);
+    font-size: var(--paragraph-base-400-font-size);
+    line-height: var(--line-height-20);
+    letter-spacing: -0.07px;
+    flex-shrink: 0;
+  }
+
+  &__value {
+    display: inline-flex;
+    align-items: stretch;
+  }
+
+  &__code {
+    display: inline-flex;
+    align-items: center;
+    padding: 1px 2px;
+    border-radius: 2px 0 0 2px;
+    background: var(--l3-background);
+    border: 1px solid var(--l2-border);
+    color: var(--l2-foreground);
+    font-family: 'SF Mono', 'Fira Code', 'Fira Mono', monospace;
+    font-size: var(--paragraph-base-400-font-size);
+    line-height: var(--line-height-20);
+    white-space: nowrap;
+    margin-right: -1px;
+  }
+
+  &__copy-btn {
+    display: inline-flex;
+    align-items: center;
+    justify-content: center;
+    width: 26px;
+    padding: 1px 2px;
+    border-radius: 0 2px 2px 0;
+    background: var(--l3-background);
+    border: 1px solid var(--l2-border);
+    color: var(--l2-foreground);
+    cursor: pointer;
+    flex-shrink: 0;
+    height: 24px;
+
+    &:hover {
+      background: var(--l3-background-hover);
+    }
+  }
+}
```
```diff
@@ -0,0 +1,48 @@
+import { useCopyToClipboard } from 'react-use';
+import { Button } from '@signozhq/button';
+import { Copy, KeyRound } from '@signozhq/icons';
+import { toast } from '@signozhq/sonner';
+import { useAppContext } from 'providers/App/App';
+import { getMaskedKey } from 'utils/maskedKey';
+
+import './LicenseKeyRow.styles.scss';
+
+function LicenseKeyRow(): JSX.Element | null {
+  const { activeLicense } = useAppContext();
+  const [, copyToClipboard] = useCopyToClipboard();
+
+  if (!activeLicense?.key) {
+    return null;
+  }
+
+  const handleCopyLicenseKey = (text: string): void => {
+    copyToClipboard(text);
+    toast.success('License key copied to clipboard.', { richColors: true });
+  };
+
+  return (
+    <div className="license-key-row">
+      <span className="license-key-row__left">
+        <KeyRound size={14} />
+        <span className="license-key-row__label">SigNoz License Key</span>
+      </span>
+      <span className="license-key-row__value">
+        <code className="license-key-row__code">
+          {getMaskedKey(activeLicense.key)}
+        </code>
+        <Button
+          type="button"
+          size="xs"
+          aria-label="Copy license key"
+          data-testid="license-key-row-copy-btn"
+          className="license-key-row__copy-btn"
+          onClick={(): void => handleCopyLicenseKey(activeLicense.key)}
+        >
+          <Copy size={12} />
+        </Button>
+      </span>
+    </div>
+  );
+}
+
+export default LicenseKeyRow;
```
```diff
@@ -0,0 +1,61 @@
+import { render, screen, userEvent, waitFor } from 'tests/test-utils';
+
+import LicenseKeyRow from '../LicenseKeyRow';
+
+const mockCopyToClipboard = jest.fn();
+
+jest.mock('react-use', () => ({
+  __esModule: true,
+  useCopyToClipboard: (): [unknown, jest.Mock] => [null, mockCopyToClipboard],
+}));
+
+const mockToastSuccess = jest.fn();
+
+jest.mock('@signozhq/sonner', () => ({
+  toast: {
+    success: (...args: unknown[]): unknown => mockToastSuccess(...args),
+  },
+}));
+
+describe('LicenseKeyRow', () => {
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('renders nothing when activeLicense key is absent', () => {
+    const { container } = render(<LicenseKeyRow />, undefined, {
+      appContextOverrides: { activeLicense: null },
+    });
+
+    expect(container).toBeEmptyDOMElement();
+  });
+
+  it('renders label and masked key when activeLicense key exists', () => {
+    render(<LicenseKeyRow />, undefined, {
+      appContextOverrides: {
+        activeLicense: { key: 'abcdefghij' } as any,
+      },
+    });
+
+    expect(screen.getByText('SigNoz License Key')).toBeInTheDocument();
+    expect(screen.getByText('ab·······ij')).toBeInTheDocument();
+  });
+
+  it('calls copyToClipboard and shows success toast when clipboard is available', async () => {
+    const user = userEvent.setup({ pointerEventsCheck: 0 });
+
+    render(<LicenseKeyRow />);
+
+    await user.click(screen.getByRole('button', { name: /copy license key/i }));
+
+    await waitFor(() => {
+      expect(mockCopyToClipboard).toHaveBeenCalledWith('test-key');
+      expect(mockToastSuccess).toHaveBeenCalledWith(
+        'License key copied to clipboard.',
+        {
+          richColors: true,
+        },
+      );
+    });
+  });
+});
```
```diff
@@ -5,6 +5,7 @@ import logEvent from 'api/common/logEvent';
 import { DEFAULT_ENTITY_VERSION, ENTITY_VERSION_V5 } from 'constants/app';
 import { QueryParams } from 'constants/query';
 import { PANEL_TYPES } from 'constants/queryBuilder';
+import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
 import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
 import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
 import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
@@ -67,11 +68,7 @@ function GridCardGraph({
   const [isInternalServerError, setIsInternalServerError] = useState<boolean>(
     false,
   );
-  const {
-    toScrollWidgetId,
-    setToScrollWidgetId,
-    setDashboardQueryRangeCalled,
-  } = useDashboard();
+  const { setDashboardQueryRangeCalled } = useDashboard();

   const {
     minTime,
@@ -109,20 +106,11 @@ function GridCardGraph({
   // eslint-disable-next-line react-hooks/exhaustive-deps
   }, []);

-  const graphRef = useRef<HTMLDivElement>(null);
+  const widgetContainerRef = useRef<HTMLDivElement>(null);

-  const isVisible = useIntersectionObserver(graphRef, undefined, true);
+  const isVisible = useIntersectionObserver(widgetContainerRef, undefined, true);

-  useEffect(() => {
-    if (toScrollWidgetId === widget.id) {
-      graphRef.current?.scrollIntoView({
-        behavior: 'smooth',
-        block: 'center',
-      });
-      graphRef.current?.focus();
-      setToScrollWidgetId('');
-    }
-  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
+  useScrollWidgetIntoView(widget?.id || '', widgetContainerRef);

   const updatedQuery = widget?.query;

@@ -306,7 +294,7 @@
     : headerMenuList;

   return (
-    <div style={{ height: '100%', width: '100%' }} ref={graphRef}>
+    <div style={{ height: '100%', width: '100%' }} ref={widgetContainerRef}>
       {isEmptyLayout ? (
         <EmptyWidget />
       ) : (
```
```diff
@@ -5,7 +5,6 @@ import NewWidget from 'container/NewWidget';
 import { logsPaginationQueryRangeSuccessResponse } from 'mocks-server/__mockdata__/logs_query_range';
 import { server } from 'mocks-server/server';
 import { rest } from 'msw';
-import { DashboardProvider } from 'providers/Dashboard/Dashboard';
 import { PreferenceContextProvider } from 'providers/preferences/context/PreferenceContextProvider';
 import i18n from 'ReactI18';
 import { act, fireEvent, render, screen, waitFor } from 'tests/test-utils';
@@ -104,15 +103,13 @@ describe('LogsPanelComponent', () => {
   const renderComponent = async (): Promise<void> => {
     render(
       <I18nextProvider i18n={i18n}>
-        <DashboardProvider>
-          <PreferenceContextProvider>
-            <NewWidget
-              selectedGraph={PANEL_TYPES.LIST}
-              fillSpans={undefined}
-              yAxisUnit={undefined}
-            />
-          </PreferenceContextProvider>
-        </DashboardProvider>
+        <PreferenceContextProvider>
+          <NewWidget
+            dashboardId=""
+            selectedDashboard={undefined}
+            selectedGraph={PANEL_TYPES.LIST}
+          />
+        </PreferenceContextProvider>
       </I18nextProvider>,
     );
```
```diff
@@ -4,6 +4,7 @@ import { Typography } from 'antd';
 import { useNotifications } from 'hooks/useNotifications';
 import { Copy } from 'lucide-react';
 import { useAppContext } from 'providers/App/App';
+import { getMaskedKey } from 'utils/maskedKey';

 import './LicenseSection.styles.scss';

@@ -12,15 +13,6 @@ function LicenseSection(): JSX.Element | null {
   const { notifications } = useNotifications();
   const [, handleCopyToClipboard] = useCopyToClipboard();

-  const getMaskedKey = (key: string): string => {
-    if (!key || key.length < 4) {
-      return key || 'N/A';
-    }
-    return `${key.substring(0, 2)}********${key
-      .substring(key.length - 2)
-      .trim()}`;
-  };
-
   const handleCopyKey = (text: string): void => {
     handleCopyToClipboard(text);
     notifications.success({
```
```diff
@@ -271,7 +271,7 @@ describe('MySettings Flows', () => {
       },
     });

-    expect(within(container).getByText('ab********cd')).toBeInTheDocument();
+    expect(within(container).getByText('ab·······cd')).toBeInTheDocument();
   });

   it('Should not mask license key if it is too short', () => {
```
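The shared `utils/maskedKey` helper itself is not shown in this comparison. Based on the inline helper removed from LicenseSection above and the updated expectations (`'abcdefghij'` renders as `'ab·······ij'`, short keys stay unmasked), a plausible sketch:

```ts
// Sketch: mirrors the removed inline helper, with the asterisk run
// swapped for the seven middle dots the updated tests expect.
export function getMaskedKey(key: string): string {
	if (!key || key.length < 4) {
		return key || 'N/A'; // too short to mask, per the MySettings test
	}
	return `${key.substring(0, 2)}·······${key.substring(key.length - 2).trim()}`;
}
```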
@@ -8,28 +8,15 @@ import { QueryBuilderV2 } from 'components/QueryBuilderV2/QueryBuilderV2';
|
||||
import TextToolTip from 'components/TextToolTip';
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import { QBShortcuts } from 'constants/shortcuts/QBShortcuts';
|
||||
import {
|
||||
getDefaultWidgetData,
|
||||
PANEL_TYPE_TO_QUERY_TYPES,
|
||||
} from 'container/NewWidget/utils';
|
||||
import { PANEL_TYPE_TO_QUERY_TYPES } from 'container/NewWidget/utils';
|
||||
import RunQueryBtn from 'container/QueryBuilder/components/RunQueryBtn/RunQueryBtn';
|
||||
// import { QueryBuilder } from 'container/QueryBuilder';
|
||||
import { QueryBuilderProps } from 'container/QueryBuilder/QueryBuilder.interfaces';
|
||||
import { useKeyboardHotkeys } from 'hooks/hotkeys/useKeyboardHotkeys';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import { useShareBuilderUrl } from 'hooks/queryBuilder/useShareBuilderUrl';
|
||||
import { useIsDarkMode } from 'hooks/useDarkMode';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { defaultTo, isUndefined } from 'lodash-es';
|
||||
import { Atom, Terminal } from 'lucide-react';
|
||||
import { useDashboard } from 'providers/Dashboard/Dashboard';
|
||||
import {
|
||||
getNextWidgets,
|
||||
getPreviousWidgets,
|
||||
getSelectedWidgetIndex,
|
||||
} from 'providers/Dashboard/util';
|
||||
import { Widgets } from 'types/api/dashboard/getAll';
|
||||
import { Query } from 'types/api/queryBuilder/queryBuilderData';
|
||||
import { EQueryType } from 'types/common/dashboard';
|
||||
|
||||
import ClickHouseQueryContainer from './QueryBuilder/clickHouse';
|
||||
@@ -40,77 +27,25 @@ function QuerySection({
|
||||
selectedGraph,
|
||||
queryRangeKey,
|
||||
isLoadingQueries,
|
||||
selectedWidget,
|
||||
dashboardVersion,
|
||||
dashboardId,
|
||||
dashboardName,
|
||||
isNewPanel,
|
||||
}: QueryProps): JSX.Element {
|
||||
const {
|
||||
currentQuery,
|
||||
handleRunQuery: handleRunQueryFromQueryBuilder,
|
||||
redirectWithQueryBuilderData,
|
||||
} = useQueryBuilder();
|
||||
const urlQuery = useUrlQuery();
|
||||
const { registerShortcut, deregisterShortcut } = useKeyboardHotkeys();
|
||||
|
||||
const { selectedDashboard, setSelectedDashboard } = useDashboard();
|
||||
|
||||
const isDarkMode = useIsDarkMode();
|
||||
|
||||
const { widgets } = selectedDashboard?.data || {};
|
||||
|
||||
const getWidget = useCallback(() => {
|
||||
const widgetId = urlQuery.get('widgetId');
|
||||
return defaultTo(
|
||||
widgets?.find((e) => e.id === widgetId),
|
||||
getDefaultWidgetData(widgetId || '', selectedGraph),
|
||||
);
|
||||
}, [urlQuery, widgets, selectedGraph]);
|
||||
|
||||
const selectedWidget = getWidget() as Widgets;
|
||||
|
||||
const { query } = selectedWidget;
|
||||
|
||||
useShareBuilderUrl({ defaultValue: query });
|
||||
|
||||
const handleStageQuery = useCallback(
|
||||
(query: Query): void => {
|
||||
if (selectedDashboard === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const selectedWidgetIndex = getSelectedWidgetIndex(
|
||||
selectedDashboard,
|
||||
selectedWidget.id,
|
||||
);
|
||||
|
||||
const previousWidgets = getPreviousWidgets(
|
||||
selectedDashboard,
|
||||
selectedWidgetIndex,
|
||||
);
|
||||
|
||||
const nextWidgets = getNextWidgets(selectedDashboard, selectedWidgetIndex);
|
||||
|
||||
setSelectedDashboard({
|
||||
...selectedDashboard,
|
||||
data: {
|
||||
...selectedDashboard?.data,
|
||||
widgets: [
|
||||
...previousWidgets,
|
||||
{
|
||||
...selectedWidget,
|
||||
query,
|
||||
},
|
||||
...nextWidgets,
|
||||
],
|
||||
},
|
||||
});
|
||||
handleRunQueryFromQueryBuilder();
|
||||
},
|
||||
[
|
||||
selectedDashboard,
|
||||
selectedWidget,
|
||||
setSelectedDashboard,
|
||||
handleRunQueryFromQueryBuilder,
|
||||
],
|
||||
);
|
||||
|
||||
const handleQueryCategoryChange = useCallback(
|
||||
(qCategory: string): void => {
|
||||
const currentQueryType = qCategory as EQueryType;
|
||||
@@ -123,19 +58,16 @@ function QuerySection({
|
||||
);
|
||||
|
||||
const handleRunQuery = (): void => {
|
||||
const widgetId = urlQuery.get('widgetId');
|
||||
const isNewPanel = isUndefined(widgets?.find((e) => e.id === widgetId));
|
||||
|
||||
logEvent('Panel Edit: Stage and run query', {
|
||||
dataSource: currentQuery.builder?.queryData?.[0]?.dataSource,
|
||||
panelType: selectedWidget.panelTypes,
|
||||
queryType: currentQuery.queryType,
|
||||
widgetId: selectedWidget.id,
|
||||
dashboardId: selectedDashboard?.id,
|
||||
dashboardName: selectedDashboard?.data.title,
|
||||
dashboardId,
|
||||
dashboardName,
|
||||
isNewPanel,
|
||||
});
|
||||
handleStageQuery(currentQuery);
|
||||
handleRunQueryFromQueryBuilder();
|
||||
};
|
||||
|
||||
const filterConfigs: QueryBuilderProps['filterConfigs'] = useMemo(() => {
|
||||
@@ -164,7 +96,7 @@ function QuerySection({
|
||||
panelType={selectedGraph}
|
||||
filterConfigs={filterConfigs}
|
||||
showTraceOperator={selectedGraph !== PANEL_TYPES.LIST}
|
||||
version={selectedDashboard?.data?.version || 'v3'}
|
||||
version={dashboardVersion || 'v3'}
|
||||
isListViewPanel={selectedGraph === PANEL_TYPES.LIST}
|
||||
queryComponents={queryComponents}
|
||||
signalSourceChangeEnabled
|
||||
@@ -204,7 +136,7 @@ function QuerySection({
|
||||
queryComponents,
|
||||
selectedGraph,
|
||||
filterConfigs,
|
||||
selectedDashboard?.data?.version,
|
||||
dashboardVersion,
|
||||
isDarkMode,
|
||||
]);
|
||||
|
||||
@@ -261,6 +193,11 @@ interface QueryProps {
|
||||
selectedGraph: PANEL_TYPES;
|
||||
queryRangeKey?: QueryKey;
|
||||
isLoadingQueries?: boolean;
|
||||
selectedWidget: Widgets;
|
||||
dashboardVersion?: string;
|
||||
dashboardId?: string;
|
||||
dashboardName?: string;
|
||||
isNewPanel?: boolean;
|
||||
}
|
||||
|
||||
export default QuerySection;
|
||||
|
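The removed handleStageQuery above relied on splicing the edited widget back into the dashboard immutably via getSelectedWidgetIndex, getPreviousWidgets, and getNextWidgets. A minimal, self-contained sketch of that pattern (the types here are simplified placeholders, not the repo's real Dashboard/Widgets types):

// Sketch: replace one widget in a dashboard without mutating the original.
interface Widget {
  id: string;
  [key: string]: unknown;
}

interface DashboardLike {
  data: { widgets: Widget[] };
}

function replaceWidget(dashboard: DashboardLike, updated: Widget): DashboardLike {
  const index = dashboard.data.widgets.findIndex((w) => w.id === updated.id);
  if (index === -1) {
    return dashboard; // nothing to replace
  }
  return {
    ...dashboard,
    data: {
      ...dashboard.data,
      widgets: [
        ...dashboard.data.widgets.slice(0, index), // widgets before the edit
        updated,
        ...dashboard.data.widgets.slice(index + 1), // widgets after the edit
      ],
    },
  };
}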
@@ -30,6 +30,8 @@ function LeftContainer({
  setRequestData,
  setQueryResponse,
  enableDrillDown = false,
  selectedDashboard,
  isNewPanel = false,
}: WidgetGraphProps): JSX.Element {
  const { stagedQuery } = useQueryBuilder();

@@ -75,6 +77,11 @@ function LeftContainer({
      selectedGraph={selectedGraph}
      queryRangeKey={queryRangeKey}
      isLoadingQueries={queryResponse.isFetching}
      selectedWidget={selectedWidget}
      dashboardVersion={ENTITY_VERSION_V5}
      dashboardId={selectedDashboard?.id}
      dashboardName={selectedDashboard?.data.title}
      isNewPanel={isNewPanel}
    />
    {selectedGraph === PANEL_TYPES.LIST && (
      <ExplorerColumnsRenderer
@@ -8,7 +8,6 @@ import userEvent from '@testing-library/user-event';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { AppContext } from 'providers/App/App';
import { IAppContext } from 'providers/App/types';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { ErrorModalProvider } from 'providers/ErrorModalProvider';
import { QueryBuilderProvider } from 'providers/QueryBuilder';
import configureStore from 'redux-mock-store';
@@ -96,9 +95,7 @@ const render = (ui: React.ReactElement): ReturnType<typeof rtlRender> =>
  <Provider store={createMockStore()}>
    <AppContext.Provider value={createMockAppContext() as IAppContext}>
      <ErrorModalProvider>
        <DashboardProvider>
          <QueryBuilderProvider>{ui}</QueryBuilderProvider>
        </DashboardProvider>
        <QueryBuilderProvider>{ui}</QueryBuilderProvider>
      </ErrorModalProvider>
    </AppContext.Provider>
  </Provider>
@@ -310,12 +310,12 @@ describe('Stacking bar in new panel', () => {

  const { container, getByText } = render(
    <I18nextProvider i18n={i18n}>
      <DashboardProvider>
      <DashboardProvider dashboardId="">
        <PreferenceContextProvider>
          <NewWidget
            dashboardId=""
            selectedDashboard={undefined}
            selectedGraph={PANEL_TYPES.BAR}
            fillSpans={undefined}
            yAxisUnit={undefined}
          />
        </PreferenceContextProvider>
      </DashboardProvider>
@@ -356,11 +356,11 @@ describe('when switching to BAR panel type', () => {

  it('should preserve saved stacking value of true', async () => {
    const { getByTestId, getByText, container } = render(
      <DashboardProvider>
      <DashboardProvider dashboardId="">
        <NewWidget
          dashboardId=""
          selectedDashboard={undefined}
          selectedGraph={PANEL_TYPES.BAR}
          fillSpans={undefined}
          yAxisUnit={undefined}
        />
      </DashboardProvider>,
    );
@@ -4,7 +4,7 @@ import { useTranslation } from 'react-i18next';
import { UseQueryResult } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux';
import { generatePath, useParams } from 'react-router-dom';
import { generatePath } from 'react-router-dom';
import { WarningOutlined } from '@ant-design/icons';
import { Button, Flex, Modal, Space, Typography } from 'antd';
import logEvent from 'api/common/logEvent';
@@ -32,8 +32,7 @@ import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { cloneDeep, defaultTo, isEmpty, isUndefined } from 'lodash-es';
import { Check, X } from 'lucide-react';
import { DashboardWidgetPageParams } from 'pages/DashboardWidget';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useScrollToWidgetIdStore } from 'providers/Dashboard/helpers/scrollToWidgetIdHelper';
import {
  clearSelectedRowWidgetId,
  getSelectedRowWidgetId,
@@ -82,16 +81,15 @@ import {
import './NewWidget.styles.scss';

function NewWidget({
  selectedDashboard,
  dashboardId,
  selectedGraph,
  enableDrillDown = false,
}: NewWidgetProps): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const {
    selectedDashboard,
    setSelectedDashboard,
    setToScrollWidgetId,
    columnWidths,
  } = useDashboard();
  const setToScrollWidgetId = useScrollToWidgetIdStore(
    (s) => s.setToScrollWidgetId,
  );

  const { dashboardVariables } = useDashboardVariables();

@@ -136,8 +134,6 @@ function NewWidget({

  const query = useUrlQuery();

  const { dashboardId } = useParams<DashboardWidgetPageParams>();

  const [isNewDashboard, setIsNewDashboard] = useState<boolean>(false);

  const logEventCalledRef = useRef(false);
@@ -283,11 +279,10 @@ function NewWidget({
      isLogScale,
      legendPosition,
      customLegendColors,
      columnWidths: columnWidths?.[selectedWidget?.id],
      columnWidths: selectedWidget.columnWidths,
      contextLinks,
    };
  });
  // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [
    columnUnits,
    currentQuery,
@@ -310,8 +305,8 @@ function NewWidget({
    isLogScale,
    legendPosition,
    customLegendColors,
    columnWidths,
    contextLinks,
    selectedWidget.columnWidths,
  ]);

  const closeModal = (): void => {
@@ -444,6 +439,19 @@ function NewWidget({
    globalSelectedInterval,
  ]);

  const navigateToDashboardPage = useCallback(() => {
    const params = new URLSearchParams();

    const urlVariablesQueryString = query.get(QueryParams.variables);
    if (urlVariablesQueryString) {
      params.set(QueryParams.variables, urlVariablesQueryString);
    }

    const search = params.toString() ? `?${params.toString()}` : '';

    safeNavigate(generatePath(ROUTES.DASHBOARD, { dashboardId }) + search);
  }, [dashboardId, query, safeNavigate]);

  const onClickSaveHandler = useCallback(() => {
    if (!selectedDashboard) {
      return;
@@ -557,12 +565,9 @@ function NewWidget({
    };

    updateDashboardMutation.mutateAsync(dashboard, {
      onSuccess: (updatedDashboard) => {
        setSelectedDashboard(updatedDashboard.data);
      onSuccess: () => {
        setToScrollWidgetId(selectedWidget?.id || '');
        safeNavigate({
          pathname: generatePath(ROUTES.DASHBOARD, { dashboardId }),
        });
        navigateToDashboardPage();
      },
    });
  }, [
@@ -577,9 +582,8 @@ function NewWidget({
    preWidgets,
    updateDashboardMutation,
    widgets,
    setSelectedDashboard,
    setToScrollWidgetId,
    safeNavigate,
    navigateToDashboardPage,
    dashboardId,
  ]);

@@ -588,12 +592,12 @@ function NewWidget({
      setDiscardModal(true);
      return;
    }
    safeNavigate(generatePath(ROUTES.DASHBOARD, { dashboardId }));
  }, [dashboardId, isQueryModified, safeNavigate]);
    navigateToDashboardPage();
  }, [isQueryModified, navigateToDashboardPage]);

  const discardChanges = useCallback(() => {
    safeNavigate(generatePath(ROUTES.DASHBOARD, { dashboardId }));
  }, [dashboardId, safeNavigate]);
    navigateToDashboardPage();
  }, [navigateToDashboardPage]);

  const setGraphHandler = (type: PANEL_TYPES): void => {
    setIsLoadingPanelData(true);
@@ -627,22 +631,25 @@ function NewWidget({
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [query]);

  const onSaveDashboard = useCallback((): void => {
  const isNewPanel = useMemo(() => {
    const widgetId = query.get('widgetId');
    const selectWidget = widgets?.find((e) => e.id === widgetId);
    const selectedWidget = widgets?.find((e) => e.id === widgetId);
    return isUndefined(selectedWidget);
  }, [query, widgets]);

  const onSaveDashboard = useCallback((): void => {
    logEvent('Panel Edit: Save changes', {
      panelType: selectedWidget.panelTypes,
      dashboardId: selectedDashboard?.id,
      widgetId: selectedWidget.id,
      dashboardName: selectedDashboard?.data.title,
      queryType: currentQuery.queryType,
      isNewPanel: isUndefined(selectWidget),
      isNewPanel,
      dataSource: currentQuery?.builder?.queryData?.[0]?.dataSource,
    });
    setSaveModal(true);
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
  }, [isNewPanel]);

  const isNewTraceLogsAvailable =
    currentQuery.queryType === EQueryType.QUERY_BUILDER &&
@@ -732,12 +739,14 @@ function NewWidget({
    }
    const widgetId = query.get('widgetId') || '';
    const graphType = query.get('graphType') || '';
    const variables = query.get(QueryParams.variables) || '';
    const queryParams = {
      [QueryParams.expandedWidgetId]: widgetId,
      [QueryParams.graphType]: graphType,
      [QueryParams.compositeQuery]: encodeURIComponent(
        JSON.stringify(currentQuery),
      ),
      [QueryParams.variables]: variables,
    };

    const updatedSearch = createQueryParams(queryParams);
@@ -818,6 +827,8 @@ function NewWidget({
        isLoadingPanelData={isLoadingPanelData}
        setQueryResponse={setQueryResponse}
        enableDrillDown={enableDrillDown}
        selectedDashboard={selectedDashboard}
        isNewPanel={isNewPanel}
      />
    )}
  </OverlayScrollbar>
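With this change NewWidget no longer reads dashboardId from route params or dashboard state from the useDashboard context; callers own that wiring. A hedged usage sketch of the new prop-driven contract (prop names come from the diff; the surrounding page component and its imports are illustrative only):

// Hypothetical caller showing the new prop-driven wiring.
function DashboardWidgetPage(): JSX.Element {
  const { selectedDashboard } = useDashboard(); // the context read moves to the caller
  return (
    <NewWidget
      dashboardId={selectedDashboard?.id ?? ''}
      selectedDashboard={selectedDashboard}
      selectedGraph={PANEL_TYPES.TIME_SERIES}
      yAxisUnit={undefined}
      fillSpans={undefined}
      enableDrillDown
    />
  );
}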
@@ -2,6 +2,7 @@ import { Dispatch, SetStateAction } from 'react';
import { UseQueryResult } from 'react-query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { IDashboardContext } from 'providers/Dashboard/types';
import { SuccessResponse, Warning } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
@@ -9,9 +10,9 @@ import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { timePreferance } from './RightContainer/timeItems';

export interface NewWidgetProps {
  dashboardId: string;
  selectedDashboard: IDashboardContext['selectedDashboard'];
  selectedGraph: PANEL_TYPES;
  yAxisUnit: Widgets['yAxisUnit'];
  fillSpans: Widgets['fillSpans'];
  enableDrillDown?: boolean;
}

@@ -34,6 +35,8 @@ export interface WidgetGraphProps {
    >
  >;
  enableDrillDown?: boolean;
  selectedDashboard: IDashboardContext['selectedDashboard'];
  isNewPanel?: boolean;
}

export type WidgetGraphContainerProps = {
@@ -1,180 +0,0 @@
import { useCallback, useEffect, useMemo, useRef } from 'react';
import { ToggleGraphProps } from 'components/Graph/types';
import Uplot from 'components/Uplot';
import GraphManager from 'container/GridCardLayout/GridCard/FullView/GraphManager';
import { getLocalStorageGraphVisibilityState } from 'container/GridCardLayout/GridCard/utils';
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { getUplotHistogramChartOptions } from 'lib/uPlotLib/getUplotHistogramChartOptions';
import _noop from 'lodash-es/noop';
import { ContextMenu, useCoordinates } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';

import { buildHistogramData } from './histogram';
import { PanelWrapperProps } from './panelWrapper.types';

function HistogramPanelWrapper({
  queryResponse,
  widget,
  setGraphVisibility,
  graphVisibility,
  isFullViewMode,
  onToggleModelHandler,
  onClickHandler,
  enableDrillDown = false,
}: PanelWrapperProps): JSX.Element {
  const graphRef = useRef<HTMLDivElement>(null);
  const legendScrollPositionRef = useRef<number>(0);
  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
  const isDarkMode = useIsDarkMode();
  const containerDimensions = useResizeObserver(graphRef);
  const {
    coordinates,
    popoverPosition,
    clickedData,
    onClose,
    onClick,
    subMenu,
    setSubMenu,
  } = useCoordinates();
  const { menuItemsConfig } = useGraphContextMenu({
    widgetId: widget.id || '',
    query: widget.query,
    graphData: clickedData,
    onClose,
    coordinates,
    subMenu,
    setSubMenu,
    contextLinks: widget.contextLinks,
    panelType: widget.panelTypes,
    queryRange: queryResponse,
  });

  const clickHandlerWithContextMenu = useCallback(
    (...args: any[]) => {
      const [
        ,
        ,
        ,
        ,
        metric,
        queryData,
        absoluteMouseX,
        absoluteMouseY,
        ,
        focusedSeries,
      ] = args;
      const data = getUplotClickData({
        metric,
        queryData,
        absoluteMouseX,
        absoluteMouseY,
        focusedSeries,
      });
      if (data && data?.record?.queryName) {
        onClick(data.coord, { ...data.record, label: data.label });
      }
    },
    [onClick],
  );

  const histogramData = buildHistogramData(
    queryResponse.data?.payload.data.result,
    widget?.bucketWidth,
    widget?.bucketCount,
    widget?.mergeAllActiveQueries,
  );

  useEffect(() => {
    if (toScrollWidgetId === widget.id) {
      graphRef.current?.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
      graphRef.current?.focus();
      setToScrollWidgetId('');
    }
  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
  const lineChartRef = useRef<ToggleGraphProps>();

  useEffect(() => {
    const {
      graphVisibilityStates: localStoredVisibilityState,
    } = getLocalStorageGraphVisibilityState({
      apiResponse: queryResponse.data?.payload.data.result || [],
      name: widget.id,
    });
    if (setGraphVisibility) {
      setGraphVisibility(localStoredVisibilityState);
    }
  }, [
    queryResponse?.data?.payload?.data?.result,
    setGraphVisibility,
    widget.id,
  ]);

  const histogramOptions = useMemo(
    () =>
      getUplotHistogramChartOptions({
        id: widget.id,
        dimensions: containerDimensions,
        isDarkMode,
        apiResponse: queryResponse.data?.payload,
        histogramData,
        panelType: widget.panelTypes,
        setGraphsVisibilityStates: setGraphVisibility,
        graphsVisibilityStates: graphVisibility,
        mergeAllQueries: widget.mergeAllActiveQueries,
        onClickHandler: enableDrillDown
          ? clickHandlerWithContextMenu
          : onClickHandler ?? _noop,
        legendScrollPosition: legendScrollPositionRef.current,
        setLegendScrollPosition: (position: number) => {
          legendScrollPositionRef.current = position;
        },
      }),
    [
      containerDimensions,
      graphVisibility,
      histogramData,
      isDarkMode,
      queryResponse.data?.payload,
      setGraphVisibility,
      widget.id,
      widget.mergeAllActiveQueries,
      widget.panelTypes,
      clickHandlerWithContextMenu,
      enableDrillDown,
      onClickHandler,
    ],
  );

  return (
    <div style={{ height: '100%', width: '100%' }} ref={graphRef}>
      <Uplot options={histogramOptions} data={histogramData} ref={lineChartRef} />
      <ContextMenu
        coordinates={coordinates}
        popoverPosition={popoverPosition}
        title={menuItemsConfig.header as string}
        items={menuItemsConfig.items}
        onClose={onClose}
      />
      {isFullViewMode && setGraphVisibility && !widget.mergeAllActiveQueries && (
        <GraphManager
          data={histogramData}
          name={widget.id}
          options={histogramOptions}
          yAxisUnit={widget.yAxisUnit}
          onToggleModelHandler={onToggleModelHandler}
          setGraphsVisibilityStates={setGraphVisibility}
          graphsVisibilityStates={graphVisibility}
          lineChartRef={lineChartRef}
        />
      )}
    </div>
  );
}

export default HistogramPanelWrapper;
@@ -1,4 +0,0 @@
.info-text {
  margin-top: 8px;
  padding: 8px;
}
@@ -1,318 +0,0 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Alert } from 'antd';
import { ToggleGraphProps } from 'components/Graph/types';
import Uplot from 'components/Uplot';
import { PANEL_TYPES } from 'constants/queryBuilder';
import GraphManager from 'container/GridCardLayout/GridCard/FullView/GraphManager';
import { getLocalStorageGraphVisibilityState } from 'container/GridCardLayout/GridCard/utils';
import { getUplotClickData } from 'container/QueryTable/Drilldown/drilldownUtils';
import useGraphContextMenu from 'container/QueryTable/Drilldown/useGraphContextMenu';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { getUPlotChartOptions } from 'lib/uPlotLib/getUplotChartOptions';
import { getUPlotChartData } from 'lib/uPlotLib/utils/getUplotChartData';
import { cloneDeep, isEqual, isUndefined } from 'lodash-es';
import _noop from 'lodash-es/noop';
import { ContextMenu, useCoordinates } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { DataSource } from 'types/common/queryBuilder';
import uPlot from 'uplot';
import { getSortedSeriesData } from 'utils/getSortedSeriesData';
import { getTimeRange } from 'utils/getTimeRange';

import { PanelWrapperProps } from './panelWrapper.types';
import { getTimeRangeFromStepInterval, isApmMetric } from './utils';

import './UplotPanelWrapper.styles.scss';

function UplotPanelWrapper({
  queryResponse,
  widget,
  isFullViewMode,
  setGraphVisibility,
  graphVisibility,
  onToggleModelHandler,
  onClickHandler,
  onDragSelect,
  selectedGraph,
  customTooltipElement,
  customSeries,
  enableDrillDown = false,
}: PanelWrapperProps): JSX.Element {
  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
  const isDarkMode = useIsDarkMode();
  const lineChartRef = useRef<ToggleGraphProps>();
  const graphRef = useRef<HTMLDivElement>(null);
  const legendScrollPositionRef = useRef<{
    scrollTop: number;
    scrollLeft: number;
  }>({
    scrollTop: 0,
    scrollLeft: 0,
  });
  const [minTimeScale, setMinTimeScale] = useState<number>();
  const [maxTimeScale, setMaxTimeScale] = useState<number>();
  const { currentQuery } = useQueryBuilder();

  const [hiddenGraph, setHiddenGraph] = useState<{ [key: string]: boolean }>();

  useEffect(() => {
    if (toScrollWidgetId === widget.id) {
      graphRef.current?.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
      graphRef.current?.focus();
      setToScrollWidgetId('');
    }
  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);

  useEffect((): void => {
    const { startTime, endTime } = getTimeRange(queryResponse);

    setMinTimeScale(startTime);
    setMaxTimeScale(endTime);
  }, [queryResponse]);

  const containerDimensions = useResizeObserver(graphRef);

  const {
    coordinates,
    popoverPosition,
    clickedData,
    onClose,
    onClick,
    subMenu,
    setSubMenu,
  } = useCoordinates();
  const { menuItemsConfig } = useGraphContextMenu({
    widgetId: widget.id || '',
    query: widget.query,
    graphData: clickedData,
    onClose,
    coordinates,
    subMenu,
    setSubMenu,
    contextLinks: widget.contextLinks,
    panelType: widget.panelTypes,
    queryRange: queryResponse,
  });

  useEffect(() => {
    const {
      graphVisibilityStates: localStoredVisibilityState,
    } = getLocalStorageGraphVisibilityState({
      apiResponse: queryResponse.data?.payload.data.result || [],
      name: widget.id,
    });
    if (setGraphVisibility) {
      setGraphVisibility(localStoredVisibilityState);
    }
  }, [
    queryResponse?.data?.payload?.data?.result,
    setGraphVisibility,
    widget.id,
  ]);

  if (queryResponse.data && widget.panelTypes === PANEL_TYPES.BAR) {
    const sortedSeriesData = getSortedSeriesData(
      queryResponse.data?.payload.data.result,
    );
    queryResponse.data.payload.data.result = sortedSeriesData;
  }

  const stackedBarChart = useMemo(
    () =>
      (selectedGraph
        ? selectedGraph === PANEL_TYPES.BAR
        : widget?.panelTypes === PANEL_TYPES.BAR) && widget?.stackedBarChart,
    [selectedGraph, widget?.panelTypes, widget?.stackedBarChart],
  );

  const chartData = useMemo(
    () =>
      getUPlotChartData(
        queryResponse?.data?.payload,
        widget.fillSpans,
        stackedBarChart,
        hiddenGraph,
      ),
    [
      queryResponse?.data?.payload,
      widget.fillSpans,
      stackedBarChart,
      hiddenGraph,
    ],
  );

  useEffect(() => {
    if (widget.panelTypes === PANEL_TYPES.BAR && stackedBarChart) {
      const graphV = cloneDeep(graphVisibility)?.slice(1);
      const isSomeSelectedLegend = graphV?.some((v) => v === false);
      if (isSomeSelectedLegend) {
        const hiddenIndex = graphV?.findIndex((v) => v === true);
        if (!isUndefined(hiddenIndex) && hiddenIndex !== -1) {
          const updatedHiddenGraph = { [hiddenIndex]: true };
          if (!isEqual(hiddenGraph, updatedHiddenGraph)) {
            setHiddenGraph(updatedHiddenGraph);
          }
        }
      }
    }
  }, [graphVisibility, hiddenGraph, widget.panelTypes, stackedBarChart]);

  const { timezone } = useTimezone();

  const clickHandlerWithContextMenu = useCallback(
    (...args: any[]) => {
      const [
        xValue,
        ,
        ,
        ,
        metric,
        queryData,
        absoluteMouseX,
        absoluteMouseY,
        axesData,
        focusedSeries,
      ] = args;
      const data = getUplotClickData({
        metric,
        queryData,
        absoluteMouseX,
        absoluteMouseY,
        focusedSeries,
      });
      // Compute time range if needed and if axes data is available
      let timeRange;
      if (axesData && queryData?.queryName) {
        // Get the compositeQuery from the response params
        const compositeQuery = (queryResponse?.data?.params as any)?.compositeQuery;

        if (compositeQuery?.queries) {
          // Find the specific query by name from the queries array
          const specificQuery = compositeQuery.queries.find(
            (query: any) => query.spec?.name === queryData.queryName,
          );

          // Use the stepInterval from the specific query, fallback to default
          const stepInterval = specificQuery?.spec?.stepInterval || 60;
          timeRange = getTimeRangeFromStepInterval(
            stepInterval,
            metric?.clickedTimestamp || xValue, // Use the clicked timestamp if available, otherwise use the click position timestamp
            specificQuery?.spec?.signal === DataSource.METRICS &&
              isApmMetric(specificQuery?.spec?.aggregations[0]?.metricName),
          );
        }
      }

      if (data && data?.record?.queryName) {
        onClick(data.coord, { ...data.record, label: data.label, timeRange });
      }
    },
    [onClick, queryResponse],
  );

  const options = useMemo(
    () =>
      getUPlotChartOptions({
        id: widget?.id,
        apiResponse: queryResponse.data?.payload,
        dimensions: containerDimensions,
        isDarkMode,
        onDragSelect,
        yAxisUnit: widget?.yAxisUnit,
        onClickHandler: enableDrillDown
          ? clickHandlerWithContextMenu
          : onClickHandler ?? _noop,
        thresholds: widget.thresholds,
        minTimeScale,
        maxTimeScale,
        softMax: widget.softMax === undefined ? null : widget.softMax,
        softMin: widget.softMin === undefined ? null : widget.softMin,
        graphsVisibilityStates: graphVisibility,
        setGraphsVisibilityStates: setGraphVisibility,
        panelType: selectedGraph || widget.panelTypes,
        currentQuery,
        stackBarChart: stackedBarChart,
        hiddenGraph,
        setHiddenGraph,
        customTooltipElement,
        tzDate: (timestamp: number) =>
          uPlot.tzDate(new Date(timestamp * 1e3), timezone.value),
        timezone: timezone.value,
        customSeries,
        isLogScale: widget?.isLogScale,
        colorMapping: widget?.customLegendColors,
        enhancedLegend: true, // Enable enhanced legend
        legendPosition: widget?.legendPosition,
        query: widget?.query || currentQuery,
        legendScrollPosition: legendScrollPositionRef.current,
        setLegendScrollPosition: (position: {
          scrollTop: number;
          scrollLeft: number;
        }) => {
          legendScrollPositionRef.current = position;
        },
        decimalPrecision: widget.decimalPrecision,
      }),
    [
      queryResponse.data?.payload,
      containerDimensions,
      isDarkMode,
      onDragSelect,
      clickHandlerWithContextMenu,
      minTimeScale,
      maxTimeScale,
      graphVisibility,
      setGraphVisibility,
      selectedGraph,
      currentQuery,
      hiddenGraph,
      customTooltipElement,
      timezone.value,
      customSeries,
      enableDrillDown,
      onClickHandler,
      widget,
      stackedBarChart,
    ],
  );

  return (
    <div style={{ height: '100%', width: '100%' }} ref={graphRef}>
      <Uplot options={options} data={chartData} ref={lineChartRef} />
      <ContextMenu
        coordinates={coordinates}
        popoverPosition={popoverPosition}
        title={menuItemsConfig.header as string}
        items={menuItemsConfig.items}
        onClose={onClose}
      />
      {stackedBarChart && isFullViewMode && (
        <Alert
          message="Selecting multiple legends is currently not supported in case of stacked bar charts"
          type="info"
          className="info-text"
        />
      )}
      {isFullViewMode && setGraphVisibility && !stackedBarChart && (
        <GraphManager
          data={chartData}
          name={widget.id}
          options={options}
          yAxisUnit={widget.yAxisUnit}
          onToggleModelHandler={onToggleModelHandler}
          setGraphsVisibilityStates={setGraphVisibility}
          graphsVisibilityStates={graphVisibility}
          lineChartRef={lineChartRef}
        />
      )}
    </div>
  );
}

export default UplotPanelWrapper;
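The click handler above widens the clicked point into a time range using getTimeRangeFromStepInterval from './utils', whose source is not part of this compare. A hypothetical sketch of what such a bracketing helper might do, assuming second-precision timestamps and a [startMs, endMs] result (both assumptions):

// Hypothetical sketch of a step-interval bracketing helper; not the repo's implementation.
function getTimeRangeFromStepInterval(
  stepIntervalSeconds: number,
  clickedTimestampSeconds: number,
  isApmMetric = false,
): [number, number] {
  // Assumption: APM metrics are pre-aggregated, so bracket with a wider window.
  const step = isApmMetric ? stepIntervalSeconds * 2 : stepIntervalSeconds;
  const startMs = (clickedTimestampSeconds - step) * 1000;
  const endMs = (clickedTimestampSeconds + step) * 1000;
  return [startMs, endMs];
}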
@@ -38,6 +38,7 @@ export const routeConfig: Record<string, QueryParams[]> = {
  [ROUTES.MY_SETTINGS]: [QueryParams.resourceAttributes],
  [ROUTES.NOT_FOUND]: [QueryParams.resourceAttributes],
  [ROUTES.ORG_SETTINGS]: [QueryParams.resourceAttributes],
  [ROUTES.MEMBERS_SETTINGS]: [QueryParams.resourceAttributes],
  [ROUTES.PASSWORD_RESET]: [QueryParams.resourceAttributes],
  [ROUTES.SETTINGS]: [QueryParams.resourceAttributes],
  [ROUTES.SIGN_UP]: [QueryParams.resourceAttributes],
@@ -30,7 +30,6 @@ import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { GlobalReducer } from 'types/reducer/globalTime';
import { addCustomTimeRange } from 'utils/customTimeRangeUtils';
import { persistTimeDurationForRoute } from 'utils/metricsTimeStorageUtils';
import { normalizeTimeToMs } from 'utils/timeUtils';
import { v4 as uuid } from 'uuid';

@@ -235,7 +234,20 @@ function DateTimeSelection({

  const updateLocalStorageForRoutes = useCallback(
    (value: Time | string): void => {
      persistTimeDurationForRoute(location.pathname, String(value));
      const preRoutes = getLocalStorageKey(LOCALSTORAGE.METRICS_TIME_IN_DURATION);
      if (preRoutes !== null) {
        const preRoutesObject = JSON.parse(preRoutes);

        const preRoute = {
          ...preRoutesObject,
        };
        preRoute[location.pathname] = value;

        setLocalStorageKey(
          LOCALSTORAGE.METRICS_TIME_IN_DURATION,
          JSON.stringify(preRoute),
        );
      }
    },
    [location.pathname],
  );
@@ -726,7 +738,6 @@ function DateTimeSelection({
      showRecentlyUsed={showRecentlyUsed}
      minTime={minTimeForDateTimePicker}
      maxTime={maxTimeForDateTimePicker}
      isModalTimeSelection={isModalTimeSelection}
    />

    {showAutoRefresh && selectedTime !== 'custom' && (
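Both variants of updateLocalStorageForRoutes persist a per-route duration map under LOCALSTORAGE.METRICS_TIME_IN_DURATION; one delegates to persistTimeDurationForRoute, the other inlines the merge. The helper's implementation in utils/metricsTimeStorageUtils is not shown in this compare, so the sketch below is an assumption about its shape, not its actual code:

// Hypothetical sketch: merge one route's selected duration into the stored map.
const STORAGE_KEY = 'METRICS_TIME_IN_DURATION'; // assumed key name

export function persistTimeDurationForRoute(
  pathname: string,
  duration: string,
): void {
  const raw = localStorage.getItem(STORAGE_KEY);
  // Keep existing entries for other routes; start fresh if nothing is stored yet.
  const routeMap: Record<string, string> = raw ? JSON.parse(raw) : {};
  routeMap[pathname] = duration;
  localStorage.setItem(STORAGE_KEY, JSON.stringify(routeMap));
}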
@@ -1,160 +0,0 @@
import { act, renderHook } from '@testing-library/react';
import { QueryParams } from 'constants/query';
import { GlobalReducer } from 'types/reducer/globalTime';

import { useZoomOut } from '../useZoomOut';

const mockDispatch = jest.fn();
const mockSafeNavigate = jest.fn();
const mockUrlQueryDelete = jest.fn();
const mockUrlQuerySet = jest.fn();
const mockUrlQueryToString = jest.fn(() => '');

interface MockAppState {
  globalTime: Pick<GlobalReducer, 'minTime' | 'maxTime'>;
}

jest.mock('react-redux', () => ({
  useDispatch: (): jest.Mock => mockDispatch,
  useSelector: <T>(selector: (state: MockAppState) => T): T => {
    const mockState: MockAppState = {
      globalTime: {
        minTime: 15 * 60 * 1000 * 1e6, // 15 min in nanoseconds
        maxTime: 30 * 60 * 1000 * 1e6, // 30 min in nanoseconds (mock for getNextZoomOutRange)
      },
    };
    return selector(mockState);
  },
}));

jest.mock('react-router-dom', () => ({
  useLocation: (): { pathname: string } => ({ pathname: '/logs-explorer' }),
}));

jest.mock('hooks/useSafeNavigate', () => ({
  useSafeNavigate: (): { safeNavigate: typeof mockSafeNavigate } => ({
    safeNavigate: mockSafeNavigate,
  }),
}));

interface MockUrlQuery {
  delete: typeof mockUrlQueryDelete;
  set: typeof mockUrlQuerySet;
  get: () => null;
  toString: typeof mockUrlQueryToString;
}

jest.mock('hooks/useUrlQuery', () => ({
  __esModule: true,
  default: (): MockUrlQuery => ({
    delete: mockUrlQueryDelete,
    set: mockUrlQuerySet,
    get: (): null => null,
    toString: mockUrlQueryToString,
  }),
}));

const mockGetNextZoomOutRange = jest.fn();
jest.mock('lib/zoomOutUtils', () => ({
  getNextZoomOutRange: (
    ...args: unknown[]
  ): ReturnType<typeof mockGetNextZoomOutRange> =>
    mockGetNextZoomOutRange(...args),
}));

describe('useZoomOut', () => {
  beforeEach(() => {
    jest.clearAllMocks();
    mockUrlQueryToString.mockReturnValue('relativeTime=45m');
  });

  it('should do nothing when isDisabled is true', () => {
    const { result } = renderHook(() => useZoomOut({ isDisabled: true }));

    act(() => {
      result.current();
    });

    expect(mockGetNextZoomOutRange).not.toHaveBeenCalled();
    expect(mockDispatch).not.toHaveBeenCalled();
    expect(mockSafeNavigate).not.toHaveBeenCalled();
  });

  it('should do nothing when getNextZoomOutRange returns null', () => {
    mockGetNextZoomOutRange.mockReturnValue(null);

    const { result } = renderHook(() => useZoomOut());

    act(() => {
      result.current();
    });

    expect(mockGetNextZoomOutRange).toHaveBeenCalled();
    expect(mockDispatch).not.toHaveBeenCalled();
    expect(mockSafeNavigate).not.toHaveBeenCalled();
  });

  it('should dispatch preset and update URL when result has preset', () => {
    mockGetNextZoomOutRange.mockReturnValue({
      range: [1000, 2000],
      preset: '45m',
    });

    const { result } = renderHook(() => useZoomOut());

    act(() => {
      result.current();
    });

    expect(mockDispatch).toHaveBeenCalledWith(expect.any(Function));
    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.startTime);
    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.endTime);
    expect(mockUrlQuerySet).toHaveBeenCalledWith(QueryParams.relativeTime, '45m');
    expect(mockSafeNavigate).toHaveBeenCalledWith(
      expect.stringContaining('/logs-explorer'),
    );
  });

  it('should dispatch custom range and update URL when result has no preset', () => {
    mockGetNextZoomOutRange.mockReturnValue({
      range: [1000000, 2000000],
      preset: null,
    });

    const { result } = renderHook(() => useZoomOut());

    act(() => {
      result.current();
    });

    expect(mockDispatch).toHaveBeenCalledWith(expect.any(Function));
    expect(mockUrlQuerySet).toHaveBeenCalledWith(
      QueryParams.startTime,
      '1000000',
    );
    expect(mockUrlQuerySet).toHaveBeenCalledWith(QueryParams.endTime, '2000000');
    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.relativeTime);
    expect(mockSafeNavigate).toHaveBeenCalledWith(
      expect.stringContaining('/logs-explorer'),
    );
  });

  it('should delete urlParamsToDelete when provided', () => {
    mockGetNextZoomOutRange.mockReturnValue({
      range: [1000, 2000],
      preset: '45m',
    });

    const { result } = renderHook(() =>
      useZoomOut({
        urlParamsToDelete: [QueryParams.activeLogId],
      }),
    );

    act(() => {
      result.current();
    });

    expect(mockUrlQueryDelete).toHaveBeenCalledWith(QueryParams.activeLogId);
  });
});
@@ -0,0 +1,339 @@
import { renderHook } from '@testing-library/react';
import { useDashboardVariablesFromLocalStorage } from 'hooks/dashboard/useDashboardFromLocalStorage';
import { useTransformDashboardVariables } from 'hooks/dashboard/useTransformDashboardVariables';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';

jest.mock('hooks/dashboard/useDashboardFromLocalStorage');
jest.mock('hooks/dashboard/useVariablesFromUrl');

const mockUseDashboardVariablesFromLocalStorage = useDashboardVariablesFromLocalStorage as jest.MockedFunction<
  typeof useDashboardVariablesFromLocalStorage
>;
const mockUseVariablesFromUrl = useVariablesFromUrl as jest.MockedFunction<
  typeof useVariablesFromUrl
>;

const makeVariable = (
  overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
  id: 'existing-id',
  name: 'env',
  description: '',
  type: 'QUERY',
  sort: 'DISABLED',
  multiSelect: false,
  showALLOption: false,
  selectedValue: 'prod',
  ...overrides,
});

const makeDashboard = (
  variables: Record<string, IDashboardVariable>,
): Dashboard => ({
  id: 'dash-1',
  createdAt: '',
  updatedAt: '',
  createdBy: '',
  updatedBy: '',
  data: {
    title: 'Test',
    variables,
  },
});

const setupHook = (
  currentDashboard: Record<string, any> = {},
  urlVariables: Record<string, any> = {},
): ReturnType<typeof useTransformDashboardVariables> => {
  mockUseDashboardVariablesFromLocalStorage.mockReturnValue({
    currentDashboard,
    updateLocalStorageDashboardVariables: jest.fn(),
  });
  mockUseVariablesFromUrl.mockReturnValue({
    getUrlVariables: () => urlVariables,
    setUrlVariables: jest.fn(),
    updateUrlVariable: jest.fn(),
  });

  const { result } = renderHook(() => useTransformDashboardVariables('dash-1'));
  return result.current;
};

describe('useTransformDashboardVariables', () => {
  beforeEach(() => jest.clearAllMocks());

  describe('order assignment', () => {
    it('assigns order starting from 0 to variables that have none', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({
        v1: makeVariable({ id: 'id1', name: 'v1', order: undefined }),
        v2: makeVariable({ id: 'id2', name: 'v2', order: undefined }),
      });

      const result = transformDashboardVariables(dashboard);

      const orders = Object.values(result.data.variables).map((v) => v.order);
      expect(orders).toContain(0);
      expect(orders).toContain(1);
    });

    it('preserves existing order values', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({
        v1: makeVariable({ id: 'id1', name: 'v1', order: 5 }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.order).toBe(5);
    });

    it('assigns unique orders across multiple variables that all lack an order', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({
        v1: makeVariable({ id: 'id1', name: 'v1', order: undefined }),
        v2: makeVariable({ id: 'id2', name: 'v2', order: undefined }),
        v3: makeVariable({ id: 'id3', name: 'v3', order: undefined }),
      });

      const result = transformDashboardVariables(dashboard);

      const orders = Object.values(result.data.variables).map((v) => v.order);
      // All three newly assigned orders must be distinct
      expect(new Set(orders).size).toBe(3);
    });
  });

  describe('ID assignment', () => {
    it('assigns a UUID to variables that have no id', () => {
      const { transformDashboardVariables } = setupHook();
      const variable = makeVariable({ name: 'v1' });
      (variable as any).id = undefined;
      const dashboard = makeDashboard({ v1: variable });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.id).toMatch(
        /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i,
      );
    });

    it('preserves existing IDs', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({
        v1: makeVariable({ id: 'keep-me', name: 'v1' }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.id).toBe('keep-me');
    });
  });

  describe('TEXTBOX backward compatibility', () => {
    it('copies textboxValue to defaultValue when defaultValue is missing', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'v1',
          type: 'TEXTBOX',
          textboxValue: 'hello',
          defaultValue: undefined,
          order: undefined,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.defaultValue).toBe('hello');
    });

    it('does not overwrite an existing defaultValue', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'v1',
          type: 'TEXTBOX',
          textboxValue: 'old',
          defaultValue: 'keep',
          order: undefined,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.defaultValue).toBe('keep');
    });
  });

  describe('localStorage merge', () => {
    it('applies localStorage selectedValue over DB value', () => {
      const { transformDashboardVariables } = setupHook({
        env: { selectedValue: 'staging', allSelected: false },
      });
      const dashboard = makeDashboard({
        v1: makeVariable({ id: 'id1', name: 'env', selectedValue: 'prod' }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.selectedValue).toBe('staging');
    });

    it('applies localStorage allSelected over DB value', () => {
      const { transformDashboardVariables } = setupHook({
        env: { selectedValue: undefined, allSelected: true },
      });
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'env',
          allSelected: false,
          showALLOption: true,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.allSelected).toBe(true);
    });
  });

  describe('URL variable override', () => {
    it('sets allSelected=true when URL value is __ALL__', () => {
      const { transformDashboardVariables } = setupHook(
        { env: { selectedValue: 'prod', allSelected: false } },
        { env: '__ALL__' },
      );
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'env',
          showALLOption: true,
          allSelected: false,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.allSelected).toBe(true);
    });

    it('sets selectedValue from URL and clears allSelected when showALLOption is true', () => {
      const { transformDashboardVariables } = setupHook(
        { env: { selectedValue: undefined, allSelected: true } },
        { env: 'dev' },
      );
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'env',
          showALLOption: true,
          allSelected: true,
          multiSelect: false,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.selectedValue).toBe('dev');
      expect(result.data.variables.v1.allSelected).toBe(false);
    });

    it('does not set allSelected=false when showALLOption is false', () => {
      const { transformDashboardVariables } = setupHook(
        { env: { selectedValue: undefined, allSelected: true } },
        { env: 'dev' },
      );
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'env',
          showALLOption: false,
          allSelected: true,
          multiSelect: false,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.selectedValue).toBe('dev');
      expect(result.data.variables.v1.allSelected).toBe(true);
    });

    it('normalizes array URL value to single value for single-select variable', () => {
      const { transformDashboardVariables } = setupHook(
        {},
        { env: ['prod', 'dev'] },
      );
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'env',
          multiSelect: false,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.selectedValue).toBe('prod');
    });

    it('wraps single URL value in array for multi-select variable', () => {
      const { transformDashboardVariables } = setupHook({}, { env: 'prod' });
      const dashboard = makeDashboard({
        v1: makeVariable({
          id: 'id1',
          name: 'env',
          multiSelect: true,
        }),
      });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.selectedValue).toEqual(['prod']);
    });

    it('looks up URL variable by variable id when name is absent', () => {
      const { transformDashboardVariables } = setupHook(
        {},
        { 'var-uuid': 'fallback' },
      );
      const variable = makeVariable({ id: 'var-uuid', multiSelect: false });
      delete variable.name;
      const dashboard = makeDashboard({ v1: variable });

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables.v1.selectedValue).toBe('fallback');
    });
  });

  describe('edge cases', () => {
    it('returns data unchanged when there are no variables', () => {
      const { transformDashboardVariables } = setupHook();
      const dashboard = makeDashboard({});

      const result = transformDashboardVariables(dashboard);

      expect(result.data.variables).toEqual({});
    });

    it('does not mutate the original dashboard', () => {
      const { transformDashboardVariables } = setupHook({
        env: { selectedValue: 'staging', allSelected: false },
      });
      const dashboard = makeDashboard({
        v1: makeVariable({ id: 'id1', name: 'env', selectedValue: 'prod' }),
      });
      const originalValue = dashboard.data.variables.v1.selectedValue;

      transformDashboardVariables(dashboard);

      expect(dashboard.data.variables.v1.selectedValue).toBe(originalValue);
    });
  });
});
@@ -15,7 +15,7 @@ interface DashboardLocalStorageVariables {
  [id: string]: LocalStoreDashboardVariables;
}

interface UseDashboardVariablesFromLocalStorageReturn {
export interface UseDashboardVariablesFromLocalStorageReturn {
  currentDashboard: LocalStoreDashboardVariables;
  updateLocalStorageDashboardVariables: (
    id: string,
128
frontend/src/hooks/dashboard/useTransformDashboardVariables.ts
Normal file
@@ -0,0 +1,128 @@
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import {
  useDashboardVariablesFromLocalStorage,
  UseDashboardVariablesFromLocalStorageReturn,
} from 'hooks/dashboard/useDashboardFromLocalStorage';
import useVariablesFromUrl, {
  UseVariablesFromUrlReturn,
} from 'hooks/dashboard/useVariablesFromUrl';
import { isEmpty } from 'lodash-es';
import { normalizeUrlValueForVariable } from 'providers/Dashboard/normalizeUrlValue';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { v4 as generateUUID } from 'uuid';

export function useTransformDashboardVariables(
  dashboardId: string,
): Pick<UseVariablesFromUrlReturn, 'getUrlVariables' | 'updateUrlVariable'> &
  UseDashboardVariablesFromLocalStorageReturn & {
    transformDashboardVariables: (data: Dashboard) => Dashboard;
  } {
  const {
    currentDashboard,
    updateLocalStorageDashboardVariables,
  } = useDashboardVariablesFromLocalStorage(dashboardId);
  const { getUrlVariables, updateUrlVariable } = useVariablesFromUrl();

  const mergeDBWithLocalStorage = (
    data: Dashboard,
    localStorageVariables: any,
  ): Dashboard => {
    const updatedData = data;
    if (data && localStorageVariables) {
      const updatedVariables = data.data.variables;
      const variablesFromUrl = getUrlVariables();
      Object.keys(data.data.variables).forEach((variable) => {
        const variableData = data.data.variables[variable];

        // values from url
        const urlVariable = variableData?.name
          ? variablesFromUrl[variableData?.name] || variablesFromUrl[variableData.id]
          : variablesFromUrl[variableData.id];

        let updatedVariable = {
          ...data.data.variables[variable],
          ...localStorageVariables[variableData.name as any],
        };

        // respect the url variable if it is set, override the others
        if (!isEmpty(urlVariable)) {
          if (urlVariable === ALL_SELECTED_VALUE) {
            updatedVariable = {
              ...updatedVariable,
              allSelected: true,
            };
          } else {
            // Normalize URL value to match variable's multiSelect configuration
            const normalizedValue = normalizeUrlValueForVariable(
              urlVariable,
              variableData,
            );

            updatedVariable = {
              ...updatedVariable,
              selectedValue: normalizedValue,
              // Only set allSelected to false if showALLOption is available
              ...(updatedVariable?.showALLOption && { allSelected: false }),
            };
          }
        }

        updatedVariables[variable] = updatedVariable;
      });
      updatedData.data.variables = updatedVariables;
    }
    return updatedData;
  };

  // As we do not have order and ID's in the variables object, we have to process variables to add order and ID if they do not exist in the variables object
  // eslint-disable-next-line sonarjs/cognitive-complexity
  const transformDashboardVariables = (data: Dashboard): Dashboard => {
    if (data && data.data && data.data.variables) {
      const clonedDashboardData = mergeDBWithLocalStorage(
        JSON.parse(JSON.stringify(data)),
        currentDashboard,
      );
      const { variables } = clonedDashboardData.data;
      const existingOrders: Set<number> = new Set();

      for (const key in variables) {
        // eslint-disable-next-line no-prototype-builtins
        if (variables.hasOwnProperty(key)) {
          const variable: IDashboardVariable = variables[key];

          // Check if 'order' property doesn't exist or is undefined
          if (variable.order === undefined) {
            // Find a unique order starting from 0
            let order = 0;
            while (existingOrders.has(order)) {
              order += 1;
            }

            variable.order = order;
            existingOrders.add(order);
            // ! BWC - Specific case for backward compatibility where textboxValue was used instead of defaultValue
            if (variable.type === 'TEXTBOX' && !variable.defaultValue) {
              variable.defaultValue = variable.textboxValue || '';
            }
          }

          if (variable.id === undefined) {
            variable.id = generateUUID();
          }
        }
      }

      return clonedDashboardData;
    }

    return data;
  };

  return {
    transformDashboardVariables,
    getUrlVariables,
    updateUrlVariable,
    currentDashboard,
    updateLocalStorageDashboardVariables,
  };
}
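transformDashboardVariables leans on normalizeUrlValueForVariable to reconcile a URL value with the variable's multiSelect setting; that module's source is not part of this compare. Inferred from the test expectations above (array collapses to its first element for single-select, a scalar is wrapped in an array for multi-select), a plausible sketch:

// Sketch inferred from the tests; the real implementation may differ.
import { IDashboardVariable } from 'types/api/dashboard/getAll';

type UrlValue = string | number | boolean | (string | number | boolean)[];

export function normalizeUrlValueForVariable(
  urlValue: UrlValue,
  variable: IDashboardVariable,
): UrlValue {
  if (variable.multiSelect) {
    // Multi-select variables always hold arrays.
    return Array.isArray(urlValue) ? urlValue : [urlValue];
  }
  // Single-select variables hold a scalar; take the first array element.
  return Array.isArray(urlValue) ? urlValue[0] : urlValue;
}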
@@ -11,7 +11,7 @@ export interface LocalStoreDashboardVariables {
    | IDashboardVariable['selectedValue'];
}

interface UseVariablesFromUrlReturn {
export interface UseVariablesFromUrlReturn {
  getUrlVariables: () => LocalStoreDashboardVariables;
  setUrlVariables: (variables: LocalStoreDashboardVariables) => void;
  updateUrlVariable: (
@@ -107,7 +107,6 @@ const useCreateAlerts = (widget?: Widgets, caller?: string): VoidFunction => {
    queryRangeMutation,
    dashboardVariables,
    dashboardDynamicVariables,
    selectedDashboard?.data.version,
    widget,
  ]);
};
@@ -23,10 +23,10 @@ export default {
  relations: {
    assignee: ['role'],
    create: ['metaresources'],
    delete: ['user', 'role', 'organization', 'metaresource'],
    delete: ['user', 'serviceaccount', 'role', 'organization', 'metaresource'],
    list: ['metaresources'],
    read: ['user', 'role', 'organization', 'metaresource'],
    update: ['user', 'role', 'organization', 'metaresource'],
    read: ['user', 'serviceaccount', 'role', 'organization', 'metaresource'],
    update: ['user', 'serviceaccount', 'role', 'organization', 'metaresource'],
  },
},
} as const;
||||
@@ -1,79 +0,0 @@
|
||||
import { useCallback, useRef } from 'react';
|
||||
// eslint-disable-next-line no-restricted-imports
|
||||
import { useDispatch, useSelector } from 'react-redux';
|
||||
import { useLocation } from 'react-router-dom';
|
||||
import { QueryParams } from 'constants/query';
|
||||
import { useSafeNavigate } from 'hooks/useSafeNavigate';
|
||||
import useUrlQuery from 'hooks/useUrlQuery';
|
||||
import { getNextZoomOutRange } from 'lib/zoomOutUtils';
|
||||
import { UpdateTimeInterval } from 'store/actions';
|
||||
import { AppState } from 'store/reducers';
|
||||
import { GlobalReducer } from 'types/reducer/globalTime';
|
||||
import { persistTimeDurationForRoute } from 'utils/metricsTimeStorageUtils';
|
||||
|
||||
export interface UseZoomOutOptions {
|
||||
/** When true, the zoom out handler does nothing (e.g. when live logs are enabled) */
|
||||
isDisabled?: boolean;
|
||||
/** URL params to delete when zooming out (e.g. [QueryParams.activeLogId] for logs) */
|
||||
urlParamsToDelete?: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Reusable hook for zoom-out functionality in explorers (logs, traces, etc.).
|
||||
* Computes the next time range using the zoom-out ladder, updates Redux global time,
|
||||
* and navigates with the new URL params.
|
||||
*/
|
||||
const EMPTY_PARAMS: string[] = [];
|
||||
|
||||
export function useZoomOut(options: UseZoomOutOptions = {}): () => void {
|
||||
const { isDisabled = false, urlParamsToDelete = EMPTY_PARAMS } = options;
|
||||
const urlParamsToDeleteRef = useRef(urlParamsToDelete);
|
||||
urlParamsToDeleteRef.current = urlParamsToDelete;
|
||||
|
||||
const dispatch = useDispatch();
|
||||
const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
|
||||
(state) => state.globalTime,
|
||||
);
|
||||
const urlQuery = useUrlQuery();
|
||||
const location = useLocation();
|
||||
const { safeNavigate } = useSafeNavigate();
|
||||
|
||||
return useCallback((): void => {
|
||||
if (isDisabled) {
|
||||
return;
|
||||
}
|
||||
const minMs = Math.floor((minTime ?? 0) / 1e6);
|
||||
const maxMs = Math.floor((maxTime ?? 0) / 1e6);
|
||||
const result = getNextZoomOutRange(minMs, maxMs);
|
||||
if (!result) {
|
||||
return;
|
||||
}
|
||||
const [newStartMs, newEndMs] = result.range;
|
||||
const { preset } = result;
|
||||
|
||||
if (preset) {
|
||||
dispatch(UpdateTimeInterval(preset));
|
||||
urlQuery.delete(QueryParams.startTime);
|
||||
urlQuery.delete(QueryParams.endTime);
|
||||
urlQuery.set(QueryParams.relativeTime, preset);
|
||||
persistTimeDurationForRoute(location.pathname, preset);
|
||||
} else {
|
||||
dispatch(UpdateTimeInterval('custom', [newStartMs, newEndMs]));
|
||||
urlQuery.set(QueryParams.startTime, String(newStartMs));
|
||||
urlQuery.set(QueryParams.endTime, String(newEndMs));
|
||||
urlQuery.delete(QueryParams.relativeTime);
|
||||
}
|
||||
for (const param of urlParamsToDeleteRef.current) {
|
||||
urlQuery.delete(param);
|
||||
}
|
||||
safeNavigate(`${location.pathname}?${urlQuery.toString()}`);
|
||||
}, [
|
||||
dispatch,
|
||||
isDisabled,
|
||||
location.pathname,
|
||||
maxTime,
|
||||
minTime,
|
||||
safeNavigate,
|
||||
urlQuery,
|
||||
]);
|
||||
}
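The deleted hook above was meant to be consumed from explorer toolbars. For orientation, a sketch of how such a consumer would look (component name and wiring are hypothetical, not taken from the repo):

```typescript
import { useZoomOut } from 'hooks/useZoomOut'; // path assumed for illustration
import { QueryParams } from 'constants/query';

// Hypothetical toolbar button wiring the hook up in a logs explorer.
function ZoomOutButton({ isLive }: { isLive: boolean }): JSX.Element {
  const handleZoomOut = useZoomOut({
    isDisabled: isLive, // no-op while live tail is running
    urlParamsToDelete: [QueryParams.activeLogId], // selection goes stale after range change
  });

  return (
    <button type="button" onClick={handleZoomOut}>
      Zoom out
    </button>
  );
}
```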

@@ -1,147 +0,0 @@
import {
  getNextDurationInLadder,
  getNextZoomOutRange,
  isZoomOutDisabled,
  ZoomOutResult,
} from '../zoomOutUtils';

const MS_PER_MIN = 60 * 1000;
const MS_PER_HOUR = 60 * MS_PER_MIN;
const MS_PER_DAY = 24 * MS_PER_HOUR;
const MS_PER_WEEK = 7 * MS_PER_DAY;

// Fixed "now" for deterministic tests: 2024-01-15 12:00:00 UTC
const NOW_MS = 1705312800000;

describe('zoomOutUtils', () => {
  beforeEach(() => {
    jest.spyOn(Date, 'now').mockReturnValue(NOW_MS);
  });

  afterEach(() => {
    jest.restoreAllMocks();
  });

  describe('getNextDurationInLadder', () => {
    it('should use 3x zoom out below 15m until reaching 15m', () => {
      expect(getNextDurationInLadder(1 * MS_PER_MIN)).toBe(3 * MS_PER_MIN);
      expect(getNextDurationInLadder(2 * MS_PER_MIN)).toBe(6 * MS_PER_MIN);
      expect(getNextDurationInLadder(3 * MS_PER_MIN)).toBe(9 * MS_PER_MIN);
      expect(getNextDurationInLadder(4 * MS_PER_MIN)).toBe(12 * MS_PER_MIN);
      expect(getNextDurationInLadder(5 * MS_PER_MIN)).toBe(15 * MS_PER_MIN); // cap at 15m
      expect(getNextDurationInLadder(6 * MS_PER_MIN)).toBe(15 * MS_PER_MIN); // 18m capped
    });

    it('should return next step for each ladder rung from 15m onward', () => {
      expect(getNextDurationInLadder(10 * MS_PER_MIN)).toBe(15 * MS_PER_MIN);
      expect(getNextDurationInLadder(15 * MS_PER_MIN)).toBe(45 * MS_PER_MIN);
      expect(getNextDurationInLadder(45 * MS_PER_MIN)).toBe(2 * MS_PER_HOUR);
      expect(getNextDurationInLadder(2 * MS_PER_HOUR)).toBe(7 * MS_PER_HOUR);
      expect(getNextDurationInLadder(7 * MS_PER_HOUR)).toBe(21 * MS_PER_HOUR);
      expect(getNextDurationInLadder(21 * MS_PER_HOUR)).toBe(1 * MS_PER_DAY);
      expect(getNextDurationInLadder(1 * MS_PER_DAY)).toBe(2 * MS_PER_DAY);
      expect(getNextDurationInLadder(2 * MS_PER_DAY)).toBe(3 * MS_PER_DAY);
      expect(getNextDurationInLadder(3 * MS_PER_DAY)).toBe(1 * MS_PER_WEEK);
      expect(getNextDurationInLadder(1 * MS_PER_WEEK)).toBe(2 * MS_PER_WEEK);
      expect(getNextDurationInLadder(2 * MS_PER_WEEK)).toBe(30 * MS_PER_DAY);
    });

    it('should return MAX when at or past 1 month (no wrap)', () => {
      expect(getNextDurationInLadder(30 * MS_PER_DAY)).toBe(30 * MS_PER_DAY);
      expect(getNextDurationInLadder(31 * MS_PER_DAY)).toBe(30 * MS_PER_DAY);
    });

    it('should return next step for duration between ladder rungs', () => {
      expect(getNextDurationInLadder(1 * MS_PER_HOUR)).toBe(2 * MS_PER_HOUR);
      expect(getNextDurationInLadder(5 * MS_PER_HOUR)).toBe(7 * MS_PER_HOUR);
      expect(getNextDurationInLadder(12 * MS_PER_HOUR)).toBe(21 * MS_PER_HOUR);
    });
  });

  describe('getNextZoomOutRange', () => {
    it('should return null when duration is zero or negative', () => {
      expect(getNextZoomOutRange(NOW_MS, NOW_MS)).toBeNull();
      expect(getNextZoomOutRange(NOW_MS, NOW_MS - 1000)).toBeNull();
    });

    it('should return center-anchored range and preset=null when new end does not exceed now (Phase 1)', () => {
      // 15m range centered well before now so zoom to 45m keeps end <= now
      // Center at now-30m: end = center + 22.5m = now - 7.5m <= now
      const centerMs = NOW_MS - 30 * MS_PER_MIN;
      const start15m = centerMs - 7.5 * MS_PER_MIN;
      const end15m = centerMs + 7.5 * MS_PER_MIN;
      const result = getNextZoomOutRange(start15m, end15m) as ZoomOutResult;

      expect(result).not.toBeNull();
      expect(result.preset).toBeNull(); // Phase 1: preserve center-anchored range, avoid GetMinMax "last X from now"
      const [newStart, newEnd] = result.range;
      expect(newEnd - newStart).toBe(45 * MS_PER_MIN);
      const newCenter = (newStart + newEnd) / 2;
      expect(Math.abs(newCenter - centerMs)).toBeLessThan(2000);
      expect(newEnd).toBeLessThanOrEqual(NOW_MS + 1000);
    });

    it('should return end-anchored range when new end would exceed now (Phase 2)', () => {
      // 22hr range ending at now - zoom to 1d (24hr) would push end past now
      // Next ladder step from 22hr is 1d
      const start22h = NOW_MS - 22 * MS_PER_HOUR;
      const end22h = NOW_MS;
      const result = getNextZoomOutRange(start22h, end22h) as ZoomOutResult;

      expect(result).not.toBeNull();
      expect(result.preset).toBe('1d');
      const [newStart, newEnd] = result.range;
      expect(newEnd).toBe(NOW_MS); // End anchored at now
      expect(newStart).toBe(NOW_MS - 1 * MS_PER_DAY);
    });

    it('should return correct preset for each ladder step', () => {
      const presets: [number, number, string][] = [
        [15 * MS_PER_MIN, 0, '45m'],
        [45 * MS_PER_MIN, 0, '2h'],
        [2 * MS_PER_HOUR, 0, '7h'],
        [7 * MS_PER_HOUR, 0, '21h'],
        [21 * MS_PER_HOUR, 0, '1d'],
        [1 * MS_PER_DAY, 0, '2d'],
        [2 * MS_PER_DAY, 0, '3d'],
        [3 * MS_PER_DAY, 0, '1w'],
        [1 * MS_PER_WEEK, 0, '2w'],
        [2 * MS_PER_WEEK, 0, '1month'],
      ];

      presets.forEach(([durationMs, offset, expectedPreset]) => {
        const end = NOW_MS - offset;
        const start = end - durationMs;
        const result = getNextZoomOutRange(start, end);
        expect(result?.preset).toBe(expectedPreset);
      });
    });

    it('isZoomOutDisabled returns true when duration >= 1 month', () => {
      expect(isZoomOutDisabled(30 * MS_PER_DAY)).toBe(true);
      expect(isZoomOutDisabled(31 * MS_PER_DAY)).toBe(true);
      expect(isZoomOutDisabled(29 * MS_PER_DAY)).toBe(false);
      expect(isZoomOutDisabled(15 * MS_PER_MIN)).toBe(false);
    });

    it('should return null when at 1 month (no zoom out beyond max)', () => {
      const start1m = NOW_MS - 30 * MS_PER_DAY;
      const end1m = NOW_MS;
      const result = getNextZoomOutRange(start1m, end1m);

      expect(result).toBeNull();
    });

    it('should zoom out 3x from 5m range to 15m then continue with ladder', () => {
      // 5m range ending at now → 3x = 15m
      const start5m = NOW_MS - 5 * MS_PER_MIN;
      const end5m = NOW_MS;
      const result = getNextZoomOutRange(start5m, end5m) as ZoomOutResult;

      expect(result).not.toBeNull();
      expect(result.preset).toBe('15m');
      const [newStart, newEnd] = result.range;
      expect(newEnd - newStart).toBe(15 * MS_PER_MIN);
    });
  });
});

@@ -1,139 +0,0 @@
/**
 * Custom Time Picker zoom-out ladder:
 * - Until 1 day: 15m → 45m → 2hr → 7hr → 21hr
 * - Then fixed: 1d → 2d → 3d → 1w → 2w → 1m
 * - At 1 month: zoom out is disabled (max range)
 */

import type {
  CustomTimeType,
  Time,
} from 'container/TopNav/DateTimeSelectionV2/types';

const MS_PER_MIN = 60 * 1000;
const MS_PER_HOUR = 60 * MS_PER_MIN;
const MS_PER_DAY = 24 * MS_PER_HOUR;
const MS_PER_WEEK = 7 * MS_PER_DAY;

const ZOOM_OUT_LADDER_MS: number[] = [
  15 * MS_PER_MIN, // 15m
  45 * MS_PER_MIN, // 45m
  2 * MS_PER_HOUR, // 2hr
  7 * MS_PER_HOUR, // 7hr
  21 * MS_PER_HOUR, // 21hr
  1 * MS_PER_DAY, // 1d
  2 * MS_PER_DAY, // 2d
  3 * MS_PER_DAY, // 3d
  1 * MS_PER_WEEK, // 1w
  2 * MS_PER_WEEK, // 2w
  30 * MS_PER_DAY, // 1m
];

const LADDER_LAST_INDEX = ZOOM_OUT_LADDER_MS.length - 1;
const MAX_DURATION = ZOOM_OUT_LADDER_MS[LADDER_LAST_INDEX];
const MIN_LADDER_DURATION_MS = ZOOM_OUT_LADDER_MS[0]; // 15m - below this we use 3x

export const MAX_ZOOM_OUT_DURATION_MS = MAX_DURATION;

/** Returns true when zoom out should be disabled (range at or beyond 1 month) */
export function isZoomOutDisabled(durationMs: number): boolean {
  return durationMs >= MAX_ZOOM_OUT_DURATION_MS;
}

/** Preset labels for ladder steps supported by GetMinMax (shows "Last 15 minutes" etc. instead of "Custom") */
const PRESET_FOR_DURATION_MS: Record<number, Time | CustomTimeType> = {
  [15 * MS_PER_MIN]: '15m',
  [45 * MS_PER_MIN]: '45m',
  [2 * MS_PER_HOUR]: '2h',
  [7 * MS_PER_HOUR]: '7h',
  [21 * MS_PER_HOUR]: '21h',
  [1 * MS_PER_DAY]: '1d',
  [2 * MS_PER_DAY]: '2d',
  [3 * MS_PER_DAY]: '3d',
  [1 * MS_PER_WEEK]: '1w',
  [2 * MS_PER_WEEK]: '2w',
  [30 * MS_PER_DAY]: '1month',
};

/**
 * Returns the next duration in the zoom-out ladder for the given current duration.
 * Below 15m: zoom out 3x until we reach 15m, then continue with the ladder.
 * If at or past 1 month, returns MAX_DURATION (no zoom out - button is disabled).
 */
export function getNextDurationInLadder(durationMs: number): number {
  if (durationMs >= MAX_DURATION) {
    return MAX_DURATION; // No zoom out beyond 1 month
  }

  // Below 15m: zoom out 3x until we reach 15m
  if (durationMs < MIN_LADDER_DURATION_MS) {
    const next = durationMs * 3;
    return Math.min(next, MIN_LADDER_DURATION_MS);
  }

  // At or above 15m: use the fixed ladder
  for (let i = 0; i < ZOOM_OUT_LADDER_MS.length; i++) {
    if (ZOOM_OUT_LADDER_MS[i] > durationMs) {
      return ZOOM_OUT_LADDER_MS[i];
    }
  }

  return MAX_DURATION;
}

export interface ZoomOutResult {
  range: [number, number];
  /** Preset key (e.g. '15m') when range matches a preset - use for display instead of "Custom Date Range" */
  preset: Time | CustomTimeType | null;
}

/**
 * Computes the next zoomed-out time range.
 * Phase 1 (center-anchored): While new end <= now, expand from center.
 * Phase 2 (end-anchored at now): When new end would exceed now, anchor end at now and move start backward.
 *
 * @returns ZoomOutResult with range and preset (or null if no change)
 */
export function getNextZoomOutRange(
  startMs: number,
  endMs: number,
): ZoomOutResult | null {
  const nowMs = Date.now();
  const durationMs = endMs - startMs;

  if (durationMs <= 0) {
    return null;
  }

  const newDurationMs = getNextDurationInLadder(durationMs);

  // No zoom out when already at max (1 month)
  if (newDurationMs <= durationMs) {
    return null;
  }
  const centerMs = startMs + durationMs / 2;
  const computedEndMs = centerMs + newDurationMs / 2;

  let newStartMs: number;
  let newEndMs: number;

  const isPhase1 = computedEndMs <= nowMs;
  if (isPhase1) {
    // Phase 1: center-anchored (historical range not ending at now)
    newStartMs = centerMs - newDurationMs / 2;
    newEndMs = computedEndMs;
  } else {
    // Phase 2: end-anchored at now
    newStartMs = nowMs - newDurationMs;
    newEndMs = nowMs;
  }

  // Phase 2 only: use preset so GetMinMax produces "last X from now".
  // Phase 1: preset=null so the center-anchored range is preserved (GetMinMax would discard it).
  const preset = isPhase1 ? null : PRESET_FOR_DURATION_MS[newDurationMs] ?? null;

  return {
    range: [Math.round(newStartMs), Math.round(newEndMs)],
    preset,
  };
}
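To make the two phases concrete, a worked example against the functions above (values only; assumes the fixed ladder and `Date.now()` as shown):

```typescript
// Current range: 22h ending at now -> next rung above 22h is 1d (24h).
// computedEnd = center + 12h = now + 1h > now, so Phase 2 applies:
//   range  = [now - 24h, now]
//   preset = '1d'  -> UI shows "Last 1 day" instead of "Custom Date Range"
const now = Date.now();
const result = getNextZoomOutRange(now - 22 * 60 * 60 * 1000, now);
console.log(result?.preset); // '1d'
```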

@@ -1,3 +1,16 @@
import { useParams } from 'react-router-dom';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';

import DashboardPage from './DashboardPage';

export default DashboardPage;
function DashboardPageWithProvider(): JSX.Element {
  const { dashboardId } = useParams<{ dashboardId: string }>();

  return (
    <DashboardProvider dashboardId={dashboardId}>
      <DashboardPage />
    </DashboardProvider>
  );
}

export default DashboardPageWithProvider;

@@ -0,0 +1,146 @@
import { Route } from 'react-router-dom';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { rest, server } from 'mocks-server/server';
import { render, screen, waitFor } from 'tests/test-utils';

import DashboardWidget from '../index';

const DASHBOARD_ID = 'dash-1';
const WIDGET_ID = 'widget-abc';

const mockDashboardResponse = {
  status: 'success',
  data: {
    id: DASHBOARD_ID,
    createdAt: '2024-01-01T00:00:00Z',
    createdBy: 'test',
    updatedAt: '2024-01-01T00:00:00Z',
    updatedBy: 'test',
    isLocked: false,
    data: {
      collapsableRowsMigrated: true,
      description: '',
      name: '',
      panelMap: {},
      tags: [],
      title: 'Test Dashboard',
      uploadedGrafana: false,
      uuid: '',
      version: '',
      variables: {},
      widgets: [],
      layout: [],
    },
  },
};

const mockSafeNavigate = jest.fn();

jest.mock('hooks/useSafeNavigate', () => ({
  useSafeNavigate: (): { safeNavigate: jest.Mock } => ({
    safeNavigate: mockSafeNavigate,
  }),
}));

jest.mock('container/NewWidget', () => ({
  __esModule: true,
  default: (): JSX.Element => <div data-testid="new-widget">NewWidget</div>,
}));

// nuqs's useQueryState doesn't read from MemoryRouter, so we mock it to return
// controlled values via the `mockQueryState` map below.
const mockQueryState: Record<string, string | null> = {};

jest.mock('nuqs', () => ({
  ...jest.requireActual('nuqs'),
  useQueryState: (key: string): [string | null, jest.Mock] => [
    mockQueryState[key] ?? null,
    jest.fn(),
  ],
}));

// Wrap component in a Route so useParams can resolve dashboardId
function renderAtRoute(
  queryState: Record<string, string | null> = {},
): ReturnType<typeof render> {
  Object.assign(mockQueryState, queryState);
  return render(
    <Route path="/dashboard/:dashboardId/new">
      <DashboardWidget />
    </Route>,
    undefined,
    { initialRoute: `/dashboard/${DASHBOARD_ID}/new` },
  );
}

beforeEach(() => {
  mockSafeNavigate.mockClear();
  Object.keys(mockQueryState).forEach((k) => delete mockQueryState[k]);
});

describe('DashboardWidget', () => {
  it('redirects to dashboard when widgetId is missing', async () => {
    renderAtRoute({ graphType: PANEL_TYPES.TIME_SERIES });

    await waitFor(() => {
      expect(mockSafeNavigate).toHaveBeenCalled();
    });

    const [navigatedTo] = mockSafeNavigate.mock.calls[0];
    expect(navigatedTo).toContain(`/dashboard/${DASHBOARD_ID}`);
  });

  it('redirects to dashboard when graphType is missing', async () => {
    renderAtRoute({ widgetId: WIDGET_ID });

    await waitFor(() => {
      expect(mockSafeNavigate).toHaveBeenCalled();
    });

    const [navigatedTo] = mockSafeNavigate.mock.calls[0];
    expect(navigatedTo).toContain(`/dashboard/${DASHBOARD_ID}`);
  });

  it('shows spinner while dashboard is loading', () => {
    server.use(
      rest.get(
        `http://localhost/api/v1/dashboards/${DASHBOARD_ID}`,
        (_req, res, ctx) => res(ctx.delay('infinite')),
      ),
    );

    renderAtRoute({ widgetId: WIDGET_ID, graphType: PANEL_TYPES.TIME_SERIES });

    expect(screen.getByRole('img', { name: 'loading' })).toBeInTheDocument();
  });

  it('shows error message when dashboard fetch fails', async () => {
    server.use(
      rest.get(
        `http://localhost/api/v1/dashboards/${DASHBOARD_ID}`,
        (_req, res, ctx) => res(ctx.status(500), ctx.json({ status: 'error' })),
      ),
    );

    renderAtRoute({ widgetId: WIDGET_ID, graphType: PANEL_TYPES.TIME_SERIES });

    await waitFor(() => {
      expect(screen.getByText('Something went wrong')).toBeInTheDocument();
    });
  });

  it('renders NewWidget when dashboard loads successfully', async () => {
    server.use(
      rest.get(
        `http://localhost/api/v1/dashboards/${DASHBOARD_ID}`,
        (_req, res, ctx) => res(ctx.status(200), ctx.json(mockDashboardResponse)),
      ),
    );

    renderAtRoute({ widgetId: WIDGET_ID, graphType: PANEL_TYPES.TIME_SERIES });

    await waitFor(() => {
      expect(screen.getByTestId('new-widget')).toBeInTheDocument();
    });
  });
});

@@ -1,50 +1,98 @@
import { useEffect, useState } from 'react';
import { generatePath, useLocation, useParams } from 'react-router-dom';
import { useQuery } from 'react-query';
import { generatePath, useParams } from 'react-router-dom';
import { Card, Typography } from 'antd';
import getDashboard from 'api/v1/dashboards/id/get';
import Spinner from 'components/Spinner';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { DASHBOARD_CACHE_TIME } from 'constants/queryCacheTime';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import NewWidget from 'container/NewWidget';
import { isDrilldownEnabled } from 'container/QueryTable/Drilldown/drilldownUtils';
import { useTransformDashboardVariables } from 'hooks/dashboard/useTransformDashboardVariables';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import useUrlQuery from 'hooks/useUrlQuery';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { Widgets } from 'types/api/dashboard/getAll';
import { parseAsStringEnum, useQueryState } from 'nuqs';
import { setDashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import { Dashboard } from 'types/api/dashboard/getAll';

function DashboardWidget(): JSX.Element | null {
  const { search } = useLocation();
  const { dashboardId } = useParams<DashboardWidgetPageParams>();
  const { dashboardId } = useParams<{
    dashboardId: string;
  }>();
  const [widgetId] = useQueryState('widgetId');
  const [graphType] = useQueryState(
    'graphType',
    parseAsStringEnum<PANEL_TYPES>(Object.values(PANEL_TYPES)),
  );

  const { safeNavigate } = useSafeNavigate();

  const [selectedGraph, setSelectedGraph] = useState<PANEL_TYPES>();

  const { selectedDashboard, dashboardResponse } = useDashboard();

  const params = useUrlQuery();

  const widgetId = params.get('widgetId');
  const { data } = selectedDashboard || {};
  const { widgets } = data || {};

  const selectedWidget = widgets?.find((e) => e.id === widgetId) as Widgets;

  useEffect(() => {
    const params = new URLSearchParams(search);
    const graphType = params.get('graphType') as PANEL_TYPES | null;

    if (graphType === null) {
    if (!graphType || !widgetId) {
      safeNavigate(generatePath(ROUTES.DASHBOARD, { dashboardId }));
    } else {
      setSelectedGraph(graphType);
    } else if (!dashboardId) {
      safeNavigate(ROUTES.HOME);
    }
  }, [dashboardId, safeNavigate, search]);
  }, [graphType, widgetId, dashboardId, safeNavigate]);

  if (selectedGraph === undefined || dashboardResponse.isLoading) {
  if (!widgetId || !graphType) {
    return null;
  }

  return (
    <DashboardWidgetInternal
      dashboardId={dashboardId}
      widgetId={widgetId}
      graphType={graphType}
    />
  );
}

function DashboardWidgetInternal({
  dashboardId,
  widgetId,
  graphType,
}: {
  dashboardId: string;
  widgetId: string;
  graphType: PANEL_TYPES;
}): JSX.Element | null {
  const [selectedDashboard, setSelectedDashboard] = useState<
    Dashboard | undefined
  >(undefined);

  const { transformDashboardVariables } = useTransformDashboardVariables(
    dashboardId,
  );

  const {
    isFetching: isFetchingDashboardResponse,
    isError: isErrorDashboardResponse,
  } = useQuery([REACT_QUERY_KEY.DASHBOARD_BY_ID, dashboardId, widgetId], {
    enabled: true,
    queryFn: async () =>
      await getDashboard({
        id: dashboardId,
      }),
    refetchOnWindowFocus: false,
    cacheTime: DASHBOARD_CACHE_TIME,
    onSuccess: (response) => {
      const updatedDashboardData = transformDashboardVariables(response.data);
      setSelectedDashboard(updatedDashboardData);
      setDashboardVariablesStore({
        dashboardId,
        variables: updatedDashboardData.data.variables,
      });
    },
  });

  if (isFetchingDashboardResponse) {
    return <Spinner tip="Loading.." />;
  }

  if (dashboardResponse.isError) {
  if (isErrorDashboardResponse) {
    return (
      <Card>
        <Typography>{SOMETHING_WENT_WRONG}</Typography>
@@ -54,16 +102,11 @@ function DashboardWidget(): JSX.Element | null {

  return (
    <NewWidget
      yAxisUnit={selectedWidget?.yAxisUnit}
      selectedGraph={selectedGraph}
      fillSpans={selectedWidget?.fillSpans}
      dashboardId={dashboardId}
      selectedGraph={graphType}
      enableDrillDown={isDrilldownEnabled()}
      selectedDashboard={selectedDashboard}
    />
  );
}

export interface DashboardWidgetPageParams {
  dashboardId: string;
}

export default DashboardWidget;
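A note on the query-param guard above: nuqs's `parseAsStringEnum` yields `null` for values outside the enum, which is what lets the component redirect on a bad `graphType`. A standalone sketch (the two enum members here are illustrative assumptions, not the repo's full PANEL_TYPES):

```typescript
import { parseAsStringEnum, useQueryState } from 'nuqs';

enum PANEL_TYPES {
  TIME_SERIES = 'graph',
  TABLE = 'table',
}

function usePanelType(): PANEL_TYPES | null {
  const [graphType] = useQueryState(
    'graphType',
    parseAsStringEnum<PANEL_TYPES>(Object.values(PANEL_TYPES)),
  );
  return graphType; // null when ?graphType= is missing or not a known panel type
}
```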

@@ -8,7 +8,6 @@ import {
} from 'mocks-server/__mockdata__/dashboards';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { DashboardProvider } from 'providers/Dashboard/Dashboard';
import { fireEvent, render, waitFor } from 'tests/test-utils';

jest.mock('container/DashboardContainer/DashboardDescription/utils', () => ({
@@ -19,11 +18,6 @@ jest.mock('container/DashboardContainer/DashboardDescription/utils', () => ({
jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useLocation: jest.fn(),
  useRouteMatch: jest.fn().mockReturnValue({
    params: {
      dashboardId: 4,
    },
  }),
}));

const mockWindowOpen = jest.fn();
@@ -47,9 +41,7 @@ describe('dashboard list page', () => {
      <MemoryRouter
        initialEntries={['/dashbords?columnKey=asgard&order=stones&page=1']}
      >
        <DashboardProvider>
          <DashboardsList />
        </DashboardProvider>
        <DashboardsList />
      </MemoryRouter>,
    );

@@ -71,9 +63,7 @@ describe('dashboard list page', () => {
      <MemoryRouter
        initialEntries={['/dashbords?columnKey=createdAt&order=descend&page=1']}
      >
        <DashboardProvider>
          <DashboardsList />
        </DashboardProvider>
        <DashboardsList />
      </MemoryRouter>,
    );

@@ -92,9 +82,7 @@ describe('dashboard list page', () => {
          '/dashbords?columnKey=createdAt&order=descend&page=1&search=tho',
        ]}
      >
        <DashboardProvider>
          <DashboardsList />
        </DashboardProvider>
        <DashboardsList />
      </MemoryRouter>,
    );

@@ -135,9 +123,7 @@ describe('dashboard list page', () => {
          '/dashbords?columnKey=createdAt&order=descend&page=1&search=tho',
        ]}
      >
        <DashboardProvider>
          <DashboardsList />
        </DashboardProvider>
        <DashboardsList />
      </MemoryRouter>,
    );

@@ -164,9 +150,7 @@ describe('dashboard list page', () => {
          '/dashbords?columnKey=createdAt&order=descend&page=1&search=tho',
        ]}
      >
        <DashboardProvider>
          <DashboardsList />
        </DashboardProvider>
        <DashboardsList />
      </MemoryRouter>,
    );

@@ -196,9 +180,7 @@ describe('dashboard list page', () => {
          '/dashbords?columnKey=createdAt&order=descend&page=1&search=tho',
        ]}
      >
        <DashboardProvider>
          <DashboardsList />
        </DashboardProvider>
        <DashboardsList />
      </MemoryRouter>,
    );

@@ -63,6 +63,7 @@ function SettingsPage(): JSX.Element {
        isAdmin &&
        (item.key === ROUTES.BILLING ||
          item.key === ROUTES.ORG_SETTINGS ||
          item.key === ROUTES.MEMBERS_SETTINGS ||
          item.key === ROUTES.MY_SETTINGS ||
          item.key === ROUTES.SHORTCUTS)
    ),

@@ -36,6 +36,7 @@ export const getRoutes = (
  if (isWorkspaceBlocked && isAdmin) {
    settings.push(
      ...organizationSettings(t),
      ...membersSettings(t),
      ...mySettings(t),
      ...billingSettings(t),
      ...keyboardShortcuts(t),

@@ -14,26 +14,21 @@ import { useTranslation } from 'react-i18next';
import { useMutation, useQuery, UseQueryResult } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useDispatch, useSelector } from 'react-redux';
import { useRouteMatch } from 'react-router-dom';
import { Modal } from 'antd';
import getDashboard from 'api/v1/dashboards/id/get';
import locked from 'api/v1/dashboards/id/lock';
import { ALL_SELECTED_VALUE } from 'components/NewSelect/utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import dayjs, { Dayjs } from 'dayjs';
import { useDashboardVariablesFromLocalStorage } from 'hooks/dashboard/useDashboardFromLocalStorage';
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useTransformDashboardVariables } from 'hooks/dashboard/useTransformDashboardVariables';
import useTabVisibility from 'hooks/useTabFocus';
import { getUpdatedLayout } from 'lib/dashboard/getUpdatedLayout';
import { getMinMaxForSelectedTime } from 'lib/getMinMax';
import { defaultTo, isEmpty } from 'lodash-es';
import { defaultTo } from 'lodash-es';
import isEqual from 'lodash-es/isEqual';
import isUndefined from 'lodash-es/isUndefined';
import omitBy from 'lodash-es/omitBy';
import { useAppContext } from 'providers/App/App';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import { normalizeUrlValueForVariable } from 'providers/Dashboard/normalizeUrlValue';
import { useErrorModal } from 'providers/ErrorModalProvider';
// eslint-disable-next-line no-restricted-imports
import { Dispatch } from 'redux';
@@ -41,10 +36,9 @@ import { AppState } from 'store/reducers';
import AppActions from 'types/actions';
import { UPDATE_TIME_INTERVAL } from 'types/actions/globalTime';
import { SuccessResponseV2 } from 'types/api';
import { Dashboard, IDashboardVariable } from 'types/api/dashboard/getAll';
import { Dashboard } from 'types/api/dashboard/getAll';
import APIError from 'types/api/error';
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as generateUUID } from 'uuid';

import {
  DASHBOARD_CACHE_TIME,
@@ -75,8 +69,6 @@ export const DashboardContext = createContext<IDashboardContext>({
  setLayouts: () => {},
  setSelectedDashboard: () => {},
  updatedTimeRef: {} as React.MutableRefObject<Dayjs | null>,
  toScrollWidgetId: '',
  setToScrollWidgetId: () => {},
  updateLocalStorageDashboardVariables: () => {},
  dashboardQueryRangeCalled: false,
  setDashboardQueryRangeCalled: () => {},
@@ -85,18 +77,13 @@ export const DashboardContext = createContext<IDashboardContext>({
  setColumnWidths: () => {},
});

interface Props {
  dashboardId: string;
}

// eslint-disable-next-line sonarjs/cognitive-complexity
export function DashboardProvider({
  children,
}: PropsWithChildren): JSX.Element {
  dashboardId,
}: PropsWithChildren<{ dashboardId: string }>): JSX.Element {
  const [isDashboardSliderOpen, setIsDashboardSlider] = useState<boolean>(false);

  const [toScrollWidgetId, setToScrollWidgetId] = useState<string>('');

  const [isDashboardLocked, setIsDashboardLocked] = useState<boolean>(false);

  const [
@@ -104,11 +91,6 @@ export function DashboardProvider({
    setDashboardQueryRangeCalled,
  ] = useState<boolean>(false);

  const isDashboardPage = useRouteMatch<Props>({
    path: ROUTES.DASHBOARD,
    exact: true,
  });

  const { showErrorModal } = useErrorModal();

  const dispatch = useDispatch<Dispatch<AppActions>>();
@@ -119,11 +101,6 @@ export function DashboardProvider({

  const [onModal, Content] = Modal.useModal();

  const isDashboardWidgetPage = useRouteMatch<Props>({
    path: ROUTES.DASHBOARD_WIDGET,
    exact: true,
  });

  const [layouts, setLayouts] = useState<Layout[]>([]);

  const [panelMap, setPanelMap] = useState<
@@ -132,11 +109,6 @@ export function DashboardProvider({

  const { isLoggedIn } = useAppContext();

  const dashboardId =
    (isDashboardPage
      ? isDashboardPage.params.dashboardId
      : isDashboardWidgetPage?.params.dashboardId) || '';

  const [selectedDashboard, setSelectedDashboard] = useState<Dashboard>();
  const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
  const savedDashboardId = useDashboardVariablesSelector((s) => s.dashboardId);
@@ -161,9 +133,10 @@ export function DashboardProvider({
  const {
    currentDashboard,
    updateLocalStorageDashboardVariables,
  } = useDashboardVariablesFromLocalStorage(dashboardId);

  const { getUrlVariables, updateUrlVariable } = useVariablesFromUrl();
    getUrlVariables,
    updateUrlVariable,
    transformDashboardVariables,
  } = useTransformDashboardVariables(dashboardId);

  const updatedTimeRef = useRef<Dayjs | null>(null); // Using ref to store the updated time
  const modalRef = useRef<any>(null);
@@ -175,108 +148,14 @@ export function DashboardProvider({

  const [isDashboardFetching, setIsDashboardFetching] = useState<boolean>(false);

  const mergeDBWithLocalStorage = (
    data: Dashboard,
    localStorageVariables: any,
  ): Dashboard => {
    const updatedData = data;
    if (data && localStorageVariables) {
      const updatedVariables = data.data.variables;
      const variablesFromUrl = getUrlVariables();
      Object.keys(data.data.variables).forEach((variable) => {
        const variableData = data.data.variables[variable];

        // values from url
        const urlVariable = variableData?.name
          ? variablesFromUrl[variableData?.name] || variablesFromUrl[variableData.id]
          : variablesFromUrl[variableData.id];

        let updatedVariable = {
          ...data.data.variables[variable],
          ...localStorageVariables[variableData.name as any],
        };

        // respect the url variable if it is set, override the others
        if (!isEmpty(urlVariable)) {
          if (urlVariable === ALL_SELECTED_VALUE) {
            updatedVariable = {
              ...updatedVariable,
              allSelected: true,
            };
          } else {
            // Normalize URL value to match variable's multiSelect configuration
            const normalizedValue = normalizeUrlValueForVariable(
              urlVariable,
              variableData,
            );

            updatedVariable = {
              ...updatedVariable,
              selectedValue: normalizedValue,
              // Only set allSelected to false if showALLOption is available
              ...(updatedVariable?.showALLOption && { allSelected: false }),
            };
          }
        }

        updatedVariables[variable] = updatedVariable;
      });
      updatedData.data.variables = updatedVariables;
    }
    return updatedData;
  };
  // As we do not have order and ID's in the variables object, we have to process variables to add order and ID if they do not exist in the variables object
  // eslint-disable-next-line sonarjs/cognitive-complexity
  const transformDashboardVariables = (data: Dashboard): Dashboard => {
    if (data && data.data && data.data.variables) {
      const clonedDashboardData = mergeDBWithLocalStorage(
        JSON.parse(JSON.stringify(data)),
        currentDashboard,
      );
      const { variables } = clonedDashboardData.data;
      const existingOrders: Set<number> = new Set();

      for (const key in variables) {
        // eslint-disable-next-line no-prototype-builtins
        if (variables.hasOwnProperty(key)) {
          const variable: IDashboardVariable = variables[key];

          // Check if 'order' property doesn't exist or is undefined
          if (variable.order === undefined) {
            // Find a unique order starting from 0
            let order = 0;
            while (existingOrders.has(order)) {
              order += 1;
            }

            variable.order = order;
            existingOrders.add(order);
            // ! BWC - Specific case for backward compatibility where textboxValue was used instead of defaultValue
            if (variable.type === 'TEXTBOX' && !variable.defaultValue) {
              variable.defaultValue = variable.textboxValue || '';
            }
          }

          if (variable.id === undefined) {
            variable.id = generateUUID();
          }
        }
      }

      return clonedDashboardData;
    }

    return data;
  };
  const dashboardResponse = useQuery(
    [
      REACT_QUERY_KEY.DASHBOARD_BY_ID,
      isDashboardPage?.params,
      dashboardId,
      globalTime.isAutoRefreshDisabled,
    ],
    {
      enabled: (!!isDashboardPage || !!isDashboardWidgetPage) && isLoggedIn,
      enabled: !!dashboardId && isLoggedIn,
      queryFn: async () => {
        setIsDashboardFetching(true);
        try {
@@ -299,13 +178,14 @@ export function DashboardProvider({
      },

      onSuccess: (data: SuccessResponseV2<Dashboard>) => {
        // if the url variable is not set for any variable, set it to the default value
        const variables = data?.data?.data?.variables;
        const updatedDashboardData = transformDashboardVariables(data?.data);

        // initialize URL variables after dashboard state is set to avoid race conditions
        const variables = updatedDashboardData?.data?.variables;
        if (variables) {
          initializeDefaultVariables(variables, getUrlVariables, updateUrlVariable);
        }

        const updatedDashboardData = transformDashboardVariables(data?.data);
        const updatedDate = dayjs(updatedDashboardData?.updatedAt);

        setIsDashboardLocked(updatedDashboardData?.locked || false);
@@ -396,11 +276,7 @@ export function DashboardProvider({

  useEffect(() => {
    // make the call on tab visibility only if the user is on dashboard / widget page
    if (
      isVisible &&
      updatedTimeRef.current &&
      (!!isDashboardPage || !!isDashboardWidgetPage)
    ) {
    if (isVisible && updatedTimeRef.current && !!dashboardId) {
      dashboardResponse.refetch();
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
@@ -443,7 +319,6 @@ export function DashboardProvider({

  const value: IDashboardContext = useMemo(
    () => ({
      toScrollWidgetId,
      isDashboardSliderOpen,
      isDashboardLocked,
      handleToggleDashboardSlider,
@@ -457,7 +332,6 @@ export function DashboardProvider({
      setPanelMap,
      setSelectedDashboard,
      updatedTimeRef,
      setToScrollWidgetId,
      updateLocalStorageDashboardVariables,
      dashboardQueryRangeCalled,
      setDashboardQueryRangeCalled,
@@ -474,7 +348,6 @@ export function DashboardProvider({
      dashboardId,
      layouts,
      panelMap,
      toScrollWidgetId,
      updateLocalStorageDashboardVariables,
      currentDashboard,
      dashboardQueryRangeCalled,

@@ -2,11 +2,10 @@ import { QueryClient, QueryClientProvider } from 'react-query';
// eslint-disable-next-line no-restricted-imports
import { useSelector } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { render, screen, waitFor } from '@testing-library/react';
import { render, RenderResult, screen, waitFor } from '@testing-library/react';
import getDashboard from 'api/v1/dashboards/id/get';
import { DASHBOARD_CACHE_TIME_ON_REFRESH_ENABLED } from 'constants/queryCacheTime';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import ROUTES from 'constants/routes';
import { DashboardProvider, useDashboard } from 'providers/Dashboard/Dashboard';
import { IDashboardVariable } from 'types/api/dashboard/getAll';

@@ -19,30 +18,28 @@ jest.mock('api/v1/dashboards/id/get');
jest.mock('api/v1/dashboards/id/lock');
const mockGetDashboard = jest.mocked(getDashboard);

// Mock useRouteMatch to simulate different route scenarios
const mockUseRouteMatch = jest.fn();
jest.mock('react-router-dom', () => ({
  ...jest.requireActual('react-router-dom'),
  useRouteMatch: (): any => mockUseRouteMatch(),
}));

// Mock other dependencies
jest.mock('hooks/useSafeNavigate', () => ({
  useSafeNavigate: (): any => ({
  useSafeNavigate: (): { safeNavigate: jest.Mock } => ({
    safeNavigate: jest.fn(),
  }),
}));

// Mock only the essential dependencies for Dashboard provider
jest.mock('providers/App/App', () => ({
  useAppContext: (): any => ({
  useAppContext: (): {
    isLoggedIn: boolean;
    user: { email: string; role: string };
  } => ({
    isLoggedIn: true,
    user: { email: 'test@example.com', role: 'ADMIN' },
  }),
}));

jest.mock('providers/ErrorModalProvider', () => ({
  useErrorModal: (): any => ({ showErrorModal: jest.fn() }),
  useErrorModal: (): { showErrorModal: jest.Mock } => ({
    showErrorModal: jest.fn(),
  }),
}));

jest.mock('react-redux', () => ({
@@ -60,11 +57,10 @@ jest.mock('uuid', () => ({ v4: jest.fn(() => 'mock-uuid') }));
function TestComponent(): JSX.Element {
  const { dashboardResponse, selectedDashboard } = useDashboard();
  const { dashboardVariables } = useDashboardVariables();
  const dashboardId = selectedDashboard?.id;

  return (
    <div>
      <div data-testid="dashboard-id">{dashboardId}</div>
      <div data-testid="dashboard-id">{selectedDashboard?.id}</div>
      <div data-testid="query-status">{dashboardResponse.status}</div>
      <div data-testid="is-loading">{dashboardResponse.isLoading.toString()}</div>
      <div data-testid="is-fetching">
@@ -94,27 +90,15 @@ function createTestQueryClient(): QueryClient {

// Helper to render with dashboard provider
function renderWithDashboardProvider(
  initialRoute = '/dashboard/test-dashboard-id',
  routeMatchParams?: { dashboardId: string } | null,
): any {
  dashboardId = 'test-dashboard-id',
): RenderResult {
  const queryClient = createTestQueryClient();

  // Mock the route match
  mockUseRouteMatch.mockReturnValue(
    routeMatchParams
      ? {
          path: ROUTES.DASHBOARD,
          url: `/dashboard/${routeMatchParams.dashboardId}`,
          isExact: true,
          params: routeMatchParams,
        }
      : null,
  );
  const initialRoute = dashboardId ? `/dashboard/${dashboardId}` : '/dashboard';

  return render(
    <QueryClientProvider client={queryClient}>
      <MemoryRouter initialEntries={[initialRoute]}>
        <DashboardProvider>
        <DashboardProvider dashboardId={dashboardId}>
          <TestComponent />
        </DashboardProvider>
      </MemoryRouter>
@@ -188,7 +172,7 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
  describe('Query Key Behavior', () => {
    it('should include route params in query key when on dashboard page', async () => {
      const dashboardId = 'test-dashboard-id';
      renderWithDashboardProvider(`/dashboard/${dashboardId}`, { dashboardId });
      renderWithDashboardProvider(dashboardId);

      await waitFor(() => {
        expect(mockGetDashboard).toHaveBeenCalledWith({ id: dashboardId });
@@ -203,30 +187,17 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
      const newDashboardId = 'new-dashboard-id';

      // First render with initial dashboard ID
      const { rerender } = renderWithDashboardProvider(
        `/dashboard/${initialDashboardId}`,
        {
          dashboardId: initialDashboardId,
        },
      );
      const { rerender } = renderWithDashboardProvider(initialDashboardId);

      await waitFor(() => {
        expect(mockGetDashboard).toHaveBeenCalledWith({ id: initialDashboardId });
      });

      // Change route params to simulate navigation
      mockUseRouteMatch.mockReturnValue({
        path: ROUTES.DASHBOARD,
        url: `/dashboard/${newDashboardId}`,
        isExact: true,
        params: { dashboardId: newDashboardId },
      });

      // Rerender with new route
      // Rerender with new dashboard ID prop
      rerender(
        <QueryClientProvider client={createTestQueryClient()}>
          <MemoryRouter initialEntries={[`/dashboard/${newDashboardId}`]}>
            <DashboardProvider>
            <DashboardProvider dashboardId={newDashboardId}>
              <TestComponent />
            </DashboardProvider>
          </MemoryRouter>
@@ -241,50 +212,24 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
      expect(mockGetDashboard).toHaveBeenCalledTimes(2);
    });

    it('should not fetch when not on dashboard page', () => {
      // Mock no route match (not on dashboard page)
      mockUseRouteMatch.mockReturnValue(null);

      renderWithDashboardProvider('/some-other-page', null);
    it('should not fetch when no dashboardId is provided', () => {
      renderWithDashboardProvider('');

      // Should not call the API
      expect(mockGetDashboard).not.toHaveBeenCalled();
    });

    it('should handle undefined route params gracefully', async () => {
      // Mock route match with undefined params
      mockUseRouteMatch.mockReturnValue({
        path: ROUTES.DASHBOARD,
        url: '/dashboard/undefined',
        isExact: true,
        params: undefined,
      });

      renderWithDashboardProvider('/dashboard/undefined');

      // Should not call API when params are undefined
      expect(mockGetDashboard).not.toHaveBeenCalled();
    });
  });

  describe('Cache Behavior', () => {
    it('should create separate cache entries for different route params', async () => {
    it('should create separate cache entries for different dashboardIds', async () => {
      const queryClient = createTestQueryClient();
      const dashboardId1 = 'dashboard-1';
      const dashboardId2 = 'dashboard-2';

      // First dashboard
      mockUseRouteMatch.mockReturnValue({
        path: ROUTES.DASHBOARD,
        url: `/dashboard/${dashboardId1}`,
        isExact: true,
        params: { dashboardId: dashboardId1 },
      });

      const { rerender } = render(
        <QueryClientProvider client={queryClient}>
          <MemoryRouter initialEntries={[`/dashboard/${dashboardId1}`]}>
            <DashboardProvider>
            <DashboardProvider dashboardId={dashboardId1}>
              <TestComponent />
            </DashboardProvider>
          </MemoryRouter>
@@ -295,18 +240,10 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
        expect(mockGetDashboard).toHaveBeenCalledWith({ id: dashboardId1 });
      });

      // Second dashboard
      mockUseRouteMatch.mockReturnValue({
        path: ROUTES.DASHBOARD,
        url: `/dashboard/${dashboardId2}`,
        isExact: true,
        params: { dashboardId: dashboardId2 },
      });

      rerender(
        <QueryClientProvider client={queryClient}>
          <MemoryRouter initialEntries={[`/dashboard/${dashboardId2}`]}>
            <DashboardProvider>
            <DashboardProvider dashboardId={dashboardId2}>
              <TestComponent />
            </DashboardProvider>
          </MemoryRouter>
@@ -325,13 +262,11 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
      expect(cacheKeys).toHaveLength(2);
      expect(cacheKeys[0]).toEqual([
        REACT_QUERY_KEY.DASHBOARD_BY_ID,
        { dashboardId: dashboardId1 },
        dashboardId1,
        true, // globalTime.isAutoRefreshDisabled
      ]);
      expect(cacheKeys[1]).toEqual([
        REACT_QUERY_KEY.DASHBOARD_BY_ID,
        { dashboardId: dashboardId2 },
        dashboardId2,
        true, // globalTime.isAutoRefreshDisabled
      ]);
@@ -348,17 +283,10 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
      const queryClient = createTestQueryClient();
      const dashboardId = 'auto-refresh-dashboard';

      mockUseRouteMatch.mockReturnValue({
        path: ROUTES.DASHBOARD,
        url: `/dashboard/${dashboardId}`,
        isExact: true,
        params: { dashboardId },
      });

      render(
        <QueryClientProvider client={queryClient}>
          <MemoryRouter initialEntries={[`/dashboard/${dashboardId}`]}>
            <DashboardProvider>
            <DashboardProvider dashboardId={dashboardId}>
              <TestComponent />
            </DashboardProvider>
          </MemoryRouter>
@@ -375,7 +303,7 @@ describe('Dashboard Provider - Query Key with Route Params', () => {
        .find(
          (query) =>
            query.queryKey[0] === REACT_QUERY_KEY.DASHBOARD_BY_ID &&
            query.queryKey[3] === false,
            query.queryKey[2] === false,
        );
      expect(dashboardQuery).toBeDefined();
      expect((dashboardQuery as { cacheTime: number }).cacheTime).toBe(
@@ -437,9 +365,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
    // Empty URL variables - tests initialization flow
    mockGetUrlVariables.mockReturnValue({});

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -455,6 +381,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
          multiSelect: false,
          allSelected: false,
          showALLOption: true,
          order: 0,
        },
        services: {
          id: 'svc-id',
@@ -462,6 +389,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
          multiSelect: true,
          allSelected: false,
          showALLOption: true,
          order: 1,
        },
      },
      mockGetUrlVariables,
@@ -493,9 +421,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
      .mockReturnValueOnce('development')
      .mockReturnValueOnce(['db', 'cache']);

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -555,9 +481,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {

    mockGetUrlVariables.mockReturnValue(urlVariables);

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -593,9 +517,7 @@ describe('Dashboard Provider - URL Variables Integration', () => {
      .mockReturnValueOnce('development')
      .mockReturnValueOnce(['api']);

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      // Verify normalization was called with the specific values and variable configs
@@ -662,9 +584,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
    } as any);
    /* eslint-enable @typescript-eslint/no-explicit-any */

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -706,9 +626,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
    } as any);
    /* eslint-enable @typescript-eslint/no-explicit-any */

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -751,9 +669,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
    } as any);
    /* eslint-enable @typescript-eslint/no-explicit-any */

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });
@@ -795,9 +711,7 @@ describe('Dashboard Provider - Textbox Variable Backward Compatibility', () => {
    } as any);
    /* eslint-enable @typescript-eslint/no-explicit-any */

    renderWithDashboardProvider(`/dashboard/${DASHBOARD_ID}`, {
      dashboardId: DASHBOARD_ID,
    });
    renderWithDashboardProvider(DASHBOARD_ID);

    await waitFor(() => {
      expect(mockGetDashboard).toHaveBeenCalledWith({ id: DASHBOARD_ID });

@@ -0,0 +1,13 @@
import { create } from 'zustand';

interface ScrollToWidgetIdState {
  toScrollWidgetId: string;
  setToScrollWidgetId: (widgetId: string) => void;
}

export const useScrollToWidgetIdStore = create<ScrollToWidgetIdState>(
  (set) => ({
    toScrollWidgetId: '',
    setToScrollWidgetId: (widgetId): void => set({ toScrollWidgetId: widgetId }),
  }),
);
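The new zustand store replaces the `toScrollWidgetId` state that was removed from the dashboard context above. A hypothetical consumer (component name and import path are assumptions for illustration):

```typescript
import { useScrollToWidgetIdStore } from 'providers/Dashboard/store/scrollToWidgetId'; // path assumed

// Any component can set the widget id to scroll to, without going
// through the DashboardProvider context at all.
function ScrollToWidgetButton({ widgetId }: { widgetId: string }): JSX.Element {
  const setToScrollWidgetId = useScrollToWidgetIdStore(
    (state) => state.setToScrollWidgetId,
  );

  return (
    <button type="button" onClick={(): void => setToScrollWidgetId(widgetId)}>
      Jump to widget
    </button>
  );
}
```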

@@ -23,8 +23,6 @@ export interface IDashboardContext {
    React.SetStateAction<Dashboard | undefined>
  >;
  updatedTimeRef: React.MutableRefObject<dayjs.Dayjs | null>;
  toScrollWidgetId: string;
  setToScrollWidgetId: React.Dispatch<React.SetStateAction<string>>;
  updateLocalStorageDashboardVariables: (
    id: string,
    selectedValue:

9
frontend/src/utils/maskedKey.ts
Normal file
@@ -0,0 +1,9 @@
/**
 * Masks a key string, showing only the first 2 and last 2 characters.
 */
export function getMaskedKey(key: string): string {
  if (!key || key.length < 4) {
    return key || 'N/A';
  }
  return `${key.substring(0, 2)}·······${key.slice(-2).trim()}`;
}
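Expected behaviour of the new helper, assuming the implementation above:

```typescript
getMaskedKey('sk_live_abcd1234'); // 'sk·······34'
getMaskedKey('abc'); // 'abc'  (too short to mask)
getMaskedKey(''); // 'N/A'
```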

@@ -1,28 +0,0 @@
import getLocalStorageKey from 'api/browser/localstorage/get';
import setLocalStorageKey from 'api/browser/localstorage/set';
import { LOCALSTORAGE } from 'constants/localStorage';

/**
 * Updates the stored time duration for a route in localStorage.
 * Used by both DateTimeSelectionV2 (manual time pick) and useZoomOut (zoom out button).
 *
 * @param pathname - The route path (e.g. /infrastructure-monitoring/hosts)
 * @param value - The time value to store (preset string like '1w' or JSON string for custom range)
 */
export function persistTimeDurationForRoute(
  pathname: string,
  value: string,
): void {
  const preRoutes = getLocalStorageKey(LOCALSTORAGE.METRICS_TIME_IN_DURATION);
  let preRoutesObject: Record<string, string> = {};
  try {
    preRoutesObject = preRoutes ? JSON.parse(preRoutes) : {};
  } catch {
    preRoutesObject = {};
  }
  const preRoute = { ...preRoutesObject, [pathname]: value };
  setLocalStorageKey(
    LOCALSTORAGE.METRICS_TIME_IN_DURATION,
    JSON.stringify(preRoute),
  );
}
|
||||
@@ -72,7 +72,7 @@ func (service *Service) SyncServers(ctx context.Context) error {

service.serversMtx.Lock()
for _, org := range orgs {
config, err := service.getConfig(ctx, org.ID.StringValue())
config, _, err := service.getConfig(ctx, org.ID.StringValue())
if err != nil {
service.settings.Logger().ErrorContext(ctx, "failed to get alertmanager config for org", "org_id", org.ID.StringValue(), "error", err)
continue
@@ -171,7 +171,7 @@ func (service *Service) Stop(ctx context.Context) error {
}

func (service *Service) newServer(ctx context.Context, orgID string) (*alertmanagerserver.Server, error) {
config, err := service.getConfig(ctx, orgID)
config, storedHash, err := service.getConfig(ctx, orgID)
if err != nil {
return nil, err
}
@@ -181,13 +181,16 @@ func (service *Service) newServer(ctx context.Context, orgID string) (*alertmana
return nil, err
}

beforeCompareAndSelectHash := config.StoreableConfig().Hash
config, err = service.compareAndSelectConfig(ctx, config)
if err != nil {
return nil, err
}

if beforeCompareAndSelectHash == config.StoreableConfig().Hash {
// compare against the hash of the config stored in the DB (before overlays
// were applied by getConfig). This ensures that overlay changes (e.g. new
// defaults from an upstream upgrade or something similar) trigger a DB update
// so that other code paths reading directly from the store see the up-to-date config.
if storedHash == config.StoreableConfig().Hash {
service.settings.Logger().DebugContext(ctx, "skipping config store update for org", "org_id", orgID, "hash", config.StoreableConfig().Hash)
return server, nil
}
@@ -200,27 +203,33 @@ func (service *Service) newServer(ctx context.Context, orgID string) (*alertmana
return server, nil
}

func (service *Service) getConfig(ctx context.Context, orgID string) (*alertmanagertypes.Config, error) {
// getConfig returns the config for the given orgID with overlays applied, along
// with the hash that was stored in the DB before overlays. When no config exists
// in the store yet the stored hash is empty.
func (service *Service) getConfig(ctx context.Context, orgID string) (*alertmanagertypes.Config, string, error) {
config, err := service.configStore.Get(ctx, orgID)
var storedHash string
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return nil, err
return nil, "", err
}

config, err = alertmanagertypes.NewDefaultConfig(service.config.Global, service.config.Route, orgID)
if err != nil {
return nil, err
return nil, "", err
}
} else {
storedHash = config.StoreableConfig().Hash
}

if err := config.SetGlobalConfig(service.config.Global); err != nil {
return nil, err
return nil, "", err
}
if err := config.SetRouteConfig(service.config.Route); err != nil {
return nil, err
return nil, "", err
}

return config, nil
return config, storedHash, nil
}

func (service *Service) compareAndSelectConfig(ctx context.Context, incomingConfig *alertmanagertypes.Config) (*alertmanagertypes.Config, error) {

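The shape of this change is easier to see in isolation: persistence is now decided by comparing the post-overlay hash against the hash that was actually stored in the DB, where an empty stored hash means no config existed yet. A minimal sketch of that decision (the helper name is hypothetical):

package main

import "fmt"

// shouldPersist is a hypothetical distillation of the check in newServer:
// an empty storedHash means no config existed in the store yet, and any
// difference (including one introduced by overlays) triggers a DB write.
func shouldPersist(storedHash, finalHash string) bool {
	return storedHash != finalHash
}

func main() {
	fmt.Println(shouldPersist("", "abc"))    // true: first write for the org
	fmt.Println(shouldPersist("abc", "abc")) // false: store already up to date
	fmt.Println(shouldPersist("abc", "def")) // true: overlays changed the config
}
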
@@ -2,9 +2,11 @@ module base

type user

type serviceaccount

type role
relations
define assignee: [user]
define assignee: [user, serviceaccount]

type organisation
relations

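With the new serviceaccount type, an assignee tuple can now name either kind of principal. An illustrative sketch of the {user, relation, object} tuple shape this enables (IDs are made up):

package main

import "fmt"

// tuple mirrors the {user, relation, object} shape of an OpenFGA-style
// relationship tuple; the field values below are illustrative only.
type tuple struct{ user, relation, object string }

func main() {
	before := tuple{user: "user:u-123", relation: "assignee", object: "role:editor"}
	after := tuple{user: "serviceaccount:sa-456", relation: "assignee", object: "role:editor"}
	fmt.Println(before, after)
}
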
@@ -111,7 +111,12 @@ func (handler *handler) Update(rw http.ResponseWriter, r *http.Request) {
return
}

serviceAccount.Update(req.Name, req.Email, req.Roles)
err = serviceAccount.Update(req.Name, req.Email, req.Roles)
if err != nil {
render.Error(rw, err)
return
}

err = handler.module.Update(ctx, valuer.MustNewUUID(claims.OrgID), serviceAccount)
if err != nil {
render.Error(rw, err)
@@ -147,7 +152,12 @@ func (handler *handler) UpdateStatus(rw http.ResponseWriter, r *http.Request) {
return
}

serviceAccount.UpdateStatus(req.Status)
err = serviceAccount.UpdateStatus(req.Status)
if err != nil {
render.Error(rw, err)
return
}

err = handler.module.UpdateStatus(ctx, valuer.MustNewUUID(claims.OrgID), serviceAccount)
if err != nil {
render.Error(rw, err)
@@ -290,7 +300,7 @@ func (handler *handler) UpdateFactorAPIKey(rw http.ResponseWriter, r *http.Reque
}

factorAPIKey.Update(req.Name, req.ExpiresAt)
err = handler.module.UpdateFactorAPIKey(ctx, serviceAccount.ID, factorAPIKey)
err = handler.module.UpdateFactorAPIKey(ctx, valuer.MustNewUUID(claims.OrgID), serviceAccount.ID, factorAPIKey)
if err != nil {
render.Error(rw, err)
return

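All three handler changes follow the same shape: the domain mutation now validates and returns an error, and the handler persists only on success. A hedged sketch of that pattern with stand-in types:

package main

import (
	"errors"
	"fmt"
)

// account is a stand-in for serviceaccounttypes.ServiceAccount.
type account struct{ name string }

// Update now validates and reports failure instead of mutating silently,
// which is what lets the handler bail out before touching the store.
func (a *account) Update(name string) error {
	if name == "" {
		return errors.New("name must not be empty")
	}
	a.name = name
	return nil
}

func main() {
	a := &account{}
	if err := a.Update(""); err != nil {
		fmt.Println("render error and return:", err) // handler path: render.Error(rw, err)
		return
	}
	fmt.Println("persist via module.Update")
}
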
@@ -5,6 +5,7 @@ import (

"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/serviceaccount"
"github.com/SigNoz/signoz/pkg/types/authtypes"
@@ -33,7 +34,7 @@ func (module *module) Create(ctx context.Context, orgID valuer.UUID, serviceAcco
}

// authz actions cannot run in sql transactions
err = module.authz.Grant(ctx, orgID, serviceAccount.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, serviceAccount.ID.String(), orgID, nil))
err = module.authz.Grant(ctx, orgID, serviceAccount.Roles, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, serviceAccount.ID.String(), orgID, nil))
if err != nil {
return err
}
@@ -60,6 +61,24 @@ func (module *module) Create(ctx context.Context, orgID valuer.UUID, serviceAcco
return nil
}

func (module *module) GetOrCreate(ctx context.Context, serviceAccount *serviceaccounttypes.ServiceAccount) (*serviceaccounttypes.ServiceAccount, error) {
existingServiceAccount, err := module.store.GetActiveByOrgIDAndName(ctx, serviceAccount.OrgID, serviceAccount.Name)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return nil, err
}

if existingServiceAccount != nil {
return serviceAccount, nil
}

err = module.Create(ctx, serviceAccount.OrgID, serviceAccount)
if err != nil {
return nil, err
}

return serviceAccount, nil
}

func (module *module) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID) (*serviceaccounttypes.ServiceAccount, error) {
storableServiceAccount, err := module.store.Get(ctx, orgID, id)
if err != nil {
@@ -138,7 +157,7 @@ func (module *module) Update(ctx context.Context, orgID valuer.UUID, input *serv

// gets the role diff if any to modify grants.
grants, revokes := serviceAccount.PatchRoles(input)
err = module.authz.ModifyGrant(ctx, orgID, revokes, grants, authtypes.MustNewSubject(authtypes.TypeableUser, serviceAccount.ID.String(), orgID, nil))
err = module.authz.ModifyGrant(ctx, orgID, revokes, grants, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, serviceAccount.ID.String(), orgID, nil))
if err != nil {
return err
}
@@ -171,26 +190,28 @@ func (module *module) Update(ctx context.Context, orgID valuer.UUID, input *serv
}

func (module *module) UpdateStatus(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
serviceAccount, err := module.Get(ctx, orgID, input.ID)
err := module.authz.Revoke(ctx, orgID, input.Roles, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, input.ID.String(), orgID, nil))
if err != nil {
return err
}

if input.Status == serviceAccount.Status {
return nil
}
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
// revoke all the API keys on disable
err := module.store.RevokeAllFactorAPIKeys(ctx, input.ID)
if err != nil {
return err
}

switch input.Status {
case serviceaccounttypes.StatusActive:
err := module.activateServiceAccount(ctx, orgID, input)
if err != nil {
return err
}
case serviceaccounttypes.StatusDisabled:
err := module.disableServiceAccount(ctx, orgID, input)
// update the status but do not delete the role mappings as we will use them for audits
err = module.store.Update(ctx, orgID, serviceaccounttypes.NewStorableServiceAccount(input))
if err != nil {
return err
}

return nil
})
if err != nil {
return err
}

return nil
@@ -203,7 +224,7 @@ func (module *module) Delete(ctx context.Context, orgID valuer.UUID, id valuer.U
}

// revoke from authz first as this cannot run in sql transaction
err = module.authz.Revoke(ctx, orgID, serviceAccount.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, serviceAccount.ID.String(), orgID, nil))
err = module.authz.Revoke(ctx, orgID, serviceAccount.Roles, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, serviceAccount.ID.String(), orgID, nil))
if err != nil {
return err
}
@@ -276,8 +297,13 @@ func (module *module) ListFactorAPIKey(ctx context.Context, serviceAccountID val
return serviceaccounttypes.NewFactorAPIKeyFromStorables(storables), nil
}

func (module *module) UpdateFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, factorAPIKey *serviceaccounttypes.FactorAPIKey) error {
return module.store.UpdateFactorAPIKey(ctx, serviceAccountID, serviceaccounttypes.NewStorableFactorAPIKey(factorAPIKey))
func (module *module) UpdateFactorAPIKey(ctx context.Context, _ valuer.UUID, serviceAccountID valuer.UUID, factorAPIKey *serviceaccounttypes.FactorAPIKey) error {
err := module.store.UpdateFactorAPIKey(ctx, serviceAccountID, serviceaccounttypes.NewStorableFactorAPIKey(factorAPIKey))
if err != nil {
return err
}

return nil
}

func (module *module) RevokeFactorAPIKey(ctx context.Context, serviceAccountID valuer.UUID, id valuer.UUID) error {
@@ -307,45 +333,3 @@ func (module *module) RevokeFactorAPIKey(ctx context.Context, serviceAccountID v

return nil
}

func (module *module) disableServiceAccount(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
err := module.authz.Revoke(ctx, orgID, input.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.String(), orgID, nil))
if err != nil {
return err
}

err = module.store.RunInTx(ctx, func(ctx context.Context) error {
// revoke all the API keys on disable
err := module.store.RevokeAllFactorAPIKeys(ctx, input.ID)
if err != nil {
return err
}

// update the status but do not delete the role mappings as we will reuse them on activation.
err = module.Update(ctx, orgID, input)
if err != nil {
return err
}

return nil
})
if err != nil {
return err
}

return nil
}

func (module *module) activateServiceAccount(ctx context.Context, orgID valuer.UUID, input *serviceaccounttypes.ServiceAccount) error {
err := module.authz.Grant(ctx, orgID, input.Roles, authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.String(), orgID, nil))
if err != nil {
return err
}

err = module.Update(ctx, orgID, input)
if err != nil {
return err
}

return nil
}

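UpdateStatus now runs in two phases: authz grants are revoked outside the transaction (per the comment that authz actions cannot run in SQL transactions), then API-key revocation and the status row update happen atomically. A minimal sketch of that ordering with a hypothetical transaction runner:

package main

import "fmt"

// runInTx is a hypothetical stand-in for store.RunInTx.
func runInTx(fn func() error) error { return fn() }

func updateStatus() error {
	// 1. Revoke role grants outside the transaction, mirroring the comment
	//    in the diff that authz actions cannot run in SQL transactions.
	fmt.Println("authz.Revoke(roles, serviceaccount subject)")

	// 2. Everything that touches the DB happens atomically.
	return runInTx(func() error {
		fmt.Println("store.RevokeAllFactorAPIKeys(id)")
		// Status is updated but role mappings are kept for audits.
		fmt.Println("store.Update(storable)")
		return nil
	})
}

func main() { _ = updateStatus() }
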
@@ -48,6 +48,25 @@ func (store *store) Get(ctx context.Context, orgID valuer.UUID, id valuer.UUID)
return storable, nil
}

func (store *store) GetActiveByOrgIDAndName(ctx context.Context, orgID valuer.UUID, name string) (*serviceaccounttypes.StorableServiceAccount, error) {
storable := new(serviceaccounttypes.StorableServiceAccount)

err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(storable).
Where("org_id = ?", orgID).
Where("name = ?", name).
Where("status = ?", serviceaccounttypes.StatusActive).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, serviceaccounttypes.ErrCodeServiceAccountNotFound, "service account with name: %s doesn't exist in org: %s", name, orgID.String())
}

return storable, nil
}

func (store *store) GetByID(ctx context.Context, id valuer.UUID) (*serviceaccounttypes.StorableServiceAccount, error) {
storable := new(serviceaccounttypes.StorableServiceAccount)

@@ -188,7 +207,7 @@ func (store *store) CreateFactorAPIKey(ctx context.Context, storable *serviceacc
Model(storable).
Exec(ctx)
if err != nil {
return store.sqlstore.WrapAlreadyExistsErrf(err, serviceaccounttypes.ErrCodeServiceAccountFactorAPIKeyAlreadyExists, "api key with name: %s already exists for service account: %s", storable.Name, storable.ServiceAccountID)
return store.sqlstore.WrapAlreadyExistsErrf(err, serviceaccounttypes.ErrCodeAPIKeyAlreadyExists, "api key with name: %s already exists for service account: %s", storable.Name, storable.ServiceAccountID)
}

return nil
@@ -206,7 +225,7 @@ func (store *store) GetFactorAPIKey(ctx context.Context, serviceAccountID valuer
Where("service_account_id = ?", serviceAccountID).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, serviceaccounttypes.ErrCodeServiceAccounFactorAPIKeytNotFound, "api key with id: %s doesn't exist for service account: %s", id, serviceAccountID)
return nil, store.sqlstore.WrapNotFoundErrf(err, serviceaccounttypes.ErrCodeAPIKeytNotFound, "api key with id: %s doesn't exist for service account: %s", id, serviceAccountID)
}

return storable, nil

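GetActiveByOrgIDAndName keeps the store's convention of converting driver errors into typed module errors at the boundary. A generic sketch of that wrap-at-the-edge pattern (the helper below is illustrative, not the repo's sqlstore API):

package main

import (
	"database/sql"
	"errors"
	"fmt"
)

var errNotFound = errors.New("not_found")

// wrapNotFound converts sql.ErrNoRows into a typed not-found error so
// callers can branch with errors.Is instead of string matching.
func wrapNotFound(err error, msg string) error {
	if errors.Is(err, sql.ErrNoRows) {
		return fmt.Errorf("%w: %s", errNotFound, msg)
	}
	return err
}

func main() {
	err := wrapNotFound(sql.ErrNoRows, "service account with name: x doesn't exist")
	fmt.Println(errors.Is(err, errNotFound)) // true
}
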
@@ -15,6 +15,9 @@ type Module interface {
// Gets a service account by id.
Get(context.Context, valuer.UUID, valuer.UUID) (*serviceaccounttypes.ServiceAccount, error)

// Gets or creates a service account by name
GetOrCreate(context.Context, *serviceaccounttypes.ServiceAccount) (*serviceaccounttypes.ServiceAccount, error)

// Gets a service account by id without fetching roles.
GetWithoutRoles(context.Context, valuer.UUID, valuer.UUID) (*serviceaccounttypes.ServiceAccount, error)

@@ -40,7 +43,7 @@ type Module interface {
ListFactorAPIKey(context.Context, valuer.UUID) ([]*serviceaccounttypes.FactorAPIKey, error)

// Updates an existing API key for a service account
UpdateFactorAPIKey(context.Context, valuer.UUID, *serviceaccounttypes.FactorAPIKey) error
UpdateFactorAPIKey(context.Context, valuer.UUID, valuer.UUID, *serviceaccounttypes.FactorAPIKey) error

// Revokes an existing API key for a service account
RevokeFactorAPIKey(context.Context, valuer.UUID, valuer.UUID) error

@@ -69,7 +69,6 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
key string // deterministic join of label values
}
seriesMap := map[sKey]*qbtypes.TimeSeries{}
var keyOrder []sKey // preserves ClickHouse row-arrival order

stepMs := uint64(step.Duration.Milliseconds())

@@ -220,7 +219,6 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
if !ok {
series = &qbtypes.TimeSeries{Labels: lblObjs}
seriesMap[key] = series
keyOrder = append(keyOrder, key)
}
series.Values = append(series.Values, &qbtypes.TimeSeriesValue{
Timestamp: ts,
@@ -252,8 +250,8 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
Alias: "__result_" + strconv.Itoa(i),
}
}
for _, k := range keyOrder {
buckets[k.agg].Series = append(buckets[k.agg].Series, seriesMap[k])
for k, s := range seriesMap {
buckets[k.agg].Series = append(buckets[k.agg].Series, s)
}

var nonEmpty []*qbtypes.AggregationBucket

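The deleted keyOrder slice existed only to preserve ClickHouse row-arrival order; ranging over the map instead yields an unspecified order. A self-contained illustration of the difference:

package main

import "fmt"

func main() {
	m := map[string]int{}
	var order []string // side slice, as keyOrder did

	for _, k := range []string{"c", "a", "b"} { // arrival order
		if _, ok := m[k]; !ok {
			m[k] = len(m)
			order = append(order, k)
		}
	}

	fmt.Println(order) // always [c a b]
	for k := range m { // map iteration order is deliberately randomized in Go
		fmt.Print(k, " ")
	}
	fmt.Println()
}
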
@@ -185,6 +185,22 @@ func postProcessMetricQuery(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
req *qbtypes.QueryRangeRequest,
) *qbtypes.Result {

config := query.Aggregations[0]
spaceAggOrderBy := fmt.Sprintf("%s(%s)", config.SpaceAggregation.StringValue(), config.MetricName)
timeAggOrderBy := fmt.Sprintf("%s(%s)", config.TimeAggregation.StringValue(), config.MetricName)
timeSpaceAggOrderBy := fmt.Sprintf("%s(%s(%s))", config.SpaceAggregation.StringValue(), config.TimeAggregation.StringValue(), config.MetricName)

for idx := range query.Order {
if query.Order[idx].Key.Name == spaceAggOrderBy ||
query.Order[idx].Key.Name == timeAggOrderBy ||
query.Order[idx].Key.Name == timeSpaceAggOrderBy {
query.Order[idx].Key.Name = qbtypes.DefaultOrderByKey
}
}

result = q.applySeriesLimit(result, query.Limit, query.Order)

if len(query.Functions) > 0 {
step := query.StepInterval.Duration.Milliseconds()
functions := q.prepareFillZeroArgsWithStep(query.Functions, req, step)

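The new block canonicalizes order-by keys that name the aggregation, e.g. sum(metric), rate(metric), or sum(rate(metric)), to the default order-by key before the series limit is applied. A stripped-down sketch of that rewrite with hypothetical values:

package main

import "fmt"

const defaultOrderByKey = "#default" // stand-in for qbtypes.DefaultOrderByKey

func normalizeOrderKeys(keys []string, metric, timeAgg, spaceAgg string) []string {
	spaceForm := fmt.Sprintf("%s(%s)", spaceAgg, metric)
	timeForm := fmt.Sprintf("%s(%s)", timeAgg, metric)
	bothForm := fmt.Sprintf("%s(%s(%s))", spaceAgg, timeAgg, metric)

	for i, k := range keys {
		if k == spaceForm || k == timeForm || k == bothForm {
			keys[i] = defaultOrderByKey
		}
	}
	return keys
}

func main() {
	fmt.Println(normalizeOrderKeys(
		[]string{"sum(rate(signoz_calls_total))", "service.name"},
		"signoz_calls_total", "rate", "sum",
	)) // [#default service.name]
}
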
@@ -132,14 +132,6 @@ func GroupByKeys(keys []qbtypes.GroupByKey) []string {
return k
}

func OrderByKeys(keys []qbtypes.OrderBy) []string {
k := []string{}
for _, key := range keys {
k = append(k, "`"+key.Key.Name+"`")
}
return k
}

func FormatValueForContains(value any) string {
if value == nil {
return ""

@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
},
expectedErr: nil,
@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
},
expectedErr: nil,
@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
},
expectedErr: nil,
@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte WHERE (`host.name`) IN (SELECT `host.name` FROM __spatial_aggregation_cte GROUP BY `host.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `host.name`) DESC, `host.name`, ts ASC",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
},
expectedErr: nil,

@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"log/slog"
"strings"

"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
@@ -547,16 +546,6 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
) (*qbtypes.Statement, error) {
metricType := query.Aggregations[0].Type
spaceAgg := query.Aggregations[0].SpaceAggregation
finalCTE := "__spatial_aggregation_cte"
if metricType == metrictypes.HistogramType {
histogramCTE, histogramCTEArgs, err := b.buildHistogramCTE(query)
if err != nil {
return nil, err
}
cteFragments = append(cteFragments, histogramCTE)
cteArgs = append(cteArgs, histogramCTEArgs)
finalCTE = "__histogram_cte"
}

combined := querybuilder.CombineCTEs(cteFragments)

@@ -566,97 +555,60 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
}

sb := sqlbuilder.NewSelectBuilder()
sb.Select("*")
sb.From(finalCTE)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Where(rewrittenExpr)
}

groupByKeys := querybuilder.GroupByKeys(query.GroupBy)
hasOrder := len(query.Order) > 0
hasLimit := query.Limit > 0
hasGroupBy := len(groupByKeys) > 0
if metricType == metrictypes.HistogramType && spaceAgg.IsPercentile() {
quantile := query.Aggregations[0].SpaceAggregation.Percentile()
sb.Select("ts")
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
sb.SelectMore(fmt.Sprintf(
"histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) AS value",
quantile,
))
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else if metricType == metrictypes.HistogramType && spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam != nil {
sb.Select("ts")

if !hasGroupBy {
// do nothing, limits and orders don't mean anything
} else if hasOrder && hasLimit {
labelSelectorSubQueryBuilder := sqlbuilder.NewSelectBuilder()
labelSelectorSubQueryBuilder.Select(groupByKeys...)
labelSelectorSubQueryBuilder.From(finalCTE)
labelSelectorOrderClauses := []string{}
orderedKeys := map[string]struct{}{} // this will be used to add the remaining keys as tie breakers in the end
for _, o := range query.Order {
key := o.Key.Name
var clause string
if strings.Contains(key, query.Aggregations[0].MetricName) {
clause = fmt.Sprintf("avg(value) %s", o.Direction.StringValue())
} else {
clause = fmt.Sprintf("`%s` %s", key, o.Direction.StringValue())
orderedKeys[fmt.Sprintf("`%s`", key)] = struct{}{}
}
labelSelectorOrderClauses = append(labelSelectorOrderClauses, clause)
for _, g := range query.GroupBy {
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
for _, gk := range groupByKeys { // keys that haven't been added via order by keys will be added at the end as tie breakers
if _, ok := orderedKeys[gk]; !ok {
labelSelectorOrderClauses = append(labelSelectorOrderClauses, fmt.Sprintf("%s ASC", gk))
}
}
labelSelectorSubQueryBuilder.GroupBy(groupByKeys...)
labelSelectorSubQueryBuilder.OrderBy(labelSelectorOrderClauses...)
labelSelectorSubQuery, _ := labelSelectorSubQueryBuilder.BuildWithFlavor(sqlbuilder.ClickHouse)
labelSelectorSubQuery = fmt.Sprintf("%s LIMIT %d", labelSelectorSubQuery, query.Limit)

sb.Where(fmt.Sprintf("(%s) IN (%s)", strings.Join(groupByKeys, ", "), labelSelectorSubQuery))
for _, o := range query.Order {
key := o.Key.Name
var clause string
if strings.Contains(key, query.Aggregations[0].MetricName) {
clause = fmt.Sprintf("avg(value) OVER (PARTITION BY %s) %s", strings.Join(groupByKeys, ", "), o.Direction.StringValue())
} else {
clause = fmt.Sprintf("`%s` %s", key, o.Direction.StringValue())
}
sb.OrderBy(clause)
aggQuery, err := AggregationQueryForHistogramCountWithParams(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return nil, err
}
} else if hasOrder {
// order by without limit: apply order by clauses directly
for _, o := range query.Order {
key := o.Key.Name
if strings.Contains(key, query.Aggregations[0].MetricName) {
sb.OrderBy(fmt.Sprintf("avg(value) OVER (PARTITION BY %s) %s", strings.Join(groupByKeys, ", "), o.Direction.StringValue()))
continue
}
sb.OrderBy(fmt.Sprintf("`%s` %s", o.Key.Name, o.Direction.StringValue()))
}
} else if hasLimit {
labelSelectorSubQueryBuilder := sqlbuilder.NewSelectBuilder()
labelSelectorSubQueryBuilder.Select(groupByKeys...)
labelSelectorSubQueryBuilder.From(finalCTE)
labelSelectorSubQueryBuilder.GroupBy(groupByKeys...)
labelSelectorSubQueryBuilder.OrderBy("avg(value) DESC")
labelSelectorSubQuery, _ := labelSelectorSubQueryBuilder.BuildWithFlavor(sqlbuilder.ClickHouse)
labelSelectorSubQuery = fmt.Sprintf("%s LIMIT %d", labelSelectorSubQuery, query.Limit)
sb.SelectMore(aggQuery)

sb.Where(fmt.Sprintf("(%s) IN (%s)", strings.Join(groupByKeys, ", "), labelSelectorSubQuery))
sb.OrderBy(fmt.Sprintf("avg(value) OVER (PARTITION BY %s) DESC", strings.Join(groupByKeys, ", ")))
sb.From("__spatial_aggregation_cte")

sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")

if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Having(rewrittenExpr)
}
} else {
// grouping without order by or limit: sort by avg(value) DESC with labels as tiebreakers
sb.OrderBy(fmt.Sprintf("avg(value) OVER (PARTITION BY %s) DESC", strings.Join(groupByKeys, ", ")))
}

// add any group-by keys not already in the order-by as tiebreakers
orderKeySet := make(map[string]struct{})
for _, o := range query.Order {
orderKeySet[fmt.Sprintf("`%s`", o.Key.Name)] = struct{}{}
}
for _, g := range groupByKeys {
if _, exists := orderKeySet[g]; !exists {
sb.OrderBy(g)
// for count aggregation on histograms with no params, the exact result of spatial aggregation can be sent forward
sb.Select("*")
sb.From("__spatial_aggregation_cte")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
sb.Where(rewrittenExpr)
}
}

sb.OrderBy("ts ASC")
sb.OrderBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("ts")
if metricType == metrictypes.HistogramType && spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam == nil {
sb.OrderBy("toFloat64(le)")
}
@@ -664,45 +616,3 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return &qbtypes.Statement{Query: combined + q, Args: append(args, a...)}, nil
}

func (b *MetricQueryStatementBuilder) buildHistogramCTE(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
) (string, []any, error) {
spaceAgg := query.Aggregations[0].SpaceAggregation
histogramCTEQueryBuilder := sqlbuilder.NewSelectBuilder()
if spaceAgg.IsPercentile() {
histogramCTEQueryBuilder.Select("ts")
for _, g := range query.GroupBy {
histogramCTEQueryBuilder.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
quantile := spaceAgg.Percentile()
histogramCTEQueryBuilder.SelectMore(fmt.Sprintf(
"histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), %.3f) AS value",
quantile,
))
histogramCTEQueryBuilder.From("__spatial_aggregation_cte")
histogramCTEQueryBuilder.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
histogramCTEQueryBuilder.GroupBy("ts")
} else if spaceAgg == metrictypes.SpaceAggregationCount && query.Aggregations[0].ComparisonSpaceAggregationParam != nil {
histogramCTEQueryBuilder.Select("ts")
for _, g := range query.GroupBy {
histogramCTEQueryBuilder.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggQuery, err := AggregationQueryForHistogramCountWithParams(query.Aggregations[0].ComparisonSpaceAggregationParam)
if err != nil {
return "", nil, err
}
histogramCTEQueryBuilder.SelectMore(aggQuery)
histogramCTEQueryBuilder.From("__spatial_aggregation_cte")
histogramCTEQueryBuilder.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
histogramCTEQueryBuilder.GroupBy("ts")
} else {
// for count aggregation on histograms with no params, the exact result of spatial aggregation can be sent forward
histogramCTEQueryBuilder.Select("*")
histogramCTEQueryBuilder.From("__spatial_aggregation_cte")
}
histogramQueryCTE, histogramQueryCTEArgs := histogramCTEQueryBuilder.BuildWithFlavor(sqlbuilder.ClickHouse)
histogramCTE := fmt.Sprintf("__histogram_cte AS (%s)", histogramQueryCTE)

return histogramCTE, histogramQueryCTEArgs, nil
}

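After the refactor the final SELECT is uniform: select from the final CTE (histogram or spatial), rewrite HAVING into a WHERE over it, and order by the group-by keys followed by ts. A compact sketch of that assembly with the same sqlbuilder library and flavor the builder uses (values are illustrative):

package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	finalCTE := "__histogram_cte" // or "__spatial_aggregation_cte"
	groupByKeys := []string{"`service.name`"}

	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("*")
	sb.From(finalCTE)
	sb.Where("value > 0") // stands in for the rewritten HAVING expression
	sb.OrderBy(groupByKeys...)
	sb.OrderBy("ts")

	q, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(q) // e.g. SELECT * FROM __histogram_cte WHERE value > 0 ORDER BY `service.name`, ts
}
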
@@ -15,17 +15,16 @@ import (
)

func TestStatementBuilder(t *testing.T) {
type baseQuery struct {
name string
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]
orderKey string
args []any
cte string
}

bases := []baseQuery{
cases := []struct {
name string
requestType qbtypes.RequestType
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]
expected qbtypes.Statement
expectedErr error
}{
{
name: "cumulative_rate_sum",
name: "test_cumulative_rate_sum",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
@@ -41,16 +40,24 @@ func TestStatementBuilder(t *testing.T) {
Filter: &qbtypes.Filter{
Expression: "service.name = 'cartservice'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
orderKey: "service.name",
args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
cte: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`)",
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
},
{
name: "cumulative_rate_sum_with_mat_column",
name: "test_cumulative_rate_sum_with_mat_column",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
@@ -66,16 +73,24 @@ func TestStatementBuilder(t *testing.T) {
Filter: &qbtypes.Filter{
Expression: "materialized.key.name REGEXP 'cartservice' OR service.name = 'cartservice'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
orderKey: "service.name",
args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
cte: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`)",
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
},
{
name: "delta_rate_sum",
name: "test_delta_rate_sum",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
@@ -91,16 +106,24 @@ func TestStatementBuilder(t *testing.T) {
Filter: &qbtypes.Filter{
Expression: "service.name = 'cartservice'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
orderKey: "service.name",
args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
cte: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`)",
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
},
{
name: "histogram_percentile1",
name: "test_histogram_percentile1",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
@@ -116,38 +139,24 @@ func TestStatementBuilder(t *testing.T) {
Filter: &qbtypes.Filter{
Expression: "service.name = 'cartservice'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
},
},
orderKey: "service.name",
args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
cte: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`), __histogram_cte AS (SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts)",
},
{
name: "histogram_percentile2",
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "http_server_duration_bucket",
Type: metrictypes.HistogramType,
Temporality: metrictypes.Cumulative,
TimeAggregation: metrictypes.TimeAggregationRate,
SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "service.name"}},
},
},
orderKey: "service.name",
args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947360000), uint64(1747983420000), 0},
cte: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`), __histogram_cte AS (SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts)",
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
},
{
name: "gauge_avg_sum",
name: "test_gauge_avg_sum",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
@@ -163,81 +172,51 @@ func TestStatementBuilder(t *testing.T) {
Filter: &qbtypes.Filter{
Expression: "host.name = 'big-data-node-1'",
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "host.name"}},
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "host.name",
},
},
},
},
orderKey: "host.name",
args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
cte: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`)",
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,
},
{
name: "test_histogram_percentile2",
requestType: qbtypes.RequestTypeTimeSeries,
query: qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]{
Signal: telemetrytypes.SignalMetrics,
StepInterval: qbtypes.Step{Duration: 30 * time.Second},
Aggregations: []qbtypes.MetricAggregation{
{
MetricName: "http_server_duration_bucket",
Type: metrictypes.HistogramType,
Temporality: metrictypes.Cumulative,
TimeAggregation: metrictypes.TimeAggregationRate,
SpaceAggregation: metrictypes.SpaceAggregationPercentile95,
},
},
Limit: 10,
GroupBy: []qbtypes.GroupByKey{
{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: "service.name",
},
},
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
|
||||
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947360000), uint64(1747983420000), 0},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
}
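
The multiIf window expression in the expected CTEs above encodes the standard counter-rate rule: the first sample in a series window yields NaN, a decrease is treated as a counter reset (the new value counts as the whole increase), and otherwise the rate is the delta divided by the elapsed seconds. A minimal Go sketch of that rule, with illustrative names only (not part of the package under test):

import "math"

// counterRate mirrors the multiIf branches: NaN for the first row,
// value/dt after a reset, delta/dt otherwise.
func counterRate(ts []uint64, vals []float64) []float64 {
	out := make([]float64, len(vals))
	for i := range vals {
		if i == 0 {
			out[i] = math.NaN() // row_number() OVER rate_window = 1
			continue
		}
		dt := float64(ts[i] - ts[i-1])
		if vals[i]-vals[i-1] < 0 {
			out[i] = vals[i] / dt // counter reset detected
		} else {
			out[i] = (vals[i] - vals[i-1]) / dt
		}
	}
	return out
}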

type variant struct {
name string
limit int
hasOrder bool
}

variants := []variant{
{"with_limits", 10, false},
{"without_limits", 0, false},
{"with_order_by", 0, true},
{"with_order_by_and_limits", 10, true},
}

sumMetricsFinalSelects := map[string]string{
"with_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
"without_limits": " SELECT * FROM __spatial_aggregation_cte ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
"with_order_by": " SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name` asc, ts ASC",
"with_order_by_and_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __spatial_aggregation_cte GROUP BY `service.name` ORDER BY `service.name` asc LIMIT 10) ORDER BY `service.name` asc, ts ASC",
}

histogramMetricsFinalSelects := map[string]string{
"with_limits": " SELECT * FROM __histogram_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __histogram_cte GROUP BY `service.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
"without_limits": " SELECT * FROM __histogram_cte ORDER BY avg(value) OVER (PARTITION BY `service.name`) DESC, `service.name`, ts ASC",
"with_order_by": " SELECT * FROM __histogram_cte ORDER BY `service.name` asc, ts ASC",
"with_order_by_and_limits": " SELECT * FROM __histogram_cte WHERE (`service.name`) IN (SELECT `service.name` FROM __histogram_cte GROUP BY `service.name` ORDER BY `service.name` asc LIMIT 10) ORDER BY `service.name` asc, ts ASC",
}

// expectedFinalSelects maps "base/variant" to the final SELECT portion after the CTE.
// The full expected query is: base.cte + expectedFinalSelects[name]
expectedFinalSelects := map[string]string{
// cumulative_rate_sum
"cumulative_rate_sum/with_limits": sumMetricsFinalSelects["with_limits"],
"cumulative_rate_sum/without_limits": sumMetricsFinalSelects["without_limits"],
"cumulative_rate_sum/with_order_by": sumMetricsFinalSelects["with_order_by"],
"cumulative_rate_sum/with_order_by_and_limits": sumMetricsFinalSelects["with_order_by_and_limits"],

// cumulative_rate_sum_with_mat_column
"cumulative_rate_sum_with_mat_column/with_limits": sumMetricsFinalSelects["with_limits"],
"cumulative_rate_sum_with_mat_column/without_limits": sumMetricsFinalSelects["without_limits"],
"cumulative_rate_sum_with_mat_column/with_order_by": sumMetricsFinalSelects["with_order_by"],
"cumulative_rate_sum_with_mat_column/with_order_by_and_limits": sumMetricsFinalSelects["with_order_by_and_limits"],

// delta_rate_sum
"delta_rate_sum/with_limits": sumMetricsFinalSelects["with_limits"],
"delta_rate_sum/without_limits": sumMetricsFinalSelects["without_limits"],
"delta_rate_sum/with_order_by": sumMetricsFinalSelects["with_order_by"],
"delta_rate_sum/with_order_by_and_limits": sumMetricsFinalSelects["with_order_by_and_limits"],

// histogram_percentile1
"histogram_percentile1/with_limits": histogramMetricsFinalSelects["with_limits"],
"histogram_percentile1/without_limits": histogramMetricsFinalSelects["without_limits"],
"histogram_percentile1/with_order_by": histogramMetricsFinalSelects["with_order_by"],
"histogram_percentile1/with_order_by_and_limits": histogramMetricsFinalSelects["with_order_by_and_limits"],

// histogram_percentile2
"histogram_percentile2/with_limits": histogramMetricsFinalSelects["with_limits"],
"histogram_percentile2/without_limits": histogramMetricsFinalSelects["without_limits"],
"histogram_percentile2/with_order_by": histogramMetricsFinalSelects["with_order_by"],
"histogram_percentile2/with_order_by_and_limits": histogramMetricsFinalSelects["with_order_by_and_limits"],

// gauge_avg_sum
"gauge_avg_sum/with_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`host.name`) IN (SELECT `host.name` FROM __spatial_aggregation_cte GROUP BY `host.name` ORDER BY avg(value) DESC LIMIT 10) ORDER BY avg(value) OVER (PARTITION BY `host.name`) DESC, `host.name`, ts ASC",
"gauge_avg_sum/without_limits": " SELECT * FROM __spatial_aggregation_cte ORDER BY avg(value) OVER (PARTITION BY `host.name`) DESC, `host.name`, ts ASC",
"gauge_avg_sum/with_order_by": " SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name` asc, ts ASC",
"gauge_avg_sum/with_order_by_and_limits": " SELECT * FROM __spatial_aggregation_cte WHERE (`host.name`) IN (SELECT `host.name` FROM __spatial_aggregation_cte GROUP BY `host.name` ORDER BY `host.name` asc LIMIT 10) ORDER BY `host.name` asc, ts ASC",
}
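
As the comment above notes, each full expected query is the base case's CTE prefix concatenated with the final SELECT chosen for the variant; schematically (illustrative, matching the assertion in the loop below):

// For a pair such as "gauge_avg_sum" x "with_limits":
wantQuery := b.cte + expectedFinalSelects[b.name+"/"+v.name]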

fm := NewFieldMapper()
@@ -248,13 +227,15 @@ func TestStatementBuilder(t *testing.T) {
t.Fatalf("failed to load field keys: %v", err)
}
mockMetadataStore.KeysMap = keys
// NOTE: LoadFieldKeysFromJSON doesn't set Materialized field
// for keys, so we have to set it manually here for testing
if _, ok := mockMetadataStore.KeysMap["materialized.key.name"]; ok {
if len(mockMetadataStore.KeysMap["materialized.key.name"]) > 0 {
mockMetadataStore.KeysMap["materialized.key.name"][0].Materialized = true
}
}

fl, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
flagger, err := flagger.New(context.Background(), instrumentationtest.New().ToProviderSettings(), flagger.Config{}, flagger.MustNewRegistry())
if err != nil {
t.Fatalf("failed to create flagger: %v", err)
}
@@ -264,30 +245,23 @@ func TestStatementBuilder(t *testing.T) {
mockMetadataStore,
fm,
cb,
fl,
flagger,
)

for _, b := range bases {
for _, v := range variants {
name := b.name + "/" + v.name
t.Run(name, func(t *testing.T) {
q := b.query
q.Limit = v.limit
if v.hasOrder {
q.Order = []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: b.orderKey}},
Direction: qbtypes.OrderDirectionAsc,
},
}
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {

result, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, qbtypes.RequestTypeTimeSeries, q, nil)
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)

if c.expectedErr != nil {
require.Error(t, err)
require.Contains(t, err.Error(), c.expectedErr.Error())
} else {
require.NoError(t, err)
require.Equal(t, b.cte+expectedFinalSelects[name], result.Query)
require.Equal(t, b.args, result.Args)
})
}
require.Equal(t, c.expected.Query, q.Query)
require.Equal(t, c.expected.Args, q.Args)
require.Equal(t, c.expected.Warnings, q.Warnings)
}
})
}
}

@@ -1,6 +1,50 @@
package telemetrytraces

import "github.com/SigNoz/signoz/pkg/types/telemetrytypes"
import (
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

const (

// Internal Columns
SpanTimestampBucketStartColumn = "ts_bucket_start"
SpanResourceFingerPrintColumn = "resource_fingerprint"

// Intrinsic Columns
SpanTimestampColumn = "timestamp"
SpanTraceIDColumn = "trace_id"
SpanSpanIDColumn = "span_id"
SpanTraceStateColumn = "trace_state"
SpanParentSpanIDColumn = "parent_span_id"
SpanFlagsColumn = "flags"
SpanNameColumn = "name"
SpanKindColumn = "kind"
SpanKindStringColumn = "kind_string"
SpanDurationNanoColumn = "duration_nano"
SpanStatusCodeColumn = "status_code"
SpanStatusMessageColumn = "status_message"
SpanStatusCodeStringColumn = "status_code_string"
SpanEventsColumn = "events"
SpanLinksColumn = "links"

// Calculated Columns
SpanResponseStatusCodeColumn = "response_status_code"
SpanExternalHTTPURLColumn = "external_http_url"
SpanHTTPURLColumn = "http_url"
SpanExternalHTTPMethodColumn = "external_http_method"
SpanHTTPMethodColumn = "http_method"
SpanHTTPHostColumn = "http_host"
SpanDBNameColumn = "db_name"
SpanDBOperationColumn = "db_operation"
SpanHasErrorColumn = "has_error"
SpanIsRemoteColumn = "is_remote"

// Contextual Columns
SpanAttributesStringColumn = "attributes_string"
SpanAttributesNumberColumn = "attributes_number"
SpanAttributesBoolColumn = "attributes_bool"
SpanResourcesStringColumn = "resources_string"
)

var (
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{

@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"log/slog"
"slices"
"strings"

"github.com/SigNoz/signoz/pkg/errors"
@@ -14,7 +13,6 @@ import (
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
)

var (
@@ -74,41 +72,6 @@ func (b *traceQueryStatementBuilder) Build(
return nil, err
}

/*
Adding a tech debt note here:
This piece of code is a hot fix and should be removed once we close issue: engineering-pod/issues/3622
*/
/*
-------------------------------- Start of tech debt ----------------------------
*/
if requestType == qbtypes.RequestTypeRaw {

selectedFields := query.SelectFields

if len(selectedFields) == 0 {
sortedKeys := maps.Keys(DefaultFields)
slices.Sort(sortedKeys)
for _, key := range sortedKeys {
selectedFields = append(selectedFields, DefaultFields[key])
}
query.SelectFields = selectedFields
}

selectFieldKeys := []string{}
for _, field := range selectedFields {
selectFieldKeys = append(selectFieldKeys, field.Name)
}

for _, x := range []string{"timestamp", "span_id", "trace_id"} {
if !slices.Contains(selectFieldKeys, x) {
query.SelectFields = append(query.SelectFields, DefaultFields[x])
}
}
}
/*
-------------------------------- End of tech debt ----------------------------
*/

query = b.adjustKeys(ctx, keys, query, requestType)

// Check if filter contains trace_id(s) and optimize time range if needed
@@ -311,13 +274,53 @@ func (b *traceQueryStatementBuilder) buildListQuery(
cteArgs = append(cteArgs, args)
}

// TODO: should we deprecate `SelectFields` and return everything from a span like we do for logs?
for _, field := range query.SelectFields {
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
if err != nil {
return nil, err
sb.SelectMore(SpanTimestampColumn)
sb.SelectMore(SpanTraceIDColumn)
sb.SelectMore(SpanSpanIDColumn)
// By default select all
if len(query.SelectFields) == 0 {
// Select all intrinsic columns
sb.SelectMore(SpanTraceStateColumn)
sb.SelectMore(SpanParentSpanIDColumn)
sb.SelectMore(SpanFlagsColumn)
sb.SelectMore(SpanNameColumn)
sb.SelectMore(SpanKindColumn)
sb.SelectMore(SpanKindStringColumn)
sb.SelectMore(SpanDurationNanoColumn)
sb.SelectMore(SpanStatusCodeColumn)
sb.SelectMore(SpanStatusMessageColumn)
sb.SelectMore(SpanStatusCodeStringColumn)
sb.SelectMore(SpanEventsColumn)
sb.SelectMore(SpanLinksColumn)

// select all calculated columns
sb.SelectMore(SpanResponseStatusCodeColumn)
sb.SelectMore(SpanExternalHTTPURLColumn)
sb.SelectMore(SpanHTTPURLColumn)
sb.SelectMore(SpanExternalHTTPMethodColumn)
sb.SelectMore(SpanHTTPMethodColumn)
sb.SelectMore(SpanHTTPHostColumn)
sb.SelectMore(SpanDBNameColumn)
sb.SelectMore(SpanDBOperationColumn)
sb.SelectMore(SpanHasErrorColumn)
sb.SelectMore(SpanIsRemoteColumn)

// select all contextual columns
sb.SelectMore(SpanAttributesStringColumn)
sb.SelectMore(SpanAttributesNumberColumn)
sb.SelectMore(SpanAttributesBoolColumn)
sb.SelectMore(SpanResourcesStringColumn)
} else {
for _, field := range query.SelectFields {
if field.Name == SpanTimestampColumn || field.Name == SpanTraceIDColumn || field.Name == SpanSpanIDColumn {
continue
}
colExpr, err := b.fm.ColumnExpressionFor(ctx, &field, keys)
if err != nil {
return nil, err
}
sb.SelectMore(colExpr)
}
sb.SelectMore(colExpr)
}

// From table

@@ -20,19 +20,20 @@ var (
)

var TypeableRelations = map[Type][]Relation{
TypeUser: {RelationRead, RelationUpdate, RelationDelete},
TypeRole: {RelationAssignee, RelationRead, RelationUpdate, RelationDelete},
TypeOrganization: {RelationRead, RelationUpdate, RelationDelete},
TypeMetaResource: {RelationRead, RelationUpdate, RelationDelete},
TypeMetaResources: {RelationCreate, RelationList},
TypeUser: {RelationRead, RelationUpdate, RelationDelete},
TypeServiceAccount: {RelationRead, RelationUpdate, RelationDelete},
TypeRole: {RelationAssignee, RelationRead, RelationUpdate, RelationDelete},
TypeOrganization: {RelationRead, RelationUpdate, RelationDelete},
TypeMetaResource: {RelationRead, RelationUpdate, RelationDelete},
TypeMetaResources: {RelationCreate, RelationList},
}

var RelationsTypeable = map[Relation][]Type{
RelationCreate: {TypeMetaResources},
RelationRead: {TypeUser, TypeRole, TypeOrganization, TypeMetaResource},
RelationRead: {TypeUser, TypeServiceAccount, TypeRole, TypeOrganization, TypeMetaResource},
RelationList: {TypeMetaResources},
RelationUpdate: {TypeUser, TypeRole, TypeOrganization, TypeMetaResource},
RelationDelete: {TypeUser, TypeRole, TypeOrganization, TypeMetaResource},
RelationUpdate: {TypeUser, TypeServiceAccount, TypeRole, TypeOrganization, TypeMetaResource},
RelationDelete: {TypeUser, TypeServiceAccount, TypeRole, TypeOrganization, TypeMetaResource},
RelationAssignee: {TypeRole},
}
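
Read together, the two maps above say that a serviceaccount object supports read, update, and delete, and that those relations now also accept serviceaccount as a target type; illustrative lookups:

// Which relations can be granted on a service account, and which types a relation applies to.
_ = TypeableRelations[TypeServiceAccount] // {RelationRead, RelationUpdate, RelationDelete}
_ = RelationsTypeable[RelationRead]       // {TypeUser, TypeServiceAccount, TypeRole, TypeOrganization, TypeMetaResource}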
@@ -23,11 +23,12 @@ var (
)

var (
typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeUserSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeServiceAccountSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeRoleSelectorRegex = regexp.MustCompile(`^([a-z-]{1,50}|\*)$`)
typeAnonymousSelectorRegex = regexp.MustCompile(`^\*$`)
typeOrganizationSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
typeMetaResourceSelectorRegex = regexp.MustCompile(`^(^[0-9a-f]{8}(?:\-[0-9a-f]{4}){3}-[0-9a-f]{12}$|\*)$`)
// metaresources selectors are used to select either all or none until we introduce some hierarchy here.
typeMetaResourcesSelectorRegex = regexp.MustCompile(`^\*$`)
)
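
The service-account selector reuses the UUID-or-wildcard shape already used for users and organizations; a quick sketch of what the regex accepts (hypothetical inputs):

// A lowercase UUID or the wildcard matches; arbitrary names do not.
typeServiceAccountSelectorRegex.MatchString("0199c47d-f61b-7833-bc5f-c0730f12f046") // true
typeServiceAccountSelectorRegex.MatchString("*")                                    // true
typeServiceAccountSelectorRegex.MatchString("my-service-account")                   // false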
@@ -98,6 +99,11 @@ func IsValidSelector(typed Type, selector string) error {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelector, "selector must conform to regex %s", typeUserSelectorRegex.String())
}
return nil
case TypeServiceAccount:
if !typeServiceAccountSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelector, "selector must conform to regex %s", typeServiceAccountSelectorRegex.String())
}
return nil
case TypeRole:
if !typeRoleSelectorRegex.MatchString(selector) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeAuthZInvalidSelector, "selector must conform to regex %s", typeRoleSelectorRegex.String())

@@ -15,19 +15,21 @@ var (
)

var (
TypeUser = Type{valuer.NewString("user")}
TypeAnonymous = Type{valuer.NewString("anonymous")}
TypeRole = Type{valuer.NewString("role")}
TypeOrganization = Type{valuer.NewString("organization")}
TypeMetaResource = Type{valuer.NewString("metaresource")}
TypeMetaResources = Type{valuer.NewString("metaresources")}
TypeUser = Type{valuer.NewString("user")}
TypeServiceAccount = Type{valuer.NewString("serviceaccount")}
TypeAnonymous = Type{valuer.NewString("anonymous")}
TypeRole = Type{valuer.NewString("role")}
TypeOrganization = Type{valuer.NewString("organization")}
TypeMetaResource = Type{valuer.NewString("metaresource")}
TypeMetaResources = Type{valuer.NewString("metaresources")}
)

var (
TypeableUser = &typeableUser{}
TypeableAnonymous = &typeableAnonymous{}
TypeableRole = &typeableRole{}
TypeableOrganization = &typeableOrganization{}
TypeableUser = &typeableUser{}
TypeableServiceAccount = &typeableServiceAccount{}
TypeableAnonymous = &typeableAnonymous{}
TypeableRole = &typeableRole{}
TypeableOrganization = &typeableOrganization{}
)

type Typeable interface {
@@ -53,6 +55,8 @@ func NewType(input string) (Type, error) {
switch input {
case "user":
return TypeUser, nil
case "serviceaccount":
return TypeServiceAccount, nil
case "role":
return TypeRole, nil
case "organization":
@@ -88,6 +92,8 @@ func NewTypeableFromType(typed Type, name Name) (Typeable, error) {
return TypeableRole, nil
case TypeUser:
return TypeableUser, nil
case TypeServiceAccount:
return TypeableServiceAccount, nil
case TypeOrganization:
return TypeableOrganization, nil
case TypeMetaResource:

38 pkg/types/authtypes/typeable_serviceaccount.go Normal file
@@ -0,0 +1,38 @@
package authtypes

import (
"github.com/SigNoz/signoz/pkg/valuer"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
)

var _ Typeable = new(typeableServiceAccount)

type typeableServiceAccount struct{}

func (typeableServiceAccount *typeableServiceAccount) Tuples(subject string, relation Relation, selectors []Selector, orgID valuer.UUID) ([]*openfgav1.TupleKey, error) {
tuples := make([]*openfgav1.TupleKey, 0)

for _, selector := range selectors {
object := typeableServiceAccount.Prefix(orgID) + "/" + selector.String()
tuples = append(tuples, &openfgav1.TupleKey{User: subject, Relation: relation.StringValue(), Object: object})
}

return tuples, nil
}

func (typeableServiceAccount *typeableServiceAccount) Type() Type {
return TypeServiceAccount
}

func (typeableServiceAccount *typeableServiceAccount) Name() Name {
return MustNewName("serviceaccount")
}

// example: serviceaccount:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/serviceaccount
func (typeableServiceAccount *typeableServiceAccount) Prefix(orgID valuer.UUID) string {
return typeableServiceAccount.Type().StringValue() + ":" + "organization" + "/" + orgID.StringValue() + "/" + typeableServiceAccount.Name().String()
}

func (typeableServiceAccount *typeableServiceAccount) Scope(relation Relation) string {
return typeableServiceAccount.Name().String() + ":" + relation.StringValue()
}
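
A short usage sketch of the two formatting helpers above, reusing the org ID from the example comment and assuming RelationRead stringifies to "read" (illustrative, not part of the diff):

orgID := valuer.MustNewUUID("0199c47d-f61b-7833-bc5f-c0730f12f046")
_ = TypeableServiceAccount.Prefix(orgID)       // "serviceaccount:organization/0199c47d-f61b-7833-bc5f-c0730f12f046/serviceaccount"
_ = TypeableServiceAccount.Scope(RelationRead) // "serviceaccount:read"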
@@ -11,20 +11,22 @@ import (
)

var (
ErrCodeServiceAccountFactorAPIkeyInvalidInput = errors.MustNewCode("service_account_factor_api_key_invalid_input")
ErrCodeServiceAccountFactorAPIKeyAlreadyExists = errors.MustNewCode("service_account_factor_api_key_already_exists")
ErrCodeServiceAccounFactorAPIKeytNotFound = errors.MustNewCode("service_account_factor_api_key_not_found")
ErrCodeAPIkeyInvalidInput = errors.MustNewCode("service_account_factor_api_key_invalid_input")
ErrCodeAPIKeyAlreadyExists = errors.MustNewCode("service_account_factor_api_key_already_exists")
ErrCodeAPIKeytNotFound = errors.MustNewCode("service_account_factor_api_key_not_found")
ErrCodeAPIKeyExpired = errors.MustNewCode("api_key_expired")
ErrCodeAPIkeyOlderLastObservedAt = errors.MustNewCode("api_key_older_last_observed_at")
)

type StorableFactorAPIKey struct {
bun.BaseModel `bun:"table:factor_api_key"`
bun.BaseModel `bun:"table:factor_api_key,alias:factor_api_key"`

types.Identifiable
types.TimeAuditable
Name string `bun:"name"`
Key string `bun:"key"`
ExpiresAt uint64 `bun:"expires_at"`
LastUsed time.Time `bun:"last_used"`
LastObservedAt time.Time `bun:"last_observed_at"`
ServiceAccountID string `bun:"service_account_id"`
}

@@ -33,9 +35,9 @@ type FactorAPIKey struct {
types.TimeAuditable
Name string `json:"name" requrired:"true"`
Key string `json:"key" required:"true"`
ExpiresAt uint64 `json:"expires_at" required:"true"`
LastUsed time.Time `json:"last_used" required:"true"`
ServiceAccountID valuer.UUID `json:"service_account_id" required:"true"`
ExpiresAt uint64 `json:"expiresAt" required:"true"`
LastObservedAt time.Time `json:"lastObservedAt" required:"true"`
ServiceAccountID valuer.UUID `json:"serviceAccountId" required:"true"`
}

type GettableFactorAPIKeyWithKey struct {
@@ -47,19 +49,19 @@ type GettableFactorAPIKey struct {
types.Identifiable
types.TimeAuditable
Name string `json:"name" requrired:"true"`
ExpiresAt uint64 `json:"expires_at" required:"true"`
LastUsed time.Time `json:"last_used" required:"true"`
ServiceAccountID valuer.UUID `json:"service_account_id" required:"true"`
ExpiresAt uint64 `json:"expiresAt" required:"true"`
LastObservedAt time.Time `json:"lastObservedAt" required:"true"`
ServiceAccountID valuer.UUID `json:"serviceAccountId" required:"true"`
}

type PostableFactorAPIKey struct {
Name string `json:"name" required:"true"`
ExpiresAt uint64 `json:"expires_at" required:"true"`
ExpiresAt uint64 `json:"expiresAt" required:"true"`
}

type UpdatableFactorAPIKey struct {
Name string `json:"name" required:"true"`
ExpiresAt uint64 `json:"expires_at" required:"true"`
ExpiresAt uint64 `json:"expiresAt" required:"true"`
}

func NewFactorAPIKeyFromStorable(storable *StorableFactorAPIKey) *FactorAPIKey {
@@ -69,7 +71,7 @@ func NewFactorAPIKeyFromStorable(storable *StorableFactorAPIKey) *FactorAPIKey {
Name: storable.Name,
Key: storable.Key,
ExpiresAt: storable.ExpiresAt,
LastUsed: storable.LastUsed,
LastObservedAt: storable.LastObservedAt,
ServiceAccountID: valuer.MustNewUUID(storable.ServiceAccountID),
}
}
@@ -91,7 +93,7 @@ func NewStorableFactorAPIKey(factorAPIKey *FactorAPIKey) *StorableFactorAPIKey {
Name: factorAPIKey.Name,
Key: factorAPIKey.Key,
ExpiresAt: factorAPIKey.ExpiresAt,
LastUsed: factorAPIKey.LastUsed,
LastObservedAt: factorAPIKey.LastObservedAt,
ServiceAccountID: factorAPIKey.ServiceAccountID.String(),
}
}
@@ -105,7 +107,7 @@ func NewGettableFactorAPIKeys(keys []*FactorAPIKey) []*GettableFactorAPIKey {
TimeAuditable: key.TimeAuditable,
Name: key.Name,
ExpiresAt: key.ExpiresAt,
LastUsed: key.LastUsed,
LastObservedAt: key.LastObservedAt,
ServiceAccountID: key.ServiceAccountID,
}
}
@@ -128,6 +130,29 @@ func (apiKey *FactorAPIKey) Update(name string, expiresAt uint64) {
apiKey.UpdatedAt = time.Now()
}

func (apiKey *FactorAPIKey) IsExpired() error {
if apiKey.ExpiresAt == 0 {
return nil
}

if time.Now().After(time.Unix(int64(apiKey.ExpiresAt), 0)) {
return errors.New(errors.TypeUnauthenticated, ErrCodeAPIKeyExpired, "api key has expired")
}

return nil
}

func (apiKey *FactorAPIKey) UpdateLastObservedAt(lastObservedAt time.Time) error {
if lastObservedAt.Before(apiKey.LastObservedAt) {
return errors.New(errors.TypeInvalidInput, ErrCodeAPIkeyOlderLastObservedAt, "last observed at is before the current last observed at")
}

apiKey.LastObservedAt = lastObservedAt
apiKey.UpdatedAt = time.Now()

return nil
}
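
Taken together, the two methods above give API keys simple lifecycle semantics: ExpiresAt is a Unix-seconds timestamp where zero means the key never expires, and LastObservedAt may only move forward. A minimal usage sketch with illustrative values:

key := &FactorAPIKey{}                                     // zero ExpiresAt: never expires
_ = key.IsExpired()                                        // nil
key.ExpiresAt = uint64(time.Now().Add(-time.Hour).Unix())  // timestamp in the past
_ = key.IsExpired()                                        // non-nil, ErrCodeAPIKeyExpired
_ = key.UpdateLastObservedAt(time.Now())                   // advances the marker and touches UpdatedAt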

func (key *PostableFactorAPIKey) UnmarshalJSON(data []byte) error {
type Alias PostableFactorAPIKey

@@ -137,7 +162,7 @@ func (key *PostableFactorAPIKey) UnmarshalJSON(data []byte) error {
}

if temp.Name == "" {
return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountFactorAPIkeyInvalidInput, "name cannot be empty")
return errors.New(errors.TypeInvalidInput, ErrCodeAPIkeyInvalidInput, "name cannot be empty")
}

*key = PostableFactorAPIKey(temp)
@@ -153,7 +178,7 @@ func (key *UpdatableFactorAPIKey) UnmarshalJSON(data []byte) error {
}

if temp.Name == "" {
return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountFactorAPIkeyInvalidInput, "name cannot be empty")
return errors.New(errors.TypeInvalidInput, ErrCodeAPIkeyInvalidInput, "name cannot be empty")
}

*key = UpdatableFactorAPIKey(temp)

@@ -4,7 +4,7 @@ import (
"crypto/rand"
"encoding/base64"
"encoding/json"
"slices"
"regexp"
"time"

"github.com/SigNoz/signoz/pkg/errors"
@@ -15,10 +15,11 @@ import (
)

var (
ErrCodeServiceAccountInvalidInput = errors.MustNewCode("service_account_invalid_input")
ErrCodeServiceAccountAlreadyExists = errors.MustNewCode("service_account_already_exists")
ErrCodeServiceAccountNotFound = errors.MustNewCode("service_account_not_found")
ErrCodeServiceAccountRoleAlreadyExists = errors.MustNewCode("service_account_role_already_exists")
ErrCodeServiceAccountInvalidInput = errors.MustNewCode("service_account_invalid_input")
ErrCodeServiceAccountAlreadyExists = errors.MustNewCode("service_account_already_exists")
ErrCodeServiceAccountNotFound = errors.MustNewCode("service_account_not_found")
ErrCodeServiceAccountRoleAlreadyExists = errors.MustNewCode("service_account_role_already_exists")
ErrCodeServiceAccountOperationUnsupported = errors.MustNewCode("service_account_operation_unsupported")
)

var (
@@ -27,25 +28,31 @@ var (
ValidStatus = []valuer.String{StatusActive, StatusDisabled}
)

var (
serviceAccountNameRegex = regexp.MustCompile("^[a-z-]{1,50}$")
)

type StorableServiceAccount struct {
bun.BaseModel `bun:"table:service_account,alias:service_account"`

types.Identifiable
types.TimeAuditable
Name string `bun:"name"`
Email string `bun:"email"`
Status valuer.String `bun:"status"`
OrgID string `bun:"org_id"`
Name string `bun:"name"`
Email string `bun:"email"`
Status valuer.String `bun:"status"`
OrgID string `bun:"org_id"`
DeletedAt time.Time `bun:"deleted_at"`
}

type ServiceAccount struct {
types.Identifiable
types.TimeAuditable
Name string `json:"name" required:"true"`
Email valuer.Email `json:"email" required:"true"`
Roles []string `json:"roles" required:"true" nullable:"false"`
Status valuer.String `json:"status" required:"true"`
OrgID valuer.UUID `json:"orgID" required:"true"`
Name string `json:"name" required:"true"`
Email valuer.Email `json:"email" required:"true"`
Roles []string `json:"roles" required:"true" nullable:"false"`
Status valuer.String `json:"status" required:"true"`
OrgID valuer.UUID `json:"orgId" required:"true"`
DeletedAt time.Time `json:"deletedAt" required:"true"`
}

type PostableServiceAccount struct {
@@ -73,11 +80,12 @@ func NewServiceAccount(name string, email valuer.Email, roles []string, status v
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Name: name,
Email: email,
Roles: roles,
Status: status,
OrgID: orgID,
Name: name,
Email: email,
Roles: roles,
Status: status,
OrgID: orgID,
DeletedAt: time.Time{},
}
}

@@ -90,6 +98,7 @@ func NewServiceAccountFromStorables(storableServiceAccount *StorableServiceAccou
Roles: roles,
Status: storableServiceAccount.Status,
OrgID: valuer.MustNewUUID(storableServiceAccount.OrgID),
DeletedAt: storableServiceAccount.DeletedAt,
}
}

@@ -126,22 +135,46 @@ func NewStorableServiceAccount(serviceAccount *ServiceAccount) *StorableServiceA
Email: serviceAccount.Email.String(),
Status: serviceAccount.Status,
OrgID: serviceAccount.OrgID.String(),
DeletedAt: serviceAccount.DeletedAt,
}
}

func (sa *ServiceAccount) Update(name string, email valuer.Email, roles []string) {
func (sa *ServiceAccount) Update(name string, email valuer.Email, roles []string) error {
if err := sa.ErrIfDisabled(); err != nil {
return err
}

sa.Name = name
sa.Email = email
sa.Roles = roles
sa.UpdatedAt = time.Now()
return nil
}

func (sa *ServiceAccount) UpdateStatus(status valuer.String) {
func (sa *ServiceAccount) UpdateStatus(status valuer.String) error {
if err := sa.ErrIfDisabled(); err != nil {
return err
}

sa.Status = status
sa.UpdatedAt = time.Now()
sa.DeletedAt = time.Now()
return nil
}

func (sa *ServiceAccount) ErrIfDisabled() error {
if sa.Status == StatusDisabled {
return errors.New(errors.TypeUnsupported, ErrCodeServiceAccountOperationUnsupported, "this operation is not supported for disabled service account")
}

return nil
}

func (sa *ServiceAccount) NewFactorAPIKey(name string, expiresAt uint64) (*FactorAPIKey, error) {
if err := sa.ErrIfDisabled(); err != nil {
return nil, err
}

key := make([]byte, 32)
_, err := rand.Read(key)
if err != nil {
@@ -161,7 +194,7 @@ func (sa *ServiceAccount) NewFactorAPIKey(name string, expiresAt uint64) (*Facto
Name: name,
Key: encodedKey,
ExpiresAt: expiresAt,
LastUsed: time.Now(),
LastObservedAt: time.Now(),
ServiceAccountID: sa.ID,
}, nil
}
@@ -204,8 +237,8 @@ func (sa *PostableServiceAccount) UnmarshalJSON(data []byte) error {
return err
}

if temp.Name == "" {
return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "name cannot be empty")
if match := serviceAccountNameRegex.MatchString(temp.Name); !match {
return errors.Newf(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "name must conform to the regex: %s", serviceAccountNameRegex.String())
}

if len(temp.Roles) == 0 {
@@ -224,8 +257,8 @@ func (sa *UpdatableServiceAccount) UnmarshalJSON(data []byte) error {
return err
}

if temp.Name == "" {
return errors.New(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "name cannot be empty")
if match := serviceAccountNameRegex.MatchString(temp.Name); !match {
return errors.Newf(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "name must conform to the regex: %s", serviceAccountNameRegex.String())
}

if len(temp.Roles) == 0 {
@@ -244,8 +277,8 @@ func (sa *UpdatableServiceAccountStatus) UnmarshalJSON(data []byte) error {
return err
}

if !slices.Contains(ValidStatus, temp.Status) {
return errors.Newf(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "invalid status: %s, allowed status are: %v", temp.Status, ValidStatus)
if temp.Status != StatusDisabled {
return errors.Newf(errors.TypeInvalidInput, ErrCodeServiceAccountInvalidInput, "invalid status: %s, allowed status are: %v", temp.Status, StatusDisabled)
}

*sa = UpdatableServiceAccountStatus(temp)

@@ -10,6 +10,7 @@ type Store interface {
// Service Account
Create(context.Context, *StorableServiceAccount) error
Get(context.Context, valuer.UUID, valuer.UUID) (*StorableServiceAccount, error)
GetActiveByOrgIDAndName(context.Context, valuer.UUID, string) (*StorableServiceAccount, error)
GetByID(context.Context, valuer.UUID) (*StorableServiceAccount, error)
List(context.Context, valuer.UUID) ([]*StorableServiceAccount, error)
Update(context.Context, valuer.UUID, *StorableServiceAccount) error

@@ -58,8 +58,6 @@ def build_builder_query(
step_interval: int = DEFAULT_STEP_INTERVAL,
group_by: Optional[List[str]] = None,
filter_expression: Optional[str] = None,
order_by: Optional[List[Dict]] = None,
limit: Optional[int] = None,
functions: Optional[List[Dict]] = None,
disabled: bool = False,
) -> Dict:
@@ -95,12 +93,6 @@ def build_builder_query(
if filter_expression:
spec["filter"] = {"expression": filter_expression}

if order_by:
spec["order"] = order_by

if limit is not None:
spec["limit"] = limit

if functions:
spec["functions"] = functions

@@ -2,20 +2,16 @@
Look at the cumulative_counters_1h.jsonl file for the relevant data
"""

import logging
import os
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Any, Callable, List, Optional, Union

import pytest
from typing import Any, Callable, List

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
build_builder_query,
build_order_by,
get_all_series,
get_series_values,
make_query_request,
@@ -75,200 +71,16 @@ def test_rate_with_steady_values_and_reset(
assert v["value"] >= 0, f"Rate should not be negative: {v['value']}"


def _assert_endpoint_rate_values(endpoint_values: dict) -> None:
# /health: 60 data points (t01-t60), steady +10/min
# rate = 10/60 = 0.167
if "/health" in endpoint_values:
health_values = endpoint_values["/health"]
assert (
len(health_values) >= 58
), f"Expected >= 58 values for /health, got {len(health_values)}"
count_steady_health = sum(1 for v in health_values if v["value"] == 0.167)
assert (
count_steady_health >= 57
), f"Expected >= 57 steady rate values (0.167) for /health, got {count_steady_health}"
# all /health rates should be 0.167 except possibly first/last due to boundaries
for v in health_values[1:-1]:
assert v["value"] == 0.167, f"Expected /health rate 0.167, got {v['value']}"

# /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
# rate = 20/60 = 0.333, gap causes lower averaged rate at boundary
if "/products" in endpoint_values:
products_values = endpoint_values["/products"]
assert (
len(products_values) >= 49
), f"Expected >= 49 values for /products, got {len(products_values)}"
count_steady_products = sum(1 for v in products_values if v["value"] == 0.333)
# most values should be 0.333, some boundary values differ due to 10-min gap
assert (
count_steady_products >= 46
), f"Expected >= 46 steady rate values (0.333) for /products, got {count_steady_products}"
# check that non-0.333 values are due to gap averaging (should be lower)
gap_boundary_values = [v["value"] for v in products_values if v["value"] != 0.333]
for val in gap_boundary_values:
assert (
0 < val < 0.333
), f"Gap boundary values should be between 0 and 0.333, got {val}"

# /checkout: 61 data points (t00-t60), +1/min normal, +50/min spike at t40-t44
# normal rate = 1/60 = 0.0167, spike rate = 50/60 = 0.833
if "/checkout" in endpoint_values:
checkout_values = endpoint_values["/checkout"]
assert (
len(checkout_values) >= 59
), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
count_steady_checkout = sum(1 for v in checkout_values if v["value"] == 0.0167)
assert (
count_steady_checkout >= 53
), f"Expected >= 53 steady rate values (0.0167) for /checkout, got {count_steady_checkout}"
# check that spike values exist (traffic spike +50/min at t40-t44)
count_spike_checkout = sum(1 for v in checkout_values if v["value"] == 0.833)
assert (
count_spike_checkout >= 4
), f"Expected >= 4 spike rate values (0.833) for /checkout, got {count_spike_checkout}"
# spike values should be consecutive
spike_indices = [
i for i, v in enumerate[Any](checkout_values) if v["value"] == 0.833
]
assert len(spike_indices) >= 4, f"Expected >= 4 spike indices, got {spike_indices}"
for i in range(1, len(spike_indices)):
assert (
spike_indices[i] == spike_indices[i - 1] + 1
), f"Spike indices should be consecutive, got {spike_indices}"

# /orders: 60 data points (t00-t60) with gap at t30, counter reset at t31 (150->2)
# rate = 5/60 = 0.0833
# reset at t31 causes: rate at t30 includes gap (lower), t31 has high rate after reset
if "/orders" in endpoint_values:
orders_values = endpoint_values["/orders"]
assert (
len(orders_values) >= 58
), f"Expected >= 58 values for /orders, got {len(orders_values)}"
count_steady_orders = sum(1 for v in orders_values if v["value"] == 0.0833)
assert (
count_steady_orders >= 55
), f"Expected >= 55 steady rate values (0.0833) for /orders, got {count_steady_orders}"
# check for counter reset effects - there should be some non-standard values
non_standard_orders = [v["value"] for v in orders_values if v["value"] != 0.0833]
assert (
len(non_standard_orders) >= 2
), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
# post-reset value should be higher (new counter value / interval)
high_rate_orders = [v for v in non_standard_orders if v > 0.0833]
assert (
len(high_rate_orders) >= 1
), f"Expected at least one high rate value after counter reset, got {non_standard_orders}"

# /users: 56 data points (t05-t60), sparse +1 every 5 minutes
# Rate = 1/60 = 0.0167 during increment, 0 during flat periods
if "/users" in endpoint_values:
users_values = endpoint_values["/users"]
assert (
len(users_values) >= 54
), f"Expected >= 54 values for /users, got {len(users_values)}"
count_zero_users = sum(1 for v in users_values if v["value"] == 0)
# most values should be 0 (flat periods between increments)
assert (
count_zero_users >= 40
), f"Expected >= 40 zero rate values for /users (sparse data), got {count_zero_users}"
# non-zero values should be 0.0167 (1/60 increment rate)
non_zero_users = [v["value"] for v in users_values if v["value"] != 0]
count_increment_rate = sum(1 for v in non_zero_users if v == 0.0167)
assert (
count_increment_rate >= 8
), f"Expected >= 8 increment rate values (0.0167) for /users, got {count_increment_rate}"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"metric_suffix,order_by,limit,expected_count,expected_endpoints",
|
||||
[
|
||||
(
|
||||
"no_order",
|
||||
None, # this is equivalent to sum(metric_name)
|
||||
None,
|
||||
5,
|
||||
["/products", "/health", "/checkout", "/orders", "/users"],
|
||||
),
|
||||
(
|
||||
"only_limit",
|
||||
None, # this is equivalent to sum(metric_name)
|
||||
3,
|
||||
3,
|
||||
["/products", "/health", "/checkout"],
|
||||
),
|
||||
(
|
||||
"asc",
|
||||
[build_order_by("endpoint", "asc")],
|
||||
None,
|
||||
5,
|
||||
["/checkout", "/health", "/orders", "/products", "/users"],
|
||||
),
|
||||
(
|
||||
"asc_lim3",
|
||||
[build_order_by("endpoint", "asc")],
|
||||
3,
|
||||
3,
|
||||
["/checkout", "/health", "/orders"],
|
||||
),
|
||||
(
|
||||
"desc",
|
||||
[build_order_by("endpoint", "desc")],
|
||||
None,
|
||||
5,
|
||||
["/users", "/products", "/orders", "/health", "/checkout"],
|
||||
),
|
||||
(
|
||||
"desc_lim3",
|
||||
[build_order_by("endpoint", "desc")],
|
||||
3,
|
||||
3,
|
||||
["/users", "/products", "/orders"],
|
||||
),
|
||||
(
|
||||
"asc_metric_name",
|
||||
[build_order_by("sum(test_rate_groupby_asc_metric_name)", "asc")],
|
||||
None,
|
||||
5,
|
||||
["/users", "/orders", "/checkout", "/health", "/products"],
|
||||
),
|
||||
(
|
||||
"asc_metric_name_lim3",
|
||||
[build_order_by("sum(test_rate_groupby_asc_metric_name_lim3)", "asc")],
|
||||
3,
|
||||
3,
|
||||
["/users", "/orders", "/checkout"],
|
||||
),
|
||||
(
|
||||
"desc_metric_name",
|
||||
[build_order_by("sum(test_rate_groupby_desc_metric_name)", "desc")],
|
||||
None,
|
||||
5,
|
||||
["/products", "/health", "/checkout", "/orders", "/users"],
|
||||
),
|
||||
(
|
||||
"desc_metric_name_lim3",
|
||||
[build_order_by("sum(test_rate_groupby_desc_metric_name_lim3)", "desc")],
|
||||
3,
|
||||
3,
|
||||
["/products", "/health", "/checkout"],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_rate_group_by_endpoint(
|
||||
signoz: types.SigNoz,
|
||||
create_user_admin: None, # pylint: disable=unused-argument
|
||||
get_token: Callable[[str, str], str],
|
||||
insert_metrics: Callable[[List[Metrics]], None],
|
||||
metric_suffix: str,
|
||||
order_by: Optional[List],
|
||||
limit: Optional[int],
|
||||
expected_count: int,
|
||||
expected_endpoints: Union[set, List[str]],
|
||||
) -> None:
|
||||
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
|
||||
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
|
||||
end_ms = int(now.timestamp() * 1000)
|
||||
metric_name = f"test_rate_groupby_{metric_suffix}"
|
||||
metric_name = "test_rate_groupby"
|
||||
|
||||
metrics = Metrics.load_from_file(
|
||||
CUMULATIVE_COUNTERS_FILE,
|
||||
@@ -285,8 +97,6 @@ def test_rate_group_by_endpoint(
|
||||
"sum",
|
||||
temporality="cumulative",
|
||||
group_by=["endpoint"],
|
||||
order_by=order_by,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
response = make_query_request(signoz, token, start_ms, end_ms, [query])
|
||||
@@ -295,23 +105,10 @@ def test_rate_group_by_endpoint(
|
||||
data = response.json()
|
||||
all_series = get_all_series(data, "A")
|
||||
|
||||
# Should have 5 different endpoints
|
||||
assert (
|
||||
len(all_series) == expected_count
|
||||
), f"Expected {expected_count} series, got {len(all_series)}"
|
||||
|
||||
endpoint_labels = [
|
||||
series.get("labels", [{}])[0].get("value", "unknown")
|
||||
for series in all_series
|
||||
]
|
||||
|
||||
if isinstance(expected_endpoints, set):
|
||||
assert (
|
||||
set(endpoint_labels) == expected_endpoints
|
||||
), f"Expected endpoints {expected_endpoints}, got {set(endpoint_labels)}"
|
||||
else:
|
||||
assert endpoint_labels == expected_endpoints, (
|
||||
f"Expected endpoints {expected_endpoints}, got {endpoint_labels}"
|
||||
)
|
||||
len(all_series) == 5
|
||||
), f"Expected 5 series for 5 endpoints, got {len(all_series)}"
|
||||
|
||||
# endpoint -> values
|
||||
endpoint_values = {}
|
||||
@@ -320,6 +117,11 @@ def test_rate_group_by_endpoint(
|
||||
values = sorted(series.get("values", []), key=lambda x: x["timestamp"])
|
||||
endpoint_values[endpoint] = values
|
||||
|
||||
expected_endpoints = {"/products", "/health", "/checkout", "/orders", "/users"}
|
||||
assert (
|
||||
set(endpoint_values.keys()) == expected_endpoints
|
||||
), f"Expected endpoints {expected_endpoints}, got {set(endpoint_values.keys())}"
|
||||
|
||||
# at no point rate should be negative
|
||||
for endpoint, values in endpoint_values.items():
|
||||
for v in values:
|
||||
@@ -327,4 +129,103 @@ def test_rate_group_by_endpoint(
|
||||
v["value"] >= 0
|
||||
), f"Rate for {endpoint} should not be negative: {v['value']}"
|
||||
|
||||
_assert_endpoint_rate_values(endpoint_values)
|
||||
# /health: 60 data points (t01-t60), steady +10/min
|
||||
# rate = 10/60 = 0.167
|
||||
health_values = endpoint_values["/health"]
|
||||
assert (
|
||||
len(health_values) >= 58
|
||||
), f"Expected >= 58 values for /health, got {len(health_values)}"
|
||||
count_steady_health = sum(1 for v in health_values if v["value"] == 0.167)
|
||||
assert (
|
||||
count_steady_health >= 57
|
||||
), f"Expected >= 57 steady rate values (0.167) for /health, got {count_steady_health}"
|
||||
# all /health rates should be 0.167 except possibly first/last due to boundaries
|
||||
for v in health_values[1:-1]:
|
||||
assert v["value"] == 0.167, f"Expected /health rate 0.167, got {v['value']}"
|
||||
|
||||
# /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
|
||||
# rate = 20/60 = 0.333, gap causes lower averaged rate at boundary
|
||||
products_values = endpoint_values["/products"]
|
||||
assert (
|
||||
len(products_values) >= 49
|
||||
), f"Expected >= 49 values for /products, got {len(products_values)}"
|
||||
count_steady_products = sum(1 for v in products_values if v["value"] == 0.333)
|
||||
|
||||
# most values should be 0.333, some boundary values differ due to 10-min gap
|
||||
assert (
|
||||
count_steady_products >= 46
|
||||
), f"Expected >= 46 steady rate values (0.333) for /products, got {count_steady_products}"
|
||||
|
||||
# check that non-0.333 values are due to gap averaging (should be lower)
|
||||
gap_boundary_values = [v["value"] for v in products_values if v["value"] != 0.333]
|
||||
for val in gap_boundary_values:
|
||||
assert (
|
||||
0 < val < 0.333
|
||||
), f"Gap boundary values should be between 0 and 0.333, got {val}"
|
||||
|
||||
# /checkout: 61 data points (t00-t60), +1/min normal, +50/min spike at t40-t44
|
||||
# normal rate = 1/60 = 0.0167, spike rate = 50/60 = 0.833
|
||||
checkout_values = endpoint_values["/checkout"]
|
||||
assert (
|
||||
len(checkout_values) >= 59
|
||||
), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
|
||||
count_steady_checkout = sum(1 for v in checkout_values if v["value"] == 0.0167)
|
||||
assert (
|
||||
count_steady_checkout >= 53
|
||||
), f"Expected >= 53 steady rate values (0.0167) for /checkout, got {count_steady_checkout}"
|
||||
# check that spike values exist (traffic spike +50/min at t40-t44)
|
||||
count_spike_checkout = sum(1 for v in checkout_values if v["value"] == 0.833)
|
||||
assert (
|
||||
count_spike_checkout >= 4
|
||||
), f"Expected >= 4 spike rate values (0.833) for /checkout, got {count_spike_checkout}"
|
||||
|
||||
# spike values should be consecutive
|
||||
spike_indices = [
|
||||
i for i, v in enumerate[Any](checkout_values) if v["value"] == 0.833
|
||||
]
|
||||
assert len(spike_indices) >= 4, f"Expected >= 4 spike indices, got {spike_indices}"
|
||||
# consecutiveness
|
||||
for i in range(1, len(spike_indices)):
|
||||
assert (
|
||||
spike_indices[i] == spike_indices[i - 1] + 1
|
||||
), f"Spike indices should be consecutive, got {spike_indices}"
|
||||
|
||||
# /orders: 60 data points (t00-t60) with gap at t30, counter reset at t31 (150->2)
|
||||
# rate = 5/60 = 0.0833
|
||||
# reset at t31 causes: rate at t30 includes gap (lower), t31 has high rate after reset
|
||||
orders_values = endpoint_values["/orders"]
|
||||
assert (
|
||||
len(orders_values) >= 58
|
||||
), f"Expected >= 58 values for /orders, got {len(orders_values)}"
|
||||
count_steady_orders = sum(1 for v in orders_values if v["value"] == 0.0833)
|
||||
assert (
|
||||
count_steady_orders >= 55
|
||||
), f"Expected >= 55 steady rate values (0.0833) for /orders, got {count_steady_orders}"
|
||||
# check for counter reset effects - there should be some non-standard values
|
||||
non_standard_orders = [v["value"] for v in orders_values if v["value"] != 0.0833]
|
||||
assert (
|
||||
len(non_standard_orders) >= 2
|
||||
), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
|
||||
# post-reset value should be higher (new counter value / interval)
|
||||
high_rate_orders = [v for v in non_standard_orders if v > 0.0833]
|
||||
assert (
|
||||
len(high_rate_orders) >= 1
|
||||
), f"Expected at least one high rate value after counter reset, got {non_standard_orders}"
|
||||
|
||||
# /users: 56 data points (t05-t60), sparse +1 every 5 minutes
|
||||
# Rate = 1/60 = 0.0167 during increment, 0 during flat periods
|
||||
users_values = endpoint_values["/users"]
|
||||
assert (
|
||||
len(users_values) >= 54
|
||||
), f"Expected >= 54 values for /users, got {len(users_values)}"
|
||||
count_zero_users = sum(1 for v in users_values if v["value"] == 0)
|
||||
# most values should be 0 (flat periods between increments)
|
||||
assert (
|
||||
count_zero_users >= 40
|
||||
), f"Expected >= 40 zero rate values for /users (sparse data), got {count_zero_users}"
|
||||
# non-zero values should be 0.0167 (1/60 increment rate)
|
||||
non_zero_users = [v["value"] for v in users_values if v["value"] != 0]
|
||||
count_increment_rate = sum(1 for v in non_zero_users if v == 0.0167)
|
||||
assert (
|
||||
count_increment_rate >= 8
|
||||
), f"Expected >= 8 increment rate values (0.0167) for /users, got {count_increment_rate}"
|
||||
|
||||
@@ -5,7 +5,7 @@ Look at the multi_temporality_counters_1h.jsonl file for the relevant data
import random
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List, Optional, Union
from typing import Callable, List

import pytest

@@ -14,7 +14,6 @@ from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    build_builder_query,
    build_order_by,
    get_all_series,
    get_series_values,
    make_query_request,
@@ -92,198 +91,6 @@ def test_with_steady_values_and_reset(
            ), f"{time_aggregation} should not be negative: {v['value']}"


def _assert_endpoint_group_values(  # pylint: disable=too-many-arguments
    endpoint_values: dict,
    stable_health_value: float,
    stable_products_value: float,
    stable_checkout_value: float,
    spike_checkout_value: float,
    stable_orders_value: float,
    spike_users_value: float,
    time_aggregation: str,
) -> None:
    # /health: 60 data points (t01-t60), steady +10/min
    if "/health" in endpoint_values:
        health_values = endpoint_values["/health"]
        assert (
            len(health_values) >= 58
        ), f"Expected >= 58 values for /health, got {len(health_values)}"
        count_steady_health = sum(
            1 for v in health_values if v["value"] == stable_health_value
        )
        assert (
            count_steady_health >= 57
        ), f"Expected >= 57 steady rate values ({stable_health_value}) for /health, got {count_steady_health}"
        # all /health rates should be stable except possibly first/last due to boundaries
        for v in health_values[1:-1]:
            assert (
                v["value"] == stable_health_value
            ), f"Expected /health rate {stable_health_value}, got {v['value']}"

    # /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
    if "/products" in endpoint_values:
        products_values = endpoint_values["/products"]
        assert (
            len(products_values) >= 49
        ), f"Expected >= 49 values for /products, got {len(products_values)}"
        count_steady_products = sum(
            1 for v in products_values if v["value"] == stable_products_value
        )
        # most values should be stable, some boundary values differ due to 10-min gap
        assert (
            count_steady_products >= 46
        ), f"Expected >= 46 steady rate values ({stable_products_value}) for /products, got {count_steady_products}"
        # check that non-stable values are due to gap averaging (should be lower)
        gap_boundary_values = [
            v["value"] for v in products_values if v["value"] != stable_products_value
        ]
        for val in gap_boundary_values:
            assert (
                0 < val < stable_products_value
            ), f"Gap boundary values should be between 0 and {stable_products_value}, got {val}"

    # /checkout: 61 data points (t00-t60), +1/min normal, +50/min spike at t40-t44
    if "/checkout" in endpoint_values:
        checkout_values = endpoint_values["/checkout"]
        assert (
            len(checkout_values) >= 59
        ), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
        count_steady_checkout = sum(
            1 for v in checkout_values if v["value"] == stable_checkout_value
        )
        assert (
            count_steady_checkout >= 53
        ), f"Expected >= 53 steady {time_aggregation} values ({stable_checkout_value}) for /checkout, got {count_steady_checkout}"
        # check that spike values exist (traffic spike +50/min at t40-t44)
        count_spike_checkout = sum(
            1 for v in checkout_values if v["value"] == spike_checkout_value
        )
        assert (
            count_spike_checkout >= 4
        ), f"Expected >= 4 spike {time_aggregation} values ({spike_checkout_value}) for /checkout, got {count_spike_checkout}"
        # spike values should be consecutive
        spike_indices = [
            i for i, v in enumerate(checkout_values) if v["value"] == spike_checkout_value
        ]
        assert len(spike_indices) >= 4, f"Expected >= 4 spike indices, got {spike_indices}"
        for i in range(1, len(spike_indices)):
            assert (
                spike_indices[i] == spike_indices[i - 1] + 1
            ), f"Spike indices should be consecutive, got {spike_indices}"

    # /orders: 60 data points (t00-t60) with gap at t30, counter reset at t31 (150->2)
    # reset at t31 causes: rate/increase at t30 includes gap (lower), t31 has high rate after reset
    if "/orders" in endpoint_values:
        orders_values = endpoint_values["/orders"]
        assert (
            len(orders_values) >= 58
        ), f"Expected >= 58 values for /orders, got {len(orders_values)}"
        count_steady_orders = sum(
            1 for v in orders_values if v["value"] == stable_orders_value
        )
        assert (
            count_steady_orders >= 55
        ), f"Expected >= 55 steady {time_aggregation} values ({stable_orders_value}) for /orders, got {count_steady_orders}"
        # check for counter reset effects - there should be some non-standard values
        non_standard_orders = [
            v["value"] for v in orders_values if v["value"] != stable_orders_value
        ]
        assert (
            len(non_standard_orders) >= 2
        ), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
        # post-reset value should be higher (new counter value / interval)
        high_rate_orders = [v for v in non_standard_orders if v > stable_orders_value]
        assert (
            len(high_rate_orders) >= 1
        ), f"Expected at least one high {time_aggregation} value after counter reset, got {non_standard_orders}"

    # /users: 56 data points (t05-t60), sparse +1 every 5 minutes
    if "/users" in endpoint_values:
        users_values = endpoint_values["/users"]
        assert (
            len(users_values) >= 54
        ), f"Expected >= 54 values for /users, got {len(users_values)}"
        count_zero_users = sum(1 for v in users_values if v["value"] == 0)
        # most values should be 0 (flat periods between increments)
        assert (
            count_zero_users >= 40
        ), f"Expected >= 40 zero {time_aggregation} values for /users (sparse data), got {count_zero_users}"
        # non-zero values should be stable increment rate
        non_zero_users = [v["value"] for v in users_values if v["value"] != 0]
        count_increment_rate = sum(1 for v in non_zero_users if v == spike_users_value)
        assert (
            count_increment_rate >= 8
        ), f"Expected >= 8 increment {time_aggregation} values ({spike_users_value}) for /users, got {count_increment_rate}"


@pytest.mark.parametrize(
    "order_suffix,order_by_spec,limit,expected_count,expected_endpoints",
    [
        (
            "no_order",
            None,
            None,
            5,
            ["/products", "/health", "/checkout", "/orders", "/users"],
        ),
        (
            "asc",
            ("endpoint", "asc"),
            None,
            5,
            ["/checkout", "/health", "/orders", "/products", "/users"],
        ),
        (
            "asc_lim3",
            ("endpoint", "asc"),
            3,
            3,
            ["/checkout", "/health", "/orders"],
        ),
        (
            "desc",
            ("endpoint", "desc"),
            None,
            5,
            ["/users", "/products", "/orders", "/health", "/checkout"],
        ),
        (
            "desc_lim3",
            ("endpoint", "desc"),
            3,
            3,
            ["/users", "/products", "/orders"],
        ),
        (
            "asc_metric_name",
            ("sum_metric", "asc"),
            None,
            5,
            ["/users", "/orders", "/checkout", "/health", "/products"],
        ),
        (
            "asc_metric_name_lim3",
            ("sum_metric", "asc"),
            3,
            3,
            ["/users", "/orders", "/checkout"],
        ),
        (
            "desc_metric_name",
            ("sum_metric", "desc"),
            None,
            5,
            ["/products", "/health", "/checkout", "/orders", "/users"],
        ),
        (
            "desc_metric_name_lim3",
            ("sum_metric", "desc"),
            3,
            3,
            ["/products", "/health", "/checkout"],
        ),
    ],
)
@pytest.mark.parametrize(
    "time_aggregation, stable_health_value, stable_products_value, stable_checkout_value, spike_checkout_value, stable_orders_value, spike_users_value",
    [
@@ -303,24 +110,11 @@ def test_group_by_endpoint(
    spike_checkout_value: float,
    stable_orders_value: float,
    spike_users_value: float,
    order_suffix: str,
    order_by_spec: Optional[tuple],
    limit: Optional[int],
    expected_count: int,
    expected_endpoints: Union[set, List[str]],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = f"test_{time_aggregation}_groupby_{order_suffix}"

    # Build order_by at runtime so metric name reflects actual time_aggregation
    order_by = None
    if order_by_spec is not None:
        key, direction = order_by_spec
        if key == "sum_metric":
            key = f"sum({metric_name})"
        order_by = [build_order_by(key, direction)]
    metric_name = f"test_{time_aggregation}_groupby"

    metrics = Metrics.load_from_file(
        MULTI_TEMPORALITY_FILE,
@@ -336,8 +130,6 @@ def test_group_by_endpoint(
        time_aggregation,
        "sum",
        group_by=["endpoint"],
        order_by=order_by,
        limit=limit,
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query])
@@ -345,23 +137,10 @@ def test_group_by_endpoint(

    data = response.json()
    all_series = get_all_series(data, "A")
    # Should have 5 different endpoints
    assert (
        len(all_series) == expected_count
    ), f"Expected {expected_count} series, got {len(all_series)}"

    endpoint_labels = [
        series.get("labels", [{}])[0].get("value", "unknown")
        for series in all_series
    ]

    if isinstance(expected_endpoints, set):
        assert (
            set(endpoint_labels) == expected_endpoints
        ), f"Expected endpoints {expected_endpoints}, got {set(endpoint_labels)}"
    else:
        assert endpoint_labels == expected_endpoints, (
            f"Expected endpoints {expected_endpoints}, got {endpoint_labels}"
        )
        len(all_series) == 5
    ), f"Expected 5 series for 5 endpoints, got {len(all_series)}"

    # endpoint -> values
    endpoint_values = {}
@@ -370,6 +149,11 @@ def test_group_by_endpoint(
        values = sorted(series.get("values", []), key=lambda x: x["timestamp"])
        endpoint_values[endpoint] = values

    expected_endpoints = {"/products", "/health", "/checkout", "/orders", "/users"}
    assert (
        set(endpoint_values.keys()) == expected_endpoints
    ), f"Expected endpoints {expected_endpoints}, got {set(endpoint_values.keys())}"

    # the rate should never be negative at any point
    for endpoint, values in endpoint_values.items():
        for v in values:
@@ -377,16 +161,117 @@ def test_group_by_endpoint(
                v["value"] >= 0
            ), f"Rate for {endpoint} should not be negative: {v['value']}"

    _assert_endpoint_group_values(
        endpoint_values,
        stable_health_value,
        stable_products_value,
        stable_checkout_value,
        spike_checkout_value,
        stable_orders_value,
        spike_users_value,
        time_aggregation,
    # /health: 60 data points (t01-t60), steady +10/min
    health_values = endpoint_values["/health"]
    assert (
        len(health_values) >= 58
    ), f"Expected >= 58 values for /health, got {len(health_values)}"
    count_steady_health = sum(
        1 for v in health_values if v["value"] == stable_health_value
    )
    assert (
        count_steady_health >= 57
    ), f"Expected >= 57 steady rate values ({stable_health_value}) for /health, got {count_steady_health}"
    # all /health rates should be stable except possibly first/last due to boundaries
    for v in health_values[1:-1]:
        assert (
            v["value"] == stable_health_value
        ), f"Expected /health rate {stable_health_value}, got {v['value']}"

    # /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
    products_values = endpoint_values["/products"]
    assert (
        len(products_values) >= 49
    ), f"Expected >= 49 values for /products, got {len(products_values)}"
    count_steady_products = sum(
        1 for v in products_values if v["value"] == stable_products_value
    )

    # most values should be stable, some boundary values differ due to 10-min gap
    assert (
        count_steady_products >= 46
    ), f"Expected >= 46 steady rate values ({stable_products_value}) for /products, got {count_steady_products}"

    # check that non-stable values are due to gap averaging (should be lower)
    gap_boundary_values = [
        v["value"] for v in products_values if v["value"] != stable_products_value
    ]
    for val in gap_boundary_values:
        assert (
            0 < val < stable_products_value
        ), f"Gap boundary values should be between 0 and {stable_products_value}, got {val}"

    # /checkout: 61 data points (t00-t60), +1/min normal, +50/min spike at t40-t44
    checkout_values = endpoint_values["/checkout"]
    assert (
        len(checkout_values) >= 59
    ), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
    count_steady_checkout = sum(
        1 for v in checkout_values if v["value"] == stable_checkout_value
    )
    assert (
        count_steady_checkout >= 53
    ), f"Expected >= 53 steady {time_aggregation} values ({stable_checkout_value}) for /checkout, got {count_steady_checkout}"
    # check that spike values exist (traffic spike +50/min at t40-t44)
    count_spike_checkout = sum(
        1 for v in checkout_values if v["value"] == spike_checkout_value
    )
    assert (
        count_spike_checkout >= 4
    ), f"Expected >= 4 spike {time_aggregation} values ({spike_checkout_value}) for /checkout, got {count_spike_checkout}"

    # spike values should be consecutive
    spike_indices = [
        i for i, v in enumerate(checkout_values) if v["value"] == spike_checkout_value
    ]
    assert len(spike_indices) >= 4, f"Expected >= 4 spike indices, got {spike_indices}"
    # consecutiveness
    for i in range(1, len(spike_indices)):
        assert (
            spike_indices[i] == spike_indices[i - 1] + 1
        ), f"Spike indices should be consecutive, got {spike_indices}"

    # /orders: 60 data points (t00-t60) with gap at t30, counter reset at t31 (150->2)
    # reset at t31 causes: rate/increase at t30 includes gap (lower), t31 has high rate after reset
    orders_values = endpoint_values["/orders"]
    assert (
        len(orders_values) >= 58
    ), f"Expected >= 58 values for /orders, got {len(orders_values)}"
    count_steady_orders = sum(
        1 for v in orders_values if v["value"] == stable_orders_value
    )
    assert (
        count_steady_orders >= 55
    ), f"Expected >= 55 steady {time_aggregation} values ({stable_orders_value}) for /orders, got {count_steady_orders}"
    # check for counter reset effects - there should be some non-standard values
    non_standard_orders = [
        v["value"] for v in orders_values if v["value"] != stable_orders_value
    ]
    assert (
        len(non_standard_orders) >= 2
    ), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
    # post-reset value should be higher (new counter value / interval)
    high_rate_orders = [v for v in non_standard_orders if v > stable_orders_value]
    assert (
        len(high_rate_orders) >= 1
    ), f"Expected at least one high {time_aggregation} value after counter reset, got {non_standard_orders}"

    # /users: 56 data points (t05-t60), sparse +1 every 5 minutes
    users_values = endpoint_values["/users"]
    assert (
        len(users_values) >= 54
    ), f"Expected >= 54 values for /users, got {len(users_values)}"
    count_zero_users = sum(1 for v in users_values if v["value"] == 0)
    # most values should be 0 (flat periods between increments)
    assert (
        count_zero_users >= 40
    ), f"Expected >= 40 zero {time_aggregation} values for /users (sparse data), got {count_zero_users}"
    # non-zero values should be the stable increment rate
    non_zero_users = [v["value"] for v in users_values if v["value"] != 0]
    count_increment_rate = sum(1 for v in non_zero_users if v == spike_users_value)
    assert (
        count_increment_rate >= 8
    ), f"Expected >= 8 increment {time_aggregation} values ({spike_users_value}) for /users, got {count_increment_rate}"


@pytest.mark.parametrize(
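The /orders expectations above hinge on counter-reset handling: when a cumulative counter drops (150 -> 2 at t31), the sample after the reset is taken as the increase since the reset, which is why no rate ever goes negative. A reset-aware delta in the usual Prometheus style, as a sketch only (not the querier's actual implementation):

def reset_aware_delta(prev: float, curr: float) -> float:
    # A drop means the counter restarted; assume it restarted from zero and
    # treat the new value itself as the increase instead of a negative diff.
    return curr if curr < prev else curr - prev

assert reset_aware_delta(140, 150) == 10  # normal growth
assert reset_aware_delta(150, 2) == 2     # reset at t31: positive, so the post-reset rate is high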
@@ -2,12 +2,9 @@
Look at the histogram_data_1h.jsonl file for the relevant data
"""

import logging
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List, Optional, Union

logger = logging.getLogger(__name__)
from typing import Callable, List

import pytest

@@ -16,7 +13,6 @@ from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    build_builder_query,
    build_order_by,
    get_all_series,
    get_series_values,
    make_query_request,
@@ -24,7 +20,6 @@ from fixtures.querier import (
from fixtures.utils import get_testdata_file_path

FILE = get_testdata_file_path("histogram_data_1h.jsonl")
FILE_WITH_MANY_GROUPS = get_testdata_file_path("histogram_data_1h_many_groups.jsonl")


@pytest.mark.parametrize(
@@ -526,568 +521,4 @@ def test_histogram_percentile_for_delta_service(
    assert len(result_values) == 60
    assert result_values[0]["value"] == zeroth_value
    assert result_values[1]["value"] == first_value
    assert result_values[-1]["value"] == last_value


def _assert_series_endpoint_labels(
    series: list,
    expected_endpoints: Union[set, List[str]],
    prefix: str,
) -> None:
    labels = [s.get("labels", [{}])[0].get("value", "unknown") for s in series]
    if isinstance(expected_endpoints, set):
        assert (
            set(labels) == expected_endpoints
        ), f"Expected {prefix} endpoints {expected_endpoints}, got {set(labels)}"
    else:
        assert labels == expected_endpoints, (
            f"Expected {prefix} endpoints in order {expected_endpoints}, got {labels}"
        )


@pytest.mark.parametrize(
    "order_suffix,order_by,limit,expected_count,expected_endpoints",
    [
        (
            "no_order",
            None,
            None,
            3,
            ["/checkout", "/health", "/orders"],
        ),
        (
            "asc",
            [build_order_by("endpoint", "asc")],
            None,
            3,
            ["/checkout", "/health", "/orders"],
        ),
        (
            "asc_lim2",
            [build_order_by("endpoint", "asc")],
            2,
            2,
            ["/checkout", "/health"],
        ),
        (
            "desc",
            [build_order_by("endpoint", "desc")],
            None,
            3,
            ["/orders", "/health", "/checkout"],
        ),
        (
            "desc_lim2",
            [build_order_by("endpoint", "desc")],
            2,
            2,
            ["/orders", "/health"],
        ),
        (
            "asc_metric_name",
            [build_order_by("count(test_histogram_count_groupby_asc_metric_name)", "asc")],
            None,
            3,
            ["/health", "/orders", "/checkout"],  ## health and orders have the same size, so they are then sorted by endpoint as a tiebreaker (see the ordering sketch after this test)
        ),
        (
            "asc_metric_name_lim2",
            [build_order_by("count(test_histogram_count_groupby_asc_metric_name_lim2)", "asc")],
            2,
            2,
            ["/health", "/orders"],
        ),
        (
            "desc_metric_name",
            [build_order_by("count(test_histogram_count_groupby_desc_metric_name)", "desc")],
            None,
            3,
            ["/checkout", "/health", "/orders"],  ## health and orders have the same size, so they are then sorted by endpoint as a tiebreaker
        ),
        (
            "desc_metric_name_lim2",
            [build_order_by("count(test_histogram_count_groupby_desc_metric_name_lim2)", "desc")],
            2,
            2,
            ["/checkout", "/health"],
        ),
    ],
)
def test_histogram_count_group_by_endpoint(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    order_suffix: str,
    order_by: Optional[List],
    limit: Optional[int],
    expected_count: int,
    expected_endpoints: Union[set, List[str]],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = f"test_histogram_count_groupby_{order_suffix}"

    metrics = Metrics.load_from_file(
        FILE,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query_count = build_builder_query(
        "A",
        metric_name,
        "increase",
        "count",
        comparisonSpaceAggregationParam={"threshold": 1000, "operator": "<="},
        group_by=["endpoint"],
        order_by=order_by,
        limit=limit,
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query_count])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    count_all_series = get_all_series(data, "A")

    assert (
        len(count_all_series) == expected_count
    ), f"Expected {expected_count} series, got {len(count_all_series)}"

    _assert_series_endpoint_labels(count_all_series, expected_endpoints, "count")

    count_values = {}
    for series in count_all_series:
        endpoint = series.get("labels", [{}])[0].get("value", "unknown")
        count_values[endpoint] = sorted(
            series.get("values", []), key=lambda x: x["timestamp"]
        )

    for endpoint, values in count_values.items():
        for v in values:
            assert v["value"] >= 0, f"Count for {endpoint} should not be negative: {v['value']}"

    # /health (cumulative, service=api): 59 points, increase starts at 11/min → 69/min
    if "/health" in count_values:
        vals = count_values["/health"]
        assert vals[0]["value"] == 11, f"Expected /health count first=11, got {vals[0]['value']}"
        assert vals[-1]["value"] == 69, f"Expected /health count last=69, got {vals[-1]['value']}"

    # /orders (cumulative, service=api): same distribution as /health
    if "/orders" in count_values:
        vals = count_values["/orders"]
        assert vals[0]["value"] == 11, f"Expected /orders count first=11, got {vals[0]['value']}"
        assert vals[-1]["value"] == 69, f"Expected /orders count last=69, got {vals[-1]['value']}"

    # /checkout (delta, service=web): 60 points, zeroth=12345 (raw delta), then 11/min → 69/min
    if "/checkout" in count_values:
        vals = count_values["/checkout"]
        assert vals[0]["value"] == 12345, f"Expected /checkout count zeroth=12345, got {vals[0]['value']}"
        assert vals[1]["value"] == 11, f"Expected /checkout count first=11, got {vals[1]['value']}"
        assert vals[-1]["value"] == 69, f"Expected /checkout count last=69, got {vals[-1]['value']}"


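The tiebreaker comments in the parametrize cases above (and in the p75 cases below) all describe the same ordering rule: series are sorted by the aggregated value, ties fall back to the group-by label, and the limit is applied after sorting. A sketch of that rule with made-up totals (the numbers are illustrative, not taken from the jsonl fixtures):

series = [("/checkout", 4140), ("/health", 2400), ("/orders", 2400)]
# ascending by aggregated value, endpoint as the tiebreaker, limit applied last
ordered = sorted(series, key=lambda s: (s[1], s[0]))
assert [s[0] for s in ordered] == ["/health", "/orders", "/checkout"]
assert [s[0] for s in ordered[:2]] == ["/health", "/orders"]  # the asc_metric_name_lim2 shape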
@pytest.mark.parametrize(
    "order_suffix,order_by,limit,expected_count,expected_endpoints",
    [
        (
            "no_order",
            None,
            None,
            4,
            ["/checkout", "/health", "/orders", "/coupon"],
        ),
        (
            "only_limit",
            None,
            1,
            1,
            ["/checkout"],  ## health and checkout have the same size, so they are then sorted by endpoint as a tiebreaker, and only checkout makes the limit
        ),
        (
            "asc",
            [build_order_by("endpoint", "asc")],
            None,
            4,
            ["/checkout", "/coupon", "/health", "/orders"],
        ),
        (
            "asc_lim2",
            [build_order_by("endpoint", "asc")],
            2,
            2,
            ["/checkout", "/coupon"],
        ),
        (
            "desc",
            [build_order_by("endpoint", "desc")],
            None,
            4,
            ["/orders", "/health", "/coupon", "/checkout"],
        ),
        (
            "desc_lim2",
            [build_order_by("endpoint", "desc")],
            2,
            2,
            ["/orders", "/health"],
        ),
        (
            "asc_metric_name",
            [build_order_by("p75(test_histogram_p75_groupby_asc_metric_name)", "asc")],
            None,
            4,
            ["/coupon", "/orders", "/checkout", "/health"],  ## health and checkout have the same size, so they are then sorted by endpoint as a tiebreaker
        ),
        (
            "asc_metric_name_lim2",
            [build_order_by("p75(test_histogram_p75_groupby_asc_metric_name_lim2)", "asc")],
            2,
            2,
            ["/coupon", "/orders"],
        ),
        (
            "asc_metric_name_lim3",
            [build_order_by("p75(test_histogram_p75_groupby_asc_metric_name_lim3)", "asc")],
            3,
            3,
            ["/coupon", "/orders", "/checkout"],  ## health and checkout have the same size, so they are then sorted by endpoint as a tiebreaker, and only checkout makes the limit
        ),
        (
            "desc_metric_name",
            [build_order_by("p75(test_histogram_p75_groupby_desc_metric_name)", "desc")],
            None,
            4,
            ["/checkout", "/health", "/orders", "/coupon"],
        ),
        (
            "desc_metric_name_lim2",
            [build_order_by("p75(test_histogram_p75_groupby_desc_metric_name_lim2)", "desc")],
            2,
            2,
            ["/checkout", "/health"],
        ),
        (
            "desc_metric_name_lim2",
            [build_order_by("p75(test_histogram_p75_groupby_desc_metric_name_lim2)", "desc")],
            1,
            1,
            ["/checkout"],  ## health and checkout have the same size, so they are then sorted by endpoint as a tiebreaker, and only checkout makes the limit
        ),
    ],
)
def test_histogram_percentile_group_by_endpoint(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    order_suffix: str,
    order_by: Optional[List],
    limit: Optional[int],
    expected_count: int,
    expected_endpoints: Union[set, List[str]],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = f"test_histogram_p75_groupby_{order_suffix}"

    metrics = Metrics.load_from_file(
        FILE_WITH_MANY_GROUPS,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query_p75 = build_builder_query(
        "A",
        metric_name,
        "doesnotreallymatter",
        "p75",
        group_by=["endpoint"],
        order_by=order_by,
        limit=limit,
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query_p75])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    p75_series = get_all_series(data, "A")

    for series in p75_series:
        endpoint = series.get("labels", [{}])[0].get("value", "unknown")
        values = series.get("values", [])

    assert (
        len(p75_series) == expected_count
    ), f"Expected {expected_count} p75 series, got {len(p75_series)}"

    _assert_series_endpoint_labels(p75_series, expected_endpoints, "p75")

    p75_values = {}
    for series in p75_series:
        endpoint = series.get("labels", [{}])[0].get("value", "unknown")
        p75_values[endpoint] = sorted(
            series.get("values", []), key=lambda x: x["timestamp"]
        )

    for endpoint, values in p75_values.items():
        for v in values:
            assert v["value"] >= 0, f"p75 for {endpoint} should not be negative: {v['value']}"

    # /health (cumulative, service=api)
    if "/health" in p75_values:
        vals = p75_values["/health"]
        assert vals[0]["value"] == 6000, f"Expected /health p75 first=6000, got {vals[0]['value']}"
        assert vals[-1]["value"] == 6000, f"Expected /health p75 last=6000, got {vals[-1]['value']}"

    # /orders (cumulative, service=api): same distribution as /health
    if "/orders" in p75_values:
        vals = p75_values["/orders"]
        assert vals[0]["value"] == 4500, f"Expected /orders p75 first=4500, got {vals[0]['value']}"
        assert vals[-1]["value"] == 4500, f"Expected /orders p75 last=4500, got {vals[-1]['value']}"

    # /checkout (delta, service=web): 60 points
    if "/checkout" in p75_values:
        vals = p75_values["/checkout"]
        assert vals[0]["value"] == 6000, f"Expected /checkout p75 zeroth=6000, got {vals[0]['value']}"
        assert vals[1]["value"] == 6000, f"Expected /checkout p75 first=6000, got {vals[1]['value']}"
        assert vals[-1]["value"] == 6000, f"Expected /checkout p75 last=6000, got {vals[-1]['value']}"

    # /coupon (delta, service=web): 60 points
    if "/coupon" in p75_values:
        vals = p75_values["/coupon"]
        assert vals[0]["value"] == 1125, f"Expected /coupon p75 zeroth=1125, got {vals[0]['value']}"
        assert vals[1]["value"] == 1125, f"Expected /coupon p75 first=1125, got {vals[1]['value']}"
        assert vals[-1]["value"] == 1125, f"Expected /coupon p75 last=1125, got {vals[-1]['value']}"


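The p75 constants asserted above (6000, 4500, 1125, ...) are what linear interpolation inside a histogram bucket produces. A sketch of the usual histogram-quantile calculation over cumulative bucket counts (the bucket bounds and counts below are illustrative, not the jsonl fixture data):

def percentile_from_buckets(q: float, bounds: list, cum_counts: list) -> float:
    # bounds are bucket upper bounds, cum_counts are cumulative observation
    # counts; interpolate linearly inside the bucket containing the target rank.
    rank = q * cum_counts[-1]
    lower, prev = 0.0, 0
    for upper, count in zip(bounds, cum_counts):
        if rank <= count:
            return lower + (upper - lower) * (rank - prev) / (count - prev)
        lower, prev = upper, count
    return bounds[-1]

# the 75th percentile falls halfway into the 4000-8000 bucket -> 6000
assert percentile_from_buckets(0.75, [4000.0, 8000.0], [50, 100]) == 6000.0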
@pytest.mark.parametrize(
    "order_suffix,order_by,limit,expected_count,expected_endpoints,expected_status_codes",
    [
        (
            "no_order",
            None,
            None,
            5,
            ["/checkout", "/health", "/orders", "/coupon", "/coupon"],  ## coupon has 200 and 500 status codes so it will appear twice
            ["200", "200", "200", "200", "500"],
        ),
        (
            "only_limit",
            None,
            1,
            1,
            ["/checkout"],  ## health and checkout have the same size, so they are then sorted by endpoint as a tiebreaker, and only checkout makes the limit
            ["200"],
        ),
        (
            "asc_endpoint",
            [build_order_by("endpoint", "asc")],
            None,
            5,
            ["/checkout", "/coupon", "/coupon", "/health", "/orders"],
            ["200", "200", "500", "200", "200"],
        ),
        (
            "asc_endpoint_status_code",
            [build_order_by("endpoint", "asc"), build_order_by("status_code", "asc")],
            None,
            5,
            ["/checkout", "/coupon", "/coupon", "/health", "/orders"],
            ["200", "200", "500", "200", "200"],
        ),
        (
            "asc_status_code_endpoint",
            [build_order_by("status_code", "asc"), build_order_by("endpoint", "asc")],
            None,
            5,
            ["/checkout", "/coupon", "/health", "/orders", "/coupon"],
            ["200", "200", "200", "200", "500"],
        ),
        (
            "asc_endpoint_limit_2",
            [build_order_by("endpoint", "asc")],
            2,
            2,
            ["/checkout", "/coupon"],
            ["200", "200"],
        ),
        (
            "asc_endpoint_status_code_limit_2",
            [build_order_by("endpoint", "asc"), build_order_by("status_code", "asc")],
            2,
            2,
            ["/checkout", "/coupon"],
            ["200", "200"],
        ),
        (
            "asc_status_code_endpoint_limit_4",
            [build_order_by("status_code", "asc"), build_order_by("endpoint", "asc")],
            4,
            4,
            ["/checkout", "/coupon", "/health", "/orders"],
            ["200", "200", "200", "200"],
        ),
        (
            "desc_endpoint",
            [build_order_by("endpoint", "desc")],
            None,
            5,
            ["/orders", "/health", "/coupon", "/coupon", "/checkout"],
            ["200", "200", "200", "500", "200"],
        ),
        (
            "desc_endpoint_status_code",
            [build_order_by("endpoint", "desc"), build_order_by("status_code", "desc")],
            None,
            5,
            ["/orders", "/health", "/coupon", "/coupon", "/checkout"],
            ["200", "200", "500", "200", "200"],
        ),
        (
            "desc_status_code_endpoint",
            [build_order_by("status_code", "desc"), build_order_by("endpoint", "desc")],
            None,
            5,
            ["/coupon", "/orders", "/health", "/coupon", "/checkout"],
            ["500", "200", "200", "200", "200"],
        ),
        (
            "desc_endpoint_limit2",
            [build_order_by("endpoint", "desc")],
            3,
            3,
            ["/orders", "/health", "/coupon"],
            ["200", "200", "200"],
        ),
        (
            "desc_endpoint_status_code_limit3",
            [build_order_by("endpoint", "desc"), build_order_by("status_code", "desc")],
            3,
            3,
            ["/orders", "/health", "/coupon"],
            ["200", "200", "500"],
        ),
        (
            "desc_status_code_endpoint_limit2",
            [build_order_by("status_code", "desc"), build_order_by("endpoint", "desc")],
            2,
            2,
            ["/coupon", "/orders"],
            ["500", "200"],
        ),
    ],
)
def test_histogram_percentile_group_by_endpoint_and_status_code(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    order_suffix: str,
    order_by: Optional[List],
    limit: Optional[int],
    expected_count: int,
    expected_endpoints: Union[set, List[str]],
    expected_status_codes: Union[set, List[str]],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = f"test_histogram_p75_groupby_{order_suffix}"

    metrics = Metrics.load_from_file(
        FILE_WITH_MANY_GROUPS,
        base_time=now - timedelta(minutes=60),
        metric_name_override=metric_name,
    )
    insert_metrics(metrics)

    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    query_p75 = build_builder_query(
        "A",
        metric_name,
        "doesnotreallymatter",
        "p75",
        group_by=["endpoint", "status_code"],
        order_by=order_by,
        limit=limit,
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query_p75])
    assert response.status_code == HTTPStatus.OK

    data = response.json()
    p75_series = get_all_series(data, "A")

    for series in p75_series:
        endpoint = series.get("labels", [{}])[0].get("value", "unknown")
        status_code = series.get("labels", [{}])[1].get("value", "unknown")
        values = series.get("values", [])
        avg = sum(v["value"] for v in values) / len(values) if values else 0
        logger.warning("endpoint=%s status_code=%s average_p75=%.3f", endpoint, status_code, avg)

    assert (
        len(p75_series) == expected_count
    ), f"Expected {expected_count} p75 series, got {len(p75_series)}"

    _assert_series_endpoint_labels(p75_series, expected_endpoints, "p75")

    endpoints = [s.get("labels", [{}])[0].get("value", "unknown") for s in p75_series]
    assert endpoints == expected_endpoints, (
        f"Expected p75 endpoints in order {expected_endpoints}, got {endpoints}"
    )
    status_codes = [s.get("labels", [{}])[1].get("value", "unknown") for s in p75_series]
    assert status_codes == expected_status_codes, (
        f"Expected p75 status codes in order {expected_status_codes}, got {status_codes}"
    )

    p75_values = {}
    for series in p75_series:
        endpoint = series.get("labels", [{}])[0].get("value", "unknown")
        status_code = series.get("labels", [{}])[1].get("value", "unknown")
        p75_values[endpoint + status_code] = sorted(
            series.get("values", []), key=lambda x: x["timestamp"]
        )

    for endpoint, values in p75_values.items():
        for v in values:
            assert v["value"] >= 0, f"p75 for {endpoint} should not be negative: {v['value']}"

    # /health (cumulative, service=api)
    if "/health200" in p75_values:
        vals = p75_values["/health200"]
        assert vals[0]["value"] == 6000, f"Expected /health p75 first=6000, got {vals[0]['value']}"
        assert vals[-1]["value"] == 6000, f"Expected /health p75 last=6000, got {vals[-1]['value']}"

    # /orders (cumulative, service=api): same distribution as /health
    if "/orders200" in p75_values:
        vals = p75_values["/orders200"]
        assert vals[0]["value"] == 4500, f"Expected /orders p75 first=4500, got {vals[0]['value']}"
        assert vals[-1]["value"] == 4500, f"Expected /orders p75 last=4500, got {vals[-1]['value']}"

    # /checkout (delta, service=web): 60 points
    if "/checkout200" in p75_values:
        vals = p75_values["/checkout200"]
        assert vals[0]["value"] == 6000, f"Expected /checkout p75 zeroth=6000, got {vals[0]['value']}"
        assert vals[1]["value"] == 6000, f"Expected /checkout p75 first=6000, got {vals[1]['value']}"
        assert vals[-1]["value"] == 6000, f"Expected /checkout p75 last=6000, got {vals[-1]['value']}"

    # /coupon (delta, service=web): 60 points
    if "/coupon200" in p75_values:
        vals = p75_values["/coupon200"]
        assert vals[0]["value"] == 1250, f"Expected /coupon200 p75 zeroth=1250, got {vals[0]['value']}"
        assert vals[1]["value"] == 1250, f"Expected /coupon200 p75 first=1250, got {vals[1]['value']}"
        assert vals[-1]["value"] == 1250, f"Expected /coupon200 p75 last=1250, got {vals[-1]['value']}"

    if "/coupon500" in p75_values:
        vals = p75_values["/coupon500"]
        assert vals[0]["value"] == 750, f"Expected /coupon500 p75 zeroth=750, got {vals[0]['value']}"
        assert vals[1]["value"] == 750, f"Expected /coupon500 p75 first=750, got {vals[1]['value']}"
        assert vals[-1]["value"] == 750, f"Expected /coupon500 p75 last=750, got {vals[-1]['value']}"
    assert result_values[-1]["value"] == last_value
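One note on the keying above: p75_values[endpoint + status_code] concatenates the two labels into strings like "/coupon500", which works for these fixtures but can collide in general (e.g. endpoint "/a" with code "1200" vs "/a1" with "200"). A tuple key is the collision-safe spelling; a sketch only, not a change the diff makes:

p75_values = {}
key = ("/coupon", "500")  # (endpoint, status_code) instead of "/coupon500"
p75_values[key] = [{"timestamp": 0, "value": 750}]
assert p75_values[("/coupon", "500")][0]["value"] == 750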
@@ -5,16 +5,13 @@ Look at the delta_counters_1h.jsonl file for the relevant data
import os
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Any, Callable, List, Optional, Union

import pytest
from typing import Any, Callable, List

from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.metrics import Metrics
from fixtures.querier import (
    build_builder_query,
    build_order_by,
    get_all_series,
    get_series_values,
    make_query_request,
@@ -72,61 +69,16 @@ def test_rate_with_steady_values_and_reset(
            assert v["value"] >= 0, f"Rate should not be negative: {v['value']}"


@pytest.mark.parametrize(
    "order_suffix,order_by,limit,expected_count,expected_endpoints",
    [
        (
            "no_order",
            None,
            None,
            5,
            {"/products", "/health", "/checkout", "/orders", "/users"},
        ),
        (
            "asc",
            [build_order_by("endpoint", "asc")],
            None,
            5,
            ["/checkout", "/health", "/orders", "/products", "/users"],
        ),
        (
            "asc_lim3",
            [build_order_by("endpoint", "asc")],
            3,
            3,
            ["/checkout", "/health", "/orders"],
        ),
        (
            "desc",
            [build_order_by("endpoint", "desc")],
            None,
            5,
            ["/users", "/products", "/orders", "/health", "/checkout"],
        ),
        (
            "desc_lim3",
            [build_order_by("endpoint", "desc")],
            3,
            3,
            ["/users", "/products", "/orders"],
        ),
    ],
)
def test_rate_group_by_endpoint(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_metrics: Callable[[List[Metrics]], None],
    order_suffix: str,
    order_by: Optional[List],
    limit: Optional[int],
    expected_count: int,
    expected_endpoints: Union[set, List[str]],
) -> None:
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
    start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
    end_ms = int(now.timestamp() * 1000)
    metric_name = f"test_rate_groupby_{order_suffix}"
    metric_name = "test_rate_groupby"

    metrics = Metrics.load_from_file(
        DELTA_COUNTERS_FILE,
@@ -142,8 +94,6 @@ def test_rate_group_by_endpoint(
        "rate",
        "sum",
        group_by=["endpoint"],
        order_by=order_by,
        limit=limit,
    )

    response = make_query_request(signoz, token, start_ms, end_ms, [query])
@@ -152,23 +102,10 @@ def test_rate_group_by_endpoint(
    data = response.json()
    all_series = get_all_series(data, "A")

    # Should have 5 different endpoints
    assert (
        len(all_series) == expected_count
    ), f"Expected {expected_count} series, got {len(all_series)}"

    endpoint_labels = [
        series.get("labels", [{}])[0].get("value", "unknown")
        for series in all_series
    ]

    if isinstance(expected_endpoints, set):
        assert (
            set(endpoint_labels) == expected_endpoints
        ), f"Expected endpoints {expected_endpoints}, got {set(endpoint_labels)}"
    else:
        assert endpoint_labels == expected_endpoints, (
            f"Expected endpoints {expected_endpoints}, got {endpoint_labels}"
        )
        len(all_series) == 5
    ), f"Expected 5 series for 5 endpoints, got {len(all_series)}"

    # endpoint -> values
    endpoint_values = {}
@@ -177,6 +114,11 @@ def test_rate_group_by_endpoint(
        values = sorted(series.get("values", []), key=lambda x: x["timestamp"])
        endpoint_values[endpoint] = values

    expected_endpoints = {"/products", "/health", "/checkout", "/orders", "/users"}
    assert (
        set(endpoint_values.keys()) == expected_endpoints
    ), f"Expected endpoints {expected_endpoints}, got {set(endpoint_values.keys())}"

    # the rate should never be negative at any point
    for endpoint, values in endpoint_values.items():
        for v in values:
@@ -186,95 +128,93 @@ def test_rate_group_by_endpoint(
    # /health: 60 data points (t01-t60), steady +10/min
    # rate = 10/60 = 0.167
    if "/health" in endpoint_values:
        health_values = endpoint_values["/health"]
        assert (
            len(health_values) == 60
        ), f"Expected 60 values for /health, got {len(health_values)}"
        count_steady_health = sum(1 for v in health_values if v["value"] == 0.167)
        assert (
            count_steady_health == 60
        ), f"Expected == 60 steady rate values (0.167) for /health, got {count_steady_health}"
        # all /health rates should be 0.167 except possibly first/last due to boundaries
        for v in health_values[1:-1]:
            assert v["value"] == 0.167, f"Expected /health rate 0.167, got {v['value']}"
    health_values = endpoint_values["/health"]
    assert (
        len(health_values) == 60
    ), f"Expected 60 values for /health, got {len(health_values)}"
    count_steady_health = sum(1 for v in health_values if v["value"] == 0.167)
    assert (
        count_steady_health == 60
    ), f"Expected == 60 steady rate values (0.167) for /health, got {count_steady_health}"
    # all /health rates should be 0.167 except possibly first/last due to boundaries
    for v in health_values[1:-1]:
        assert v["value"] == 0.167, f"Expected /health rate 0.167, got {v['value']}"

    # /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
    # rate = 20/60 = 0.333, gap causes lower averaged rate at boundary
    if "/products" in endpoint_values:
        products_values = endpoint_values["/products"]
        assert (
            len(products_values) == 51
        ), f"Expected 51 values for /products, got {len(products_values)}"
        count_steady_products = sum(1 for v in products_values if v["value"] == 0.333)
        assert (
            count_steady_products == 51
        ), f"Expected 51 steady rate values (0.333) for /products, got {count_steady_products}"
    products_values = endpoint_values["/products"]
    assert (
        len(products_values) == 51
    ), f"Expected 51 values for /products, got {len(products_values)}"
    count_steady_products = sum(1 for v in products_values if v["value"] == 0.333)

    assert (
        count_steady_products == 51
    ), f"Expected 51 steady rate values (0.333) for /products, got {count_steady_products}"

    # /checkout: 61 data points (t00-t60), +1/min normal, +50/min spike at t40-t44
    # normal rate = 1/60 = 0.0167, spike rate = 50/60 = 0.833
    if "/checkout" in endpoint_values:
        checkout_values = endpoint_values["/checkout"]
    checkout_values = endpoint_values["/checkout"]
    assert (
        len(checkout_values) == 61
    ), f"Expected 61 values for /checkout, got {len(checkout_values)}"
    count_steady_checkout = sum(1 for v in checkout_values if v["value"] == 0.0167)
    assert (
        count_steady_checkout == 56
    ), f"Expected 56 steady rate values (0.0167) for /checkout, got {count_steady_checkout}"
    # check that spike values exist (traffic spike +50/min at t40-t44)
    count_spike_checkout = sum(1 for v in checkout_values if v["value"] == 0.833)
    assert (
        count_spike_checkout == 5
    ), f"Expected 5 spike rate values (0.833) for /checkout, got {count_spike_checkout}"

    # spike values should be consecutive
    spike_indices = [
        i for i, v in enumerate(checkout_values) if v["value"] == 0.833
    ]
    assert len(spike_indices) == 5, f"Expected 5 spike indices, got {spike_indices}"
    # consecutiveness
    for i in range(1, len(spike_indices)):
        assert (
            len(checkout_values) == 61
        ), f"Expected 61 values for /checkout, got {len(checkout_values)}"
        count_steady_checkout = sum(1 for v in checkout_values if v["value"] == 0.0167)
        assert (
            count_steady_checkout == 56
        ), f"Expected 56 steady rate values (0.0167) for /checkout, got {count_steady_checkout}"
        # check that spike values exist (traffic spike +50/min at t40-t44)
        count_spike_checkout = sum(1 for v in checkout_values if v["value"] == 0.833)
        assert (
            count_spike_checkout == 5
        ), f"Expected 5 spike rate values (0.833) for /checkout, got {count_spike_checkout}"
        # spike values should be consecutive
        spike_indices = [
            i for i, v in enumerate(checkout_values) if v["value"] == 0.833
        ]
        assert len(spike_indices) == 5, f"Expected 5 spike indices, got {spike_indices}"
        for i in range(1, len(spike_indices)):
            assert (
                spike_indices[i] == spike_indices[i - 1] + 1
            ), f"Spike indices should be consecutive, got {spike_indices}"
        spike_indices[i] == spike_indices[i - 1] + 1
    ), f"Spike indices should be consecutive, got {spike_indices}"

    # /orders: 60 data points (t00-t60) with gap at t30, counter reset at t31 (150->2)
    # rate = 5/60 = 0.0833
    # reset at t31 causes: rate at t30 includes gap (lower), t31 has high rate after reset
    if "/orders" in endpoint_values:
        orders_values = endpoint_values["/orders"]
        assert (
            len(orders_values) == 60
        ), f"Expected 60 values for /orders, got {len(orders_values)}"
        count_steady_orders = sum(1 for v in orders_values if v["value"] == 0.0833)
        assert (
            count_steady_orders == 58
        ), f"Expected 58 steady rate values (0.0833) for /orders, got {count_steady_orders}"
        # check for counter reset effects - there should be some non-standard values
        non_standard_orders = [v["value"] for v in orders_values if v["value"] != 0.0833]
        assert (
            len(non_standard_orders) == 2
        ), f"Expected 2 non-standard values due to counter reset, got {non_standard_orders}"
        # post-reset value should be higher (new counter value / interval)
        high_rate_orders = [v for v in non_standard_orders if v > 0.0833]
        assert (
            len(high_rate_orders) == 1
        ), f"Expected one high rate value after counter reset, got {non_standard_orders}"
    orders_values = endpoint_values["/orders"]
    assert (
        len(orders_values) == 60
    ), f"Expected 60 values for /orders, got {len(orders_values)}"
    count_steady_orders = sum(1 for v in orders_values if v["value"] == 0.0833)
    assert (
        count_steady_orders == 58
    ), f"Expected 58 steady rate values (0.0833) for /orders, got {count_steady_orders}"
    # check for counter reset effects - there should be some non-standard values
    non_standard_orders = [v["value"] for v in orders_values if v["value"] != 0.0833]
    assert (
        len(non_standard_orders) == 2
    ), f"Expected 2 non-standard values due to counter reset, got {non_standard_orders}"
    # post-reset value should be higher (new counter value / interval)
    high_rate_orders = [v for v in non_standard_orders if v > 0.0833]
    assert (
        len(high_rate_orders) == 1
    ), f"Expected one high rate value after counter reset, got {non_standard_orders}"

    # /users: 56 data points (t05-t60), sparse +1 every 5 minutes (12 of them)
    # Rate = 1/60 = 0.0167 during increment, 0 during flat periods
    if "/users" in endpoint_values:
        users_values = endpoint_values["/users"]
        assert (
            len(users_values) == 56
        ), f"Expected 56 values for /users, got {len(users_values)}"
        count_zero_users = sum(1 for v in users_values if v["value"] == 0)
        # most values should be 0 (flat periods between increments)
        assert (
            count_zero_users == 44
        ), f"Expected 44 zero rate values for /users (sparse data), got {count_zero_users}"
        # non-zero values should be 0.0167 (1/60 increment rate)
        non_zero_users = [v["value"] for v in users_values if v["value"] != 0]
        count_increment_rate = sum(1 for v in non_zero_users if v == 0.0167)
        assert (
            count_increment_rate == 12
        ), f"Expected 12 increment rate values (0.0167) for /users, got {count_increment_rate}"
    users_values = endpoint_values["/users"]
    assert (
        len(users_values) == 56
    ), f"Expected 56 values for /users, got {len(users_values)}"
    count_zero_users = sum(1 for v in users_values if v["value"] == 0)
    # most values should be 0 (flat periods between increments)
    assert (
        count_zero_users == 44
    ), f"Expected 44 zero rate values for /users (sparse data), got {count_zero_users}"
    # non-zero values should be 0.0167 (1/60 increment rate)
    non_zero_users = [v["value"] for v in users_values if v["value"] != 0]
    count_increment_rate = sum(1 for v in non_zero_users if v == 0.0167)
    assert (
        count_increment_rate == 12
    ), f"Expected 12 increment rate values (0.0167) for /users, got {count_increment_rate}"
File diff suppressed because it is too large