Compare commits

..

15 Commits

Author  SHA1  Message  Date
Karan Balani  c0b6ca37f5  chore: remove commented code  2026-02-17 13:07:07 +05:30
Karan Balani  a94f5b1cab  chore: remove register api route tests as we moved to root user  2026-02-17 00:34:22 +05:30
Karan Balani  f1998e24be  chore: fix tests  2026-02-17 00:08:37 +05:30
Karan Balani  e667b904d5  chore: ref editor user values  2026-02-16 22:37:48 +05:30
Karan Balani  2f6cd523d6  chore: fix lint  2026-02-16 22:34:48 +05:30
Karan Balani  fca4118151  chore: use root user email and password variables  2026-02-16 22:31:22 +05:30
Karan Balani  f07f62975b  chore: fix use of direct admin emails to variables  2026-02-16 22:28:26 +05:30
Karan Balani  fcb668687f  fix: integration tests for root user  2026-02-16 22:03:17 +05:30
grandwizard28  3de8b3b5b2  feat: toggle foreign key constraint  2026-02-16 16:54:12 +05:30
grandwizard28  a3d74b19fc  feat: handle setup completed  2026-02-16 15:59:10 +05:30
grandwizard28  7edd97b3d2  feat: add org_name to root user reconciliation  2026-02-16 15:59:09 +05:30
grandwizard28  17958d8aa0  fix: add openapi spec  2026-02-16 15:59:09 +05:30
grandwizard28  f8341790e9  fix: clean the update functions and the reconciliation logic  2026-02-16 15:59:09 +05:30
grandwizard28  e84c6749b2  feat: fix promote to root and authz grant changes where needed  2026-02-16 15:59:09 +05:30
grandwizard28  65fd5f274c  feat: add root user support with protection guards and env-based provisioning  2026-02-16 15:59:09 +05:30
146 changed files with 2506 additions and 5538 deletions

View File

@@ -176,6 +176,25 @@ We have published benchmarks comparing Loki with SigNoz. Take a look
We ❤️ contributions to the project, big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) first before you start contributing to SigNoz.
Not sure how to get started? Just message us on the #contributing channel in our [slack community](https://signoz.io/slack)
### Our project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
<br /><br />
## Documentation

View File

@@ -221,6 +221,34 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING
Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)
### Project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
- [Ekansh Gupta](https://github.com/eKuG)
- [Aniket Agarwal](https://github.com/aniketio-ctrl)
#### Frontend
- [Yunus M](https://github.com/YounixM)
- [Vikrant Gupta](https://github.com/vikrantgupta25)
- [Sagar Rajput](https://github.com/SagarRajput-7)
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/therealpandey)
<br /><br />

View File

@@ -187,6 +187,25 @@ Jaeger is just a distributed tracing system, but SigNoz can provide metric
Not sure how to get started? Just reach out to us on the `#contributing` channel in our [slack community](https://signoz.io/slack).
### Project maintainers
#### Backend
- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
#### Frontend
- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)
#### DevOps
- [Prashant Shahi](https://github.com/prashant-shahi)
<br /><br />
## Documentation

View File

@@ -1,4 +1,4 @@
FROM node:18-bullseye AS build
FROM node:22-bookworm AS build
WORKDIR /opt/
COPY ./frontend/ ./

View File

@@ -294,6 +294,7 @@ flagger:
config:
boolean:
use_span_metrics: true
interpolation_enabled: false
kafka_span_eval: false
string:
float:

View File

@@ -170,14 +170,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, valuer.MustNewUUID(orgId))
cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
}
password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password), user.WithRole(types.RoleViewer))
cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
if err != nil {
return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
}
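
The hunk above moves the user's role out of a `user.WithRole(...)` option applied at lookup time and into the `types.NewUser` constructor, so a user can never exist without a role. A hypothetical TypeScript analogue of that refactor (none of these names are from the repo's Go types):

```typescript
type Role = 'viewer' | 'editor' | 'admin';

class User {
	constructor(
		public readonly name: string,
		public readonly email: string,
		// Previously attached later via an optional; now required at construction.
		public readonly role: Role,
		public readonly orgId: string,
	) {}
}

// Before: getOrCreateUser(user, withFactorPassword(pw), withRole('viewer'))
// After: the role travels with the user from construction onward.
const integrationUser = new User(
	'aws-integration',
	'aws-integration@signoz.io',
	'viewer',
	'org-1',
);
console.log(integrationUser.role); // 'viewer'
```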

View File

@@ -55,7 +55,6 @@
"@signozhq/icons": "0.1.0",
"@signozhq/input": "0.0.2",
"@signozhq/popover": "0.0.0",
"@signozhq/radio-group": "0.0.2",
"@signozhq/resizable": "0.0.0",
"@signozhq/sonner": "0.1.0",
"@signozhq/table": "0.3.7",

View File

@@ -21,7 +21,6 @@ import '@signozhq/design-tokens';
import '@signozhq/icons';
import '@signozhq/input';
import '@signozhq/popover';
import '@signozhq/radio-group';
import '@signozhq/resizable';
import '@signozhq/sonner';
import '@signozhq/table';

View File

@@ -9,6 +9,74 @@
padding: 0px;
}
.dashboard-header {
border-bottom: 1px solid var(--bg-slate-400);
display: flex;
justify-content: space-between;
gap: 16px;
align-items: center;
padding: 0 8px;
box-sizing: border-box;
.dashboard-breadcrumbs {
width: 100%;
height: 48px;
display: flex;
gap: 6px;
align-items: center;
max-width: 80%;
.dashboard-btn {
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: -0.07px;
padding: 0px;
height: 20px;
}
.dashboard-btn:hover {
background-color: unset;
}
.id-btn {
display: flex;
align-items: center;
padding: 0px 2px;
border-radius: 2px;
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
height: 20px;
max-width: calc(100% - 120px);
span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.ant-btn-icon {
margin-inline-end: 4px;
}
}
.id-btn:hover {
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-300);
}
}
}
.dashboard-details {
display: flex;
justify-content: space-between;
@@ -467,6 +535,15 @@
.dashboard-description-container {
color: var(--bg-ink-400);
.dashboard-header {
border-bottom: 1px solid var(--bg-vanilla-300);
.dashboard-breadcrumbs {
.dashboard-btn {
color: var(--bg-ink-400);
}
}
}
.dashboard-details {
.left-section {
.dashboard-title {

View File

@@ -16,7 +16,9 @@ import {
} from 'antd';
import logEvent from 'api/common/logEvent';
import ConfigureIcon from 'assets/Integrations/ConfigureIcon';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
@@ -25,6 +27,7 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import {
Check,
@@ -34,6 +37,7 @@ import {
FolderKanban,
Fullscreen,
Globe,
LayoutGrid,
LockKeyhole,
PenLine,
X,
@@ -47,7 +51,6 @@ import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
import { v4 as uuid } from 'uuid';
import DashboardHeader from '../components/DashboardHeader/DashboardHeader';
import DashboardGraphSlider from '../ComponentsSlider';
import DashboardSettings from '../DashboardSettings';
import { Base64Icons } from '../DashboardSettings/General/utils';
@@ -68,6 +71,7 @@ interface DashboardDescriptionProps {
// eslint-disable-next-line sonarjs/cognitive-complexity
function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
const { safeNavigate } = useSafeNavigate();
const { handle } = props;
const {
selectedDashboard,
@@ -76,6 +80,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
layouts,
setLayouts,
isDashboardLocked,
listSortOrder,
setSelectedDashboard,
handleToggleDashboardSlider,
setSelectedRowWidgetId,
@@ -287,6 +292,17 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
});
}
function goToListPage(): void {
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);
const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}
const {
data: publicDashboardResponse,
isLoading: isLoadingPublicDashboardData,
@@ -335,7 +351,32 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
return (
<Card className="dashboard-description-container">
<DashboardHeader />
<div className="dashboard-header">
<section className="dashboard-breadcrumbs">
<Button
type="text"
icon={<LayoutGrid size={14} />}
className="dashboard-btn"
onClick={(): void => goToListPage()}
>
Dashboard /
</Button>
<Button type="text" className="id-btn dashboard-name-btn">
<img
src={image}
alt="dashboard-icon"
style={{ height: '14px', width: '14px' }}
/>
{title}
</Button>
</section>
<HeaderRightSection
enableAnnouncements={false}
enableShare
enableFeedback
/>
</div>
<section className="dashboard-details">
<div className="left-section">
<img src={image} alt="dashboard-img" className="dashboard-img" />

View File

@@ -1,71 +0,0 @@
.dashboard-breadcrumbs {
width: 100%;
height: 48px;
display: flex;
gap: 6px;
align-items: center;
max-width: 80%;
.dashboard-btn {
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: -0.07px;
padding: 0px;
height: 20px;
}
.dashboard-btn:hover {
background-color: unset;
}
.id-btn {
display: flex;
align-items: center;
gap: 4px;
padding: 0px 2px;
border-radius: 2px;
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
height: 20px;
max-width: calc(100% - 120px);
span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.ant-btn-icon {
margin-inline-end: 4px;
}
}
.id-btn:hover {
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-300);
}
.dashboard-icon-image {
height: 14px;
width: 14px;
}
}
.lightMode {
.dashboard-breadcrumbs {
.dashboard-btn {
color: var(--bg-ink-400);
}
}
}

View File

@@ -1,55 +0,0 @@
import { useCallback } from 'react';
import { Button } from 'antd';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { LayoutGrid } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { DashboardData } from 'types/api/dashboard/getAll';
import { Base64Icons } from '../../DashboardSettings/General/utils';
import './DashboardBreadcrumbs.styles.scss';
function DashboardBreadcrumbs(): JSX.Element {
const { safeNavigate } = useSafeNavigate();
const { selectedDashboard, listSortOrder } = useDashboard();
const selectedData = selectedDashboard
? {
...selectedDashboard.data,
uuid: selectedDashboard.id,
}
: ({} as DashboardData);
const { title = '', image = Base64Icons[0] } = selectedData || {};
const goToListPage = useCallback(() => {
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);
const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}, [listSortOrder, safeNavigate]);
return (
<div className="dashboard-breadcrumbs">
<Button
type="text"
icon={<LayoutGrid size={14} />}
className="dashboard-btn"
onClick={goToListPage}
>
Dashboard /
</Button>
<Button type="text" className="id-btn dashboard-name-btn">
<img src={image} alt="dashboard-icon" className="dashboard-icon-image" />
{title}
</Button>
</div>
);
}
export default DashboardBreadcrumbs;

View File

@@ -1,15 +0,0 @@
.dashboard-header {
border-bottom: 1px solid var(--bg-slate-400);
display: flex;
justify-content: space-between;
gap: 16px;
align-items: center;
padding: 0 8px;
box-sizing: border-box;
}
.lightMode {
.dashboard-header {
border-bottom: 1px solid var(--bg-vanilla-300);
}
}

View File

@@ -1,17 +0,0 @@
import { memo } from 'react';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import DashboardBreadcrumbs from './DashboardBreadcrumbs';
import './DashboardHeader.styles.scss';
function DashboardHeader(): JSX.Element {
return (
<div className="dashboard-header">
<DashboardBreadcrumbs />
<HeaderRightSection enableAnnouncements={false} enableShare enableFeedback />
</div>
);
}
export default memo(DashboardHeader);

View File

@@ -23,7 +23,6 @@ export default function ChartWrapper({
width: containerWidth,
height: containerHeight,
showTooltip = true,
showLegend = true,
canPinTooltip = false,
syncMode,
syncKey,
@@ -37,9 +36,6 @@ export default function ChartWrapper({
const legendComponent = useCallback(
(averageLegendWidth: number): React.ReactNode => {
if (!showLegend) {
return null;
}
return (
<Legend
config={config}
@@ -48,7 +44,7 @@ export default function ChartWrapper({
/>
);
},
[config, legendConfig.position, showLegend],
[config, legendConfig.position],
);
const renderTooltipCallback = useCallback(
@@ -64,7 +60,6 @@ export default function ChartWrapper({
return (
<PlotContextProvider>
<ChartLayout
showLegend={showLegend}
config={config}
containerWidth={containerWidth}
containerHeight={containerHeight}

View File

@@ -1,55 +0,0 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
HistogramTooltipProps,
TooltipRenderArgs,
} from 'lib/uPlotV2/components/types';
import { HistogramChartProps } from '../types';
export default function Histogram(props: HistogramChartProps): JSX.Element {
const {
children,
renderTooltip: customRenderTooltip,
isQueriesMerged,
...rest
} = props;
const renderTooltip = useCallback(
(props: TooltipRenderArgs): React.ReactNode => {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: HistogramTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <HistogramTooltip {...tooltipProps} />;
},
[customRenderTooltip, rest.timezone, rest.yAxisUnit, rest.decimalPrecision],
);
return (
<ChartWrapper
showLegend={!isQueriesMerged}
{...rest}
renderTooltip={renderTooltip}
>
{children}
</ChartWrapper>
);
}

View File

@@ -7,7 +7,6 @@ interface BaseChartProps {
width: number;
height: number;
showTooltip?: boolean;
showLegend?: boolean;
timezone: string;
canPinTooltip?: boolean;
yAxisUnit?: string;
@@ -18,7 +17,6 @@ interface BaseChartProps {
interface UPlotBasedChartProps {
config: UPlotConfigBuilder;
data: uPlot.AlignedData;
legendConfig: LegendConfig;
syncMode?: DashboardCursorSync;
syncKey?: string;
plotRef?: (plot: uPlot | null) => void;
@@ -28,20 +26,14 @@ interface UPlotBasedChartProps {
}
export interface TimeSeriesChartProps
extends BaseChartProps,
UPlotBasedChartProps {}
export interface HistogramChartProps
extends BaseChartProps,
UPlotBasedChartProps {
isQueriesMerged?: boolean;
legendConfig: LegendConfig;
}
export interface BarChartProps extends BaseChartProps, UPlotBasedChartProps {
legendConfig: LegendConfig;
isStackedBarChart?: boolean;
}
export type ChartProps =
| TimeSeriesChartProps
| BarChartProps
| HistogramChartProps;
export type ChartProps = TimeSeriesChartProps | BarChartProps;

View File

@@ -1,69 +0,0 @@
import { renderHook } from '@testing-library/react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useScrollWidgetIntoView } from '../useScrollWidgetIntoView';
jest.mock('providers/Dashboard/Dashboard');
type MockHTMLElement = {
scrollIntoView: jest.Mock;
focus: jest.Mock;
};
function createMockElement(): MockHTMLElement {
return {
scrollIntoView: jest.fn(),
focus: jest.fn(),
};
}
describe('useScrollWidgetIntoView', () => {
const mockedUseDashboard = useDashboard as jest.MockedFunction<
typeof useDashboard
>;
beforeEach(() => {
jest.clearAllMocks();
});
it('scrolls into view and focuses when toScrollWidgetId matches widget id', () => {
const setToScrollWidgetId = jest.fn();
const mockElement = createMockElement();
const ref = ({
current: mockElement,
} as unknown) as React.RefObject<HTMLDivElement>;
mockedUseDashboard.mockReturnValue(({
toScrollWidgetId: 'widget-id',
setToScrollWidgetId,
} as unknown) as ReturnType<typeof useDashboard>);
renderHook(() => useScrollWidgetIntoView('widget-id', ref));
expect(mockElement.scrollIntoView).toHaveBeenCalledWith({
behavior: 'smooth',
block: 'center',
});
expect(mockElement.focus).toHaveBeenCalled();
expect(setToScrollWidgetId).toHaveBeenCalledWith('');
});
it('does nothing when toScrollWidgetId does not match widget id', () => {
const setToScrollWidgetId = jest.fn();
const mockElement = createMockElement();
const ref = ({
current: mockElement,
} as unknown) as React.RefObject<HTMLDivElement>;
mockedUseDashboard.mockReturnValue(({
toScrollWidgetId: 'other-widget',
setToScrollWidgetId,
} as unknown) as ReturnType<typeof useDashboard>);
renderHook(() => useScrollWidgetIntoView('widget-id', ref));
expect(mockElement.scrollIntoView).not.toHaveBeenCalled();
expect(mockElement.focus).not.toHaveBeenCalled();
expect(setToScrollWidgetId).not.toHaveBeenCalled();
});
});

View File

@@ -1,26 +0,0 @@
import { RefObject, useEffect } from 'react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
/**
* Scrolls the given widget container into view when the dashboard
* requests it via `toScrollWidgetId`.
*
* Intended for use in panel components that render a single widget.
*/
export function useScrollWidgetIntoView<T extends HTMLElement>(
widgetId: string,
widgetContainerRef: RefObject<T>,
): void {
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
useEffect(() => {
if (toScrollWidgetId === widgetId) {
widgetContainerRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
widgetContainerRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widgetId, widgetContainerRef]);
}
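
This shared hook is deleted here and its effect inlined directly into `BarPanel` and `TimeSeriesPanel` (see those diffs below). For reference, a minimal sketch of how a panel consumed the hook before this change (the component name is hypothetical):

```tsx
import { useRef } from 'react';

import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';

function ExamplePanel({ widgetId }: { widgetId: string }): JSX.Element {
	const containerRef = useRef<HTMLDivElement>(null);

	// Scrolls and focuses this container when the dashboard sets
	// toScrollWidgetId to this widget's id.
	useScrollWidgetIntoView(widgetId, containerRef);

	return <div ref={containerRef} tabIndex={-1} />;
}
```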

View File

@@ -1,14 +1,12 @@
import { useMemo } from 'react';
import cx from 'classnames';
import { calculateChartDimensions } from 'container/DashboardContainer/visualization/charts/utils';
import { MAX_LEGEND_WIDTH } from 'lib/uPlotV2/components/Legend/Legend';
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import './ChartLayout.styles.scss';
export interface ChartLayoutProps {
showLegend?: boolean;
legendComponent: (legendPerSet: number) => React.ReactNode;
children: (props: {
chartWidth: number;
@@ -22,7 +20,6 @@ export interface ChartLayoutProps {
config: UPlotConfigBuilder;
}
export default function ChartLayout({
showLegend = true,
legendComponent,
children,
layoutChildren,
@@ -33,15 +30,6 @@ export default function ChartLayout({
}: ChartLayoutProps): JSX.Element {
const chartDimensions = useMemo(
() => {
if (!showLegend) {
return {
width: containerWidth,
height: containerHeight,
legendWidth: 0,
legendHeight: 0,
averageLegendWidth: MAX_LEGEND_WIDTH,
};
}
const legendItemsMap = config.getLegendItems();
const seriesLabels = Object.values(legendItemsMap)
.map((item) => item.label)
@@ -54,7 +42,7 @@ export default function ChartLayout({
});
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[containerWidth, containerHeight, legendConfig, showLegend],
[containerWidth, containerHeight, legendConfig],
);
return (
@@ -72,17 +60,15 @@ export default function ChartLayout({
averageLegendWidth: chartDimensions.averageLegendWidth,
})}
</div>
{showLegend && (
<div
className="chart-layout__legend-wrapper"
style={{
height: chartDimensions.legendHeight,
width: chartDimensions.legendWidth,
}}
>
{legendComponent(chartDimensions.averageLegendWidth)}
</div>
)}
<div
className="chart-layout__legend-wrapper"
style={{
height: chartDimensions.legendHeight,
width: chartDimensions.legendWidth,
}}
>
{legendComponent(chartDimensions.averageLegendWidth)}
</div>
</div>
{layoutChildren}
</div>

View File

@@ -1,10 +1,10 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import ContextMenu from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
@@ -27,6 +27,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
onToggleModelHandler,
} = props;
const uPlotRef = useRef<uPlot | null>(null);
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -35,7 +36,16 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
useScrollWidgetIntoView(widget.id, graphRef);
useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);

View File

@@ -1,114 +0,0 @@
import { useMemo, useRef } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { DashboardCursorSync } from 'lib/uPlotV2/plugins/TooltipPlugin/types';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
import Histogram from '../../charts/Histogram/Histogram';
import ChartManager from '../../components/ChartManager/ChartManager';
import {
prepareHistogramPanelConfig,
prepareHistogramPanelData,
} from './utils';
import '../Panel.styles.scss';
function HistogramPanel(props: PanelWrapperProps): JSX.Element {
const {
panelMode,
queryResponse,
widget,
isFullViewMode,
onToggleModelHandler,
} = props;
const uPlotRef = useRef<uPlot | null>(null);
const graphRef = useRef<HTMLDivElement>(null);
const containerDimensions = useResizeObserver(graphRef);
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
useScrollWidgetIntoView(widget.id, graphRef);
const config = useMemo(() => {
return prepareHistogramPanelConfig({
widget,
isDarkMode,
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
panelMode,
});
}, [widget, isDarkMode, queryResponse?.data?.payload, panelMode]);
const chartData = useMemo(() => {
if (!queryResponse?.data?.payload) {
return [];
}
return prepareHistogramPanelData({
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
bucketWidth: widget?.bucketWidth,
bucketCount: widget?.bucketCount,
mergeAllActiveQueries: widget?.mergeAllActiveQueries,
});
}, [
queryResponse?.data?.payload,
widget?.bucketWidth,
widget?.bucketCount,
widget?.mergeAllActiveQueries,
]);
const layoutChildren = useMemo(() => {
if (!isFullViewMode || widget.mergeAllActiveQueries) {
return null;
}
return (
<ChartManager
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
onCancel={onToggleModelHandler}
/>
);
}, [
isFullViewMode,
config,
chartData,
widget.yAxisUnit,
onToggleModelHandler,
widget.mergeAllActiveQueries,
]);
return (
<div className="panel-container" ref={graphRef}>
{containerDimensions.width > 0 && containerDimensions.height > 0 && (
<Histogram
config={config}
legendConfig={{
position: widget?.legendPosition ?? LegendPosition.BOTTOM,
}}
plotRef={(plot: uPlot | null): void => {
uPlotRef.current = plot;
}}
onDestroy={(): void => {
uPlotRef.current = null;
}}
isQueriesMerged={widget.mergeAllActiveQueries}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
syncMode={DashboardCursorSync.Crosshair}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}
layoutChildren={layoutChildren}
/>
)}
</div>
);
}
export default HistogramPanel;

View File

@@ -1,223 +0,0 @@
/* eslint-disable simple-import-sort/imports */
import type { UseQueryResult } from 'react-query';
import { render, screen } from 'tests/test-utils';
import { PanelMode } from 'container/DashboardContainer/visualization/panels/types';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { Widgets } from 'types/api/dashboard/getAll';
import {
MetricQueryRangeSuccessResponse,
MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';
import HistogramPanel from '../HistogramPanel';
import { HistogramChartProps } from 'container/DashboardContainer/visualization/charts/types';
jest.mock('hooks/useDimensions', () => ({
useResizeObserver: jest.fn().mockReturnValue({ width: 800, height: 400 }),
}));
jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: jest.fn().mockReturnValue(false),
}));
jest.mock('providers/Timezone', () => ({
__esModule: true,
// Provide a no-op provider component so AllTheProviders can render
default: ({ children }: { children: React.ReactNode }): JSX.Element => (
<>{children}</>
),
// And mock the hook used by HistogramPanel
useTimezone: jest.fn().mockReturnValue({
timezone: { value: 'UTC' },
}),
}));
jest.mock(
'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView',
() => ({
useScrollWidgetIntoView: jest.fn(),
}),
);
jest.mock(
'container/DashboardContainer/visualization/charts/Histogram/Histogram',
() => ({
__esModule: true,
default: (props: HistogramChartProps): JSX.Element => (
<div data-testid="histogram-chart">
<div data-testid="histogram-props">
{JSON.stringify({
legendPosition: props.legendConfig?.position,
isQueriesMerged: props.isQueriesMerged,
yAxisUnit: props.yAxisUnit,
decimalPrecision: props.decimalPrecision,
})}
</div>
{props.layoutChildren}
</div>
),
}),
);
jest.mock(
'container/DashboardContainer/visualization/components/ChartManager/ChartManager',
() => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="chart-manager">ChartManager</div>
),
}),
);
function createQueryResponse(
payloadOverrides: Partial<MetricRangePayloadProps> = {},
): { data: { payload: MetricRangePayloadProps } } {
const basePayload: MetricRangePayloadProps = {
data: {
result: [
{
metric: {},
queryName: 'A',
legend: 'Series A',
values: [
[1, '10'],
[2, '20'],
],
},
],
resultType: 'matrix',
newResult: {
data: {
result: [],
resultType: 'matrix',
},
},
},
};
return {
data: {
payload: {
...basePayload,
...payloadOverrides,
},
},
};
}
type WidgetLike = {
id: string;
yAxisUnit: string;
decimalPrecision: number;
legendPosition: LegendPosition;
mergeAllActiveQueries: boolean;
};
function createWidget(overrides: Partial<WidgetLike> = {}): WidgetLike {
return {
id: 'widget-id',
yAxisUnit: 'ms',
decimalPrecision: 2,
legendPosition: LegendPosition.BOTTOM,
mergeAllActiveQueries: false,
...overrides,
};
}
describe('HistogramPanel', () => {
it('renders Histogram when container has dimensions', () => {
const widget = (createWidget() as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;
render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode={false}
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);
expect(screen.getByTestId('histogram-chart')).toBeInTheDocument();
});
it('passes legend position and other props to Histogram', () => {
const widget = (createWidget({
legendPosition: LegendPosition.RIGHT,
}) as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;
render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode={false}
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);
const propsJson = screen.getByTestId('histogram-props').textContent || '{}';
const parsed = JSON.parse(propsJson);
expect(parsed.legendPosition).toBe(LegendPosition.RIGHT);
expect(parsed.yAxisUnit).toBe('ms');
expect(parsed.decimalPrecision).toBe(2);
});
it('renders ChartManager in full view when queries are not merged', () => {
const widget = (createWidget({
mergeAllActiveQueries: false,
}) as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;
render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);
expect(screen.getByTestId('chart-manager')).toBeInTheDocument();
});
it('does not render ChartManager when queries are merged', () => {
const widget = (createWidget({
mergeAllActiveQueries: true,
}) as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;
render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);
expect(screen.queryByTestId('chart-manager')).not.toBeInTheDocument();
});
});

View File

@@ -1,231 +0,0 @@
import { histogramBucketSizes } from '@grafana/data';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import { DrawStyle } from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { AlignedData } from 'uplot';
import { incrRoundDn, roundDecimals } from 'utils/round';
import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';
import {
buildHistogramBuckets,
mergeAlignedDataTables,
prependNullBinToFirstHistogramSeries,
replaceUndefinedWithNullInAlignedData,
} from '../utils/histogram';
export interface PrepareHistogramPanelDataParams {
apiResponse: MetricRangePayloadProps;
bucketWidth?: number;
bucketCount?: number;
mergeAllActiveQueries?: boolean;
}
const BUCKET_OFFSET = 0;
const HIST_SORT = (a: number, b: number): number => a - b;
function extractNumericValues(
result: MetricRangePayloadProps['data']['result'],
): number[] {
const values: number[] = [];
for (const item of result) {
for (const [, valueStr] of item.values) {
values.push(Number.parseFloat(valueStr) || 0);
}
}
return values;
}
function computeSmallestDelta(sortedValues: number[]): number {
if (sortedValues.length <= 1) {
return 0;
}
let smallest = Infinity;
for (let i = 1; i < sortedValues.length; i++) {
const delta = sortedValues[i] - sortedValues[i - 1];
if (delta > 0) {
smallest = Math.min(smallest, delta);
}
}
return smallest === Infinity ? 0 : smallest;
}
function selectBucketSize({
range,
bucketCount,
smallestDelta,
bucketWidthOverride,
}: {
range: number;
bucketCount: number;
smallestDelta: number;
bucketWidthOverride?: number;
}): number {
if (bucketWidthOverride != null && bucketWidthOverride > 0) {
return bucketWidthOverride;
}
const targetSize = range / bucketCount;
for (const candidate of histogramBucketSizes) {
if (targetSize < candidate && candidate >= smallestDelta) {
return candidate;
}
}
return 0;
}
function buildFrames(
result: MetricRangePayloadProps['data']['result'],
mergeAllActiveQueries: boolean,
): number[][] {
const frames: number[][] = result.map((item) =>
item.values.map(([, valueStr]) => Number.parseFloat(valueStr) || 0),
);
if (mergeAllActiveQueries && frames.length > 1) {
const first = frames[0];
for (let i = 1; i < frames.length; i++) {
first.push(...frames[i]);
frames[i] = [];
}
}
return frames;
}
export function prepareHistogramPanelData({
apiResponse,
bucketWidth,
bucketCount: bucketCountProp = DEFAULT_BUCKET_COUNT,
mergeAllActiveQueries = false,
}: PrepareHistogramPanelDataParams): AlignedData {
const bucketCount = bucketCountProp ?? DEFAULT_BUCKET_COUNT;
const result = apiResponse.data.result;
const seriesValues = extractNumericValues(result);
if (seriesValues.length === 0) {
return [[]];
}
const sorted = [...seriesValues].sort((a, b) => a - b);
const min = sorted[0];
const max = sorted[sorted.length - 1];
const range = max - min;
const smallestDelta = computeSmallestDelta(sorted);
let bucketSize = selectBucketSize({
range,
bucketCount,
smallestDelta,
bucketWidthOverride: bucketWidth,
});
if (bucketSize <= 0) {
bucketSize = range > 0 ? range / bucketCount : 1;
}
const getBucket = (v: number): number =>
roundDecimals(incrRoundDn(v - BUCKET_OFFSET, bucketSize) + BUCKET_OFFSET, 9);
const frames = buildFrames(result, mergeAllActiveQueries);
const histogramsPerSeries: AlignedData[] = frames
.filter((frame) => frame.length > 0)
.map((frame) => buildHistogramBuckets(frame, getBucket, HIST_SORT));
if (histogramsPerSeries.length === 0) {
return [[]];
}
const mergedHistogramData = mergeAlignedDataTables(histogramsPerSeries);
replaceUndefinedWithNullInAlignedData(mergedHistogramData);
prependNullBinToFirstHistogramSeries(mergedHistogramData, bucketSize);
return mergedHistogramData;
}
export function prepareHistogramPanelConfig({
widget,
apiResponse,
panelMode,
isDarkMode,
}: {
widget: Widgets;
apiResponse: MetricRangePayloadProps;
panelMode: PanelMode;
isDarkMode: boolean;
}): UPlotConfigBuilder {
const builder = buildBaseConfig({
widget,
isDarkMode,
apiResponse,
panelMode,
panelType: PANEL_TYPES.HISTOGRAM,
});
builder.setCursor({
drag: {
x: false,
y: false,
setScale: true,
},
focus: {
prox: 1e3,
},
});
builder.addScale({
scaleKey: 'x',
time: false,
auto: true,
});
builder.addScale({
scaleKey: 'y',
time: false,
auto: true,
min: 0,
});
const currentQuery = widget.query;
const mergeAllActiveQueries = widget?.mergeAllActiveQueries ?? false;
// When merged, data has only one y column; add one series to match. Otherwise add one per result.
if (mergeAllActiveQueries) {
builder.addSeries({
label: '',
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.HISTOGRAM,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
barWidthFactor: 1,
pointSize: 5,
lineColor: '#3f5ecc',
fillColor: '#4E74F8',
isDarkMode,
});
} else {
apiResponse.data.result.forEach((series) => {
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
series.legend || '',
);
const label = currentQuery
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
builder.addSeries({
label: label,
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.HISTOGRAM,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
barWidthFactor: 1,
pointSize: 5,
isDarkMode,
});
});
}
return builder;
}
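
The bucket-size selection removed above picks the first "nice" size that is at least as coarse as the target width and no finer than the smallest gap between data points. A worked sketch of that rule (the candidate list is hypothetical, standing in for `@grafana/data`'s `histogramBucketSizes`):

```typescript
const candidateSizes = [0.5, 1, 2, 2.5, 4, 5, 10, 20, 25];

function pickBucketSize(
	range: number,
	bucketCount: number,
	smallestDelta: number,
): number {
	const targetSize = range / bucketCount; // e.g. 100 / 30 ≈ 3.33
	for (const candidate of candidateSizes) {
		// First candidate wider than the target that would not split the
		// smallest observed gap across multiple buckets.
		if (targetSize < candidate && candidate >= smallestDelta) {
			return candidate;
		}
	}
	return 0; // caller falls back to range / bucketCount
}

console.log(pickBucketSize(100, 30, 1)); // -> 4
```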

View File

@@ -2,12 +2,12 @@ import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
@@ -26,6 +26,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
isFullViewMode,
onToggleModelHandler,
} = props;
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -34,7 +35,16 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();
useScrollWidgetIntoView(widget.id, graphRef);
useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);

View File

@@ -14,6 +14,11 @@ export interface GraphVisibilityState {
dataIndex: SeriesVisibilityItem[];
}
export interface SeriesVisibilityState {
labels: string[];
visibility: boolean[];
}
/**
* Context in which a panel is rendered. Used to vary behavior (e.g. persistence,
* interactions) per context.
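
The new `SeriesVisibilityState` stores the same information as a `SeriesVisibilityItem[]`, but column-wise: one array of labels and one parallel array of flags. A minimal sketch of the correspondence, using only shapes visible in this diff:

```typescript
interface SeriesVisibilityItem {
	label: string;
	show: boolean;
}

interface SeriesVisibilityState {
	labels: string[];
	visibility: boolean[];
}

function toSeriesVisibilityState(
	items: SeriesVisibilityItem[],
): SeriesVisibilityState {
	return {
		labels: items.map((item) => item.label),
		visibility: items.map((item) => item.show),
	};
}

// -> { labels: ['CPU', 'Memory'], visibility: [true, false] }
console.log(
	toSeriesVisibilityState([
		{ label: 'CPU', show: true },
		{ label: 'Memory', show: false },
	]),
);
```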

View File

@@ -62,10 +62,10 @@ describe('legendVisibilityUtils', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual([
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
expect(result).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('returns visibility by index including duplicate labels', () => {
@@ -85,11 +85,10 @@ describe('legendVisibilityUtils', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual([
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
{ label: 'Memory', show: false },
]);
expect(result).toEqual({
labels: ['CPU', 'CPU', 'Memory'],
visibility: [true, false, false],
});
});
it('returns null on malformed JSON in localStorage', () => {
@@ -128,10 +127,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('adds a new widget entry when other widgets already exist', () => {
@@ -150,7 +149,7 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-new');
expect(stored).not.toBeNull();
expect(stored).toEqual([{ label: 'CPU', show: false }]);
expect(stored).toEqual({ labels: ['CPU'], visibility: [false] });
});
it('updates existing widget visibility when entry already exists', () => {
@@ -176,10 +175,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'CPU', show: false },
{ label: 'Memory', show: true },
]);
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [false, true],
});
});
it('silently handles malformed existing JSON without throwing', () => {
@@ -202,10 +201,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'x-axis', show: true },
{ label: 'CPU', show: false },
]);
expect(stored).toEqual({
labels: ['x-axis', 'CPU'],
visibility: [true, false],
});
const expected = [
{
name: 'widget-1',
@@ -232,12 +231,14 @@ describe('legendVisibilityUtils', () => {
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual([
{ label: 'A', show: true },
]);
expect(getStoredSeriesVisibility('widget-b')).toEqual([
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual({
labels: ['A'],
visibility: [true],
});
expect(getStoredSeriesVisibility('widget-b')).toEqual({
labels: ['B'],
visibility: [true],
});
});
it('calls setItem with storage key and stringified visibility states', () => {

View File

@@ -19,9 +19,9 @@ export interface BaseConfigBuilderProps {
widget: Widgets;
apiResponse: MetricRangePayloadProps;
isDarkMode: boolean;
onClick?: OnClickPluginOpts['onClick'];
onDragSelect?: (startTime: number, endTime: number) => void;
timezone?: Timezone;
onClick: OnClickPluginOpts['onClick'];
onDragSelect: (startTime: number, endTime: number) => void;
timezone: Timezone;
panelMode: PanelMode;
panelType: PANEL_TYPES;
minTimeScale?: number;
@@ -40,10 +40,8 @@ export function buildBaseConfig({
minTimeScale,
maxTimeScale,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
const tzDate = timezone
? (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value)
: undefined;
const tzDate = (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
const builder = new UPlotConfigBuilder({
onDragSelect,
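
With `timezone` now required, `tzDate` is always defined and uPlot renders x-axis values through it. A small sketch of what that conversion does (the timestamp and zone are hypothetical; `uPlot.tzDate` is the library's own utility):

```typescript
import uPlot from 'uplot';

const timezone = { value: 'Asia/Kolkata' };

// Backend timestamps arrive as epoch seconds; uPlot expects Dates shifted
// into the display timezone.
const tzDate = (timestamp: number): Date =>
	uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);

// 1771237752 s is 2026-02-16T10:29:12Z; the Date renders as 15:59:12 IST.
console.log(tzDate(1771237752));
```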

View File

@@ -1,225 +0,0 @@
import {
NULL_EXPAND,
NULL_REMOVE,
NULL_RETAIN,
} from 'container/PanelWrapper/constants';
import { AlignedData } from 'uplot';
/**
* Expands contiguous runs of `null` values to the left and right of their
* original positions so that visual gaps in the series are continuous.
*
* This is used when `NULL_EXPAND` mode is selected while joining series.
*/
function propagateNullsAcrossNeighbors(
seriesValues: Array<number | null>,
nullIndices: number[],
alignedLength: number,
): void {
for (
let i = 0, currentIndex, lastExpandedNullIndex = -1;
i < nullIndices.length;
i++
) {
const nullIndex = nullIndices[i];
if (nullIndex > lastExpandedNullIndex) {
// expand left until we hit a non-null value
currentIndex = nullIndex - 1;
while (currentIndex >= 0 && seriesValues[currentIndex] == null) {
seriesValues[currentIndex--] = null;
}
// expand right until we hit a non-null value
currentIndex = nullIndex + 1;
while (currentIndex < alignedLength && seriesValues[currentIndex] == null) {
seriesValues[(lastExpandedNullIndex = currentIndex++)] = null;
}
}
}
}
/**
* Merges multiple uPlot `AlignedData` tables into a single aligned table.
*
* - Merges and sorts all distinct x-values from each table.
* - Re-aligns every series onto the merged x-axis.
* - Applies per-series null handling (`NULL_REMOVE`, `NULL_RETAIN`, `NULL_EXPAND`).
*/
/* eslint-disable sonarjs/cognitive-complexity */
export function mergeAlignedDataTables(
alignedTables: AlignedData[],
nullModes?: number[][],
): AlignedData {
let mergedXValues: Set<number>;
// eslint-disable-next-line prefer-const
mergedXValues = new Set();
// Collect all unique x-values from every table.
for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
const table = alignedTables[tableIndex];
const xValues = table[0];
const xLength = xValues.length;
for (let i = 0; i < xLength; i++) {
mergedXValues.add(xValues[i]);
}
}
// Sorted, merged x-axis used by the final result.
const alignedData: (number | null | undefined)[][] = [
Array.from(mergedXValues).sort((a, b) => a - b),
];
const alignedLength = alignedData[0].length;
// Map from x-value to its index in the merged x-axis.
const xValueToIndexMap = new Map<number, number>();
for (let i = 0; i < alignedLength; i++) {
xValueToIndexMap.set(alignedData[0][i] as number, i);
}
// Re-align all series from all tables onto the merged x-axis.
for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
const table = alignedTables[tableIndex];
const xValues = table[0];
for (let seriesIndex = 1; seriesIndex < table.length; seriesIndex++) {
const seriesValues = table[seriesIndex];
const alignedSeriesValues = Array(alignedLength).fill(undefined);
const nullHandlingMode = nullModes
? nullModes[tableIndex][seriesIndex]
: NULL_RETAIN;
const nullIndices: number[] = [];
for (let i = 0; i < seriesValues.length; i++) {
const valueAtPoint = seriesValues[i];
const alignedIndex = xValueToIndexMap.get(xValues[i]);
if (alignedIndex == null) {
continue;
}
if (valueAtPoint === null) {
if (nullHandlingMode !== NULL_REMOVE) {
alignedSeriesValues[alignedIndex] = valueAtPoint;
if (nullHandlingMode === NULL_EXPAND) {
nullIndices.push(alignedIndex);
}
}
} else {
alignedSeriesValues[alignedIndex] = valueAtPoint;
}
}
// Optionally expand nulls to visually preserve gaps.
propagateNullsAcrossNeighbors(
alignedSeriesValues,
nullIndices,
alignedLength,
);
alignedData.push(alignedSeriesValues);
}
}
return alignedData as AlignedData;
}
/**
* Builds histogram buckets from raw values.
*
* - Each value is mapped into a bucket via `getBucketForValue`.
* - Counts how many values fall into each bucket.
* - Optionally sorts buckets using the provided comparator.
*/
export function buildHistogramBuckets(
values: number[],
getBucketForValue: (value: number) => number,
sortBuckets?: ((a: number, b: number) => number) | null,
): AlignedData {
const bucketMap = new Map<number, { value: number; count: number }>();
for (let i = 0; i < values.length; i++) {
let value = values[i];
if (value != null) {
value = getBucketForValue(value);
}
const bucket = bucketMap.get(value);
if (bucket) {
bucket.count++;
} else {
bucketMap.set(value, { value, count: 1 });
}
}
const buckets = [...bucketMap.values()];
// eslint-disable-next-line @typescript-eslint/no-unused-expressions
sortBuckets && buckets.sort((a, b) => sortBuckets(a.value, b.value));
const bucketValues = Array(buckets.length);
const bucketCounts = Array(buckets.length);
for (let i = 0; i < buckets.length; i++) {
bucketValues[i] = buckets[i].value;
bucketCounts[i] = buckets[i].count;
}
return [bucketValues, bucketCounts];
}
/**
* Mutates an `AlignedData` instance, replacing all `undefined` entries
* with explicit `null` values so uPlot treats them as gaps.
*/
export function replaceUndefinedWithNullInAlignedData(
data: AlignedData,
): AlignedData {
const seriesList = data as (number | null | undefined)[][];
for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex++) {
for (
let pointIndex = 0;
pointIndex < seriesList[seriesIndex].length;
pointIndex++
) {
if (seriesList[seriesIndex][pointIndex] === undefined) {
seriesList[seriesIndex][pointIndex] = null;
}
}
}
return data;
}
/**
* Ensures the first histogram series has a leading "empty" bin so that
* all series line up visually when rendered as bars.
*
* - Prepends a new x-value (first x - `bucketSize`) to the first series.
* - Prepends `null` to all subsequent series at the same index.
*/
export function prependNullBinToFirstHistogramSeries(
alignedData: AlignedData,
bucketSize: number,
): void {
const seriesList = alignedData as (number | null)[][];
if (
seriesList.length > 0 &&
seriesList[0].length > 0 &&
seriesList[0][0] !== null
) {
seriesList[0].unshift(seriesList[0][0] - bucketSize);
for (let seriesIndex = 1; seriesIndex < seriesList.length; seriesIndex++) {
seriesList[seriesIndex].unshift(null);
}
}
}
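
For reference, the removed `buildHistogramBuckets` counts how many values land in each bucket and returns the result as a two-row `[bucketValues, bucketCounts]` table. A self-contained sketch of that behavior on hypothetical inputs, with `Math.floor` standing in for the real `getBucketForValue`:

```typescript
const values = [0.2, 1.1, 1.9, 4.7];
const getBucket = (v: number): number => Math.floor(v);

// Count occurrences per bucket.
const bucketMap = new Map<number, number>();
for (const v of values) {
	const bucket = getBucket(v);
	bucketMap.set(bucket, (bucketMap.get(bucket) ?? 0) + 1);
}

// Sort buckets ascending, then split into the aligned-data layout.
const sorted = [...bucketMap.entries()].sort((a, b) => a[0] - b[0]);
const bucketValues = sorted.map(([value]) => value);
const bucketCounts = sorted.map(([, count]) => count);

console.log([bucketValues, bucketCounts]); // -> [[0, 1, 4], [1, 2, 1]]
```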

View File

@@ -1,6 +1,10 @@
import { LOCALSTORAGE } from 'constants/localStorage';
import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
import {
GraphVisibilityState,
SeriesVisibilityItem,
SeriesVisibilityState,
} from '../types';
/**
* Retrieves the stored series visibility for a specific widget from localStorage by index.
@@ -10,7 +14,7 @@ import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
*/
export function getStoredSeriesVisibility(
widgetId: string,
): SeriesVisibilityItem[] | null {
): SeriesVisibilityState | null {
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);
@@ -25,7 +29,10 @@ export function getStoredSeriesVisibility(
return null;
}
return widgetState.dataIndex;
return {
labels: widgetState.dataIndex.map((item) => item.label),
visibility: widgetState.dataIndex.map((item) => item.show),
};
} catch (error) {
if (error instanceof SyntaxError) {
// If the stored data is malformed, remove it
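
Putting the storage format and the new return shape together: localStorage holds an array of per-widget entries (visible in the tests above), and `getStoredSeriesVisibility` now projects the matching entry's `dataIndex` into the columnar `SeriesVisibilityState`. A sketch of the round trip with hypothetical data (the literal key below stands in for `LOCALSTORAGE.GRAPH_VISIBILITY_STATES`, whose actual value is not shown in this diff):

```typescript
// Shape persisted in localStorage: one entry per widget.
localStorage.setItem(
	'GRAPH_VISIBILITY_STATES',
	JSON.stringify([
		{
			name: 'widget-1',
			dataIndex: [
				{ label: 'CPU', show: true },
				{ label: 'Memory', show: false },
			],
		},
	]),
);

// getStoredSeriesVisibility('widget-1') now returns:
// { labels: ['CPU', 'Memory'], visibility: [true, false] }
```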

View File

@@ -477,67 +477,47 @@
}
}
.observability-tools-radio-container {
display: flex;
flex-wrap: wrap;
gap: 0 12px;
width: 528px;
.observability-tool-radio-item {
display: flex;
align-items: center;
gap: 8px;
height: 32px;
width: calc((528px - 12px) / 2);
flex: 0 0 calc((528px - 12px) / 2);
label {
color: var(--l1-foreground);
font-size: 13px;
cursor: pointer;
}
button[role='radio'] {
&[data-state='unchecked'] {
border-color: var(--l3-border) !important;
border-width: 1px !important;
}
}
&.observability-tool-others-item {
.onboarding-questionaire-other-input {
flex: 1;
}
}
}
}
.migration-timeline-radio-container,
.opentelemetry-radio-container {
display: flex;
flex-wrap: wrap;
gap: 0 12px;
width: 528px;
.migration-timeline-radio-item,
.opentelemetry-radio-item {
display: flex;
align-items: center;
gap: 8px;
height: 32px;
width: calc((528px - 12px) / 2);
flex: 0 0 calc((528px - 12px) / 2);
.opentelemetry-radio-group {
width: 100%;
label {
color: var(--l1-foreground);
font-size: 13px;
cursor: pointer;
.opentelemetry-radio-items-wrapper {
display: flex;
flex-direction: row;
flex-wrap: nowrap;
gap: 12px;
width: 100%;
}
button[role='radio'] {
&[data-state='unchecked'] {
border-color: var(--l3-border) !important;
border-width: 1px !important;
.opentelemetry-radio-item {
display: flex;
align-items: center;
gap: 8px;
height: 32px;
width: calc((528px - 12px) / 2);
min-width: 258px;
flex: 0 0 calc((528px - 12px) / 2);
color: var(--l1-foreground);
font-family: Inter, sans-serif;
font-size: 13px;
font-weight: 400;
line-height: 1;
letter-spacing: -0.065px;
box-sizing: border-box;
.ant-radio {
.ant-radio-inner {
width: 16px;
height: 16px;
border-color: var(--l3-border);
}
&.ant-radio-checked .ant-radio-inner {
border-color: var(--bg-robin-500);
background-color: var(--bg-robin-500);
}
}
}
}
@@ -997,6 +977,27 @@
color: var(--bg-slate-300);
}
.opentelemetry-radio-container {
.opentelemetry-radio-group {
.opentelemetry-radio-items-wrapper {
.opentelemetry-radio-item {
color: var(--l1-foreground);
.ant-radio {
.ant-radio-inner {
border-color: var(--l3-border);
}
&.ant-radio-checked .ant-radio-inner {
border-color: var(--bg-robin-500);
background-color: var(--bg-robin-500);
}
}
}
}
}
}
.onboarding-back-button {
border-color: var(--text-vanilla-300);
color: var(--l3-foreground);

View File

@@ -1,27 +1,34 @@
/* eslint-disable sonarjs/cognitive-complexity */
import { useEffect, useState } from 'react';
import { useTranslation } from 'react-i18next';
import { Button } from '@signozhq/button';
import { Checkbox } from '@signozhq/checkbox';
import { Input } from '@signozhq/input';
import {
RadioGroup,
RadioGroupItem,
RadioGroupLabel,
} from '@signozhq/radio-group';
import { Typography } from 'antd';
import { Radio, Typography } from 'antd';
import { RadioChangeEvent } from 'antd/es/radio';
import logEvent from 'api/common/logEvent';
import { ArrowRight } from 'lucide-react';
import editOrg from 'api/organization/editOrg';
import { useNotifications } from 'hooks/useNotifications';
import { ArrowRight, Loader2 } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import '../OnboardingQuestionaire.styles.scss';
export interface OrgData {
id: string;
displayName: string;
}
export interface OrgDetails {
organisationName: string;
usesObservability: boolean | null;
observabilityTool: string | null;
otherTool: string | null;
usesOtel: boolean | null;
migrationTimeline: string | null;
}
interface OrgQuestionsProps {
currentOrgData: OrgData | null;
orgDetails: OrgDetails;
onNext: (details: OrgDetails) => void;
}
@@ -38,14 +45,19 @@ const observabilityTools = {
Others: 'Others',
};
const migrationTimelineOptions = {
lessThanMonth: 'Less than a month',
oneToThreeMonths: '1-3 months',
greaterThanThreeMonths: 'Greater than 3 months',
justExploring: 'Just exploring',
};
function OrgQuestions({
currentOrgData,
orgDetails,
onNext,
}: OrgQuestionsProps): JSX.Element {
const { updateOrg } = useAppContext();
const { notifications } = useNotifications();
function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
const { t } = useTranslation(['organizationsettings', 'common']);
const [organisationName, setOrganisationName] = useState<string>(
orgDetails?.organisationName || '',
);
const [observabilityTool, setObservabilityTool] = useState<string | null>(
orgDetails?.observabilityTool || null,
);
@@ -54,33 +66,92 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
);
const [isNextDisabled, setIsNextDisabled] = useState<boolean>(true);
useEffect(() => {
setOrganisationName(orgDetails.organisationName);
}, [orgDetails.organisationName]);
const [isLoading, setIsLoading] = useState<boolean>(false);
const [usesOtel, setUsesOtel] = useState<boolean | null>(orgDetails.usesOtel);
const [migrationTimeline, setMigrationTimeline] = useState<string | null>(
orgDetails?.migrationTimeline || null,
);
const showMigrationQuestion =
observabilityTool !== null && observabilityTool !== 'None';
const handleNext = (): void => {
const handleOrgNameUpdate = async (): Promise<void> => {
const usesObservability =
!observabilityTool?.includes('None') && observabilityTool !== null;
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
usesOtel,
migrationTimeline,
});
/* Early bailout if orgData is not set, the organisation name is missing or empty, or the name is unchanged from orgData */
if (
!currentOrgData ||
!organisationName ||
organisationName === '' ||
orgDetails.organisationName === organisationName
) {
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
onNext({
usesObservability,
observabilityTool,
otherTool,
usesOtel,
migrationTimeline,
});
onNext({
organisationName,
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
return;
}
try {
setIsLoading(true);
const { statusCode, error } = await editOrg({
displayName: organisationName,
orgId: currentOrgData.id,
});
if (statusCode === 204) {
updateOrg(currentOrgData?.id, organisationName);
logEvent('Org Onboarding: Org Name Updated', {
organisationName,
});
logEvent('Org Onboarding: Answered', {
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
onNext({
organisationName,
usesObservability,
observabilityTool,
otherTool,
usesOtel,
});
} else {
logEvent('Org Onboarding: Org Name Update Failed', {
organisationName: orgDetails.organisationName,
});
notifications.error({
message:
error ||
t('something_went_wrong', {
ns: 'common',
}),
});
}
setIsLoading(false);
} catch (error) {
setIsLoading(false);
notifications.error({
message: t('something_went_wrong', {
ns: 'common',
}),
});
}
};
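handleOrgNameUpdate logs the answers, skips the editOrg call when the name is empty or unchanged, and only advances after a successful save (HTTP 204) or the early bailout. A condensed sketch of that decision flow, with hypothetical saveName/notifyError stand-ins for the editOrg call and the notifications hook:
// Hypothetical stand-ins: saveName wraps the editOrg API call and
// resolves to its status code; notifyError wraps notifications.error.
async function updateOrgNameThenContinue(
	currentName: string | null,
	nextName: string,
	saveName: (name: string) => Promise<number>,
	notifyError: (message: string) => void,
	onNext: () => void,
): Promise<void> {
	// Early bailout: nothing to persist when the name is empty or unchanged.
	if (!nextName || nextName === currentName) {
		onNext();
		return;
	}
	try {
		const statusCode = await saveName(nextName);
		if (statusCode === 204) {
			// Success: advance to the next step.
			onNext();
		} else {
			// Stay on the step so the user can retry.
			notifyError('something went wrong');
		}
	} catch {
		notifyError('something went wrong');
	}
}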
const isValidUsesObservability = (): boolean => {
@@ -102,29 +173,22 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
useEffect(() => {
const isValidObservability = isValidUsesObservability();
const isMigrationValid = !showMigrationQuestion || migrationTimeline !== null;
if (usesOtel !== null && isValidObservability && isMigrationValid) {
if (organisationName !== '' && usesOtel !== null && isValidObservability) {
setIsNextDisabled(false);
} else {
setIsNextDisabled(true);
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [
usesOtel,
observabilityTool,
otherTool,
migrationTimeline,
showMigrationQuestion,
]);
}, [organisationName, usesOtel, observabilityTool, otherTool]);
const handleObservabilityToolChange = (value: string): void => {
setObservabilityTool(value);
if (value !== 'Others') {
setOtherTool('');
}
if (value === 'None') {
setMigrationTimeline(null);
const createObservabilityToolHandler = (tool: string) => (
checked: boolean,
): void => {
if (checked) {
setObservabilityTool(tool);
} else if (observabilityTool === tool) {
setObservabilityTool(null);
}
};
@@ -132,6 +196,10 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
setUsesOtel(value === 'yes');
};
const handleOnNext = (): void => {
handleOrgNameUpdate();
};
return (
<div className="questions-container">
<div className="onboarding-header-section">
@@ -146,24 +214,40 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
<div className="questions-form-container">
<div className="questions-form">
<div className="form-group">
<label className="question" htmlFor="organisationName">
Name of your company
</label>
<Input
type="text"
name="organisationName"
id="organisationName"
placeholder="e.g. Simpsonville"
autoComplete="off"
value={organisationName}
onChange={(e): void => setOrganisationName(e.target.value)}
/>
</div>
<div className="form-group">
<label className="question" htmlFor="observabilityTool">
Which observability tool do you currently use?
</label>
<RadioGroup
value={observabilityTool || ''}
onValueChange={handleObservabilityToolChange}
className="observability-tools-radio-container"
>
<div className="observability-tools-checkbox-container">
{Object.entries(observabilityTools).map(([tool, label]) => {
if (tool === 'Others') {
return (
<div
key={tool}
className="radio-item observability-tool-radio-item observability-tool-others-item"
className="checkbox-item observability-tool-checkbox-item observability-tool-others-item"
>
<RadioGroupItem value={tool} id={`radio-${tool}`} />
{observabilityTool === 'Others' ? (
<Checkbox
id={`checkbox-${tool}`}
checked={observabilityTool === tool}
onCheckedChange={createObservabilityToolHandler(tool)}
labelName={observabilityTool === 'Others' ? '' : label}
/>
{observabilityTool === 'Others' && (
<Input
type="text"
className="onboarding-questionaire-other-input"
@@ -172,60 +256,55 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
autoFocus
onChange={(e): void => setOtherTool(e.target.value)}
/>
) : (
<RadioGroupLabel htmlFor={`radio-${tool}`}>{label}</RadioGroupLabel>
)}
</div>
);
}
return (
<div key={tool} className="radio-item observability-tool-radio-item">
<RadioGroupItem value={tool} id={`radio-${tool}`} />
<RadioGroupLabel htmlFor={`radio-${tool}`}>{label}</RadioGroupLabel>
<div
key={tool}
className="checkbox-item observability-tool-checkbox-item"
>
<Checkbox
id={`checkbox-${tool}`}
checked={observabilityTool === tool}
onCheckedChange={createObservabilityToolHandler(tool)}
labelName={label}
/>
</div>
);
})}
</RadioGroup>
</div>
{showMigrationQuestion && (
<div className="form-group">
<div className="question">
What is your timeline for migrating to SigNoz?
</div>
<RadioGroup
value={migrationTimeline || ''}
onValueChange={setMigrationTimeline}
className="migration-timeline-radio-container"
>
{Object.entries(migrationTimelineOptions).map(([key, label]) => (
<div key={key} className="radio-item migration-timeline-radio-item">
<RadioGroupItem value={key} id={`radio-migration-${key}`} />
<RadioGroupLabel htmlFor={`radio-migration-${key}`}>
{label}
</RadioGroupLabel>
</div>
))}
</RadioGroup>
</div>
)}
</div>
<div className="form-group">
<div className="question">Do you already use OpenTelemetry?</div>
<RadioGroup
value={usesOtel === true ? 'yes' : usesOtel === false ? 'no' : ''}
onValueChange={handleOtelChange}
className="opentelemetry-radio-container"
>
<div className="radio-item opentelemetry-radio-item">
<RadioGroupItem value="yes" id="radio-otel-yes" />
<RadioGroupLabel htmlFor="radio-otel-yes">Yes</RadioGroupLabel>
</div>
<div className="radio-item opentelemetry-radio-item">
<RadioGroupItem value="no" id="radio-otel-no" />
<RadioGroupLabel htmlFor="radio-otel-no">No</RadioGroupLabel>
</div>
</RadioGroup>
<div className="opentelemetry-radio-container">
<Radio.Group
value={((): string | undefined => {
if (usesOtel === true) {
return 'yes';
}
if (usesOtel === false) {
return 'no';
}
return undefined;
})()}
onChange={(e: RadioChangeEvent): void =>
handleOtelChange(e.target.value)
}
className="opentelemetry-radio-group"
>
<div className="opentelemetry-radio-items-wrapper">
<Radio value="yes" className="opentelemetry-radio-item">
Yes
</Radio>
<Radio value="no" className="opentelemetry-radio-item">
No
</Radio>
</div>
</Radio.Group>
</div>
</div>
</div>
@@ -233,9 +312,15 @@ function OrgQuestions({ orgDetails, onNext }: OrgQuestionsProps): JSX.Element {
variant="solid"
color="primary"
className={`onboarding-next-button ${isNextDisabled ? 'disabled' : ''}`}
onClick={handleNext}
onClick={handleOnNext}
disabled={isNextDisabled}
suffixIcon={<ArrowRight size={12} />}
suffixIcon={
isLoading ? (
<Loader2 className="animate-spin" size={12} />
) : (
<ArrowRight size={12} />
)
}
>
Next
</Button>
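The checkbox group above emulates single-select behaviour: createObservabilityToolHandler curries the tool name into a per-checkbox handler, so checking one tool replaces the previous selection and unchecking the currently selected tool clears it. A minimal sketch of the same pattern outside React state, with hypothetical makeSingleSelect/current names:
// Curried single-select handler: checking a tool selects it,
// unchecking the currently selected tool clears the selection.
type CheckedHandler = (checked: boolean) => void;
function makeSingleSelect(
	getCurrent: () => string | null,
	setCurrent: (tool: string | null) => void,
): (tool: string) => CheckedHandler {
	return (tool: string): CheckedHandler => (checked: boolean): void => {
		if (checked) {
			setCurrent(tool);
		} else if (getCurrent() === tool) {
			setCurrent(null);
		}
	};
}
let current: string | null = null;
const handlerFor = makeSingleSelect(
	(): string | null => current,
	(tool: string | null): void => {
		current = tool;
	},
);
handlerFor('Datadog')(true); // current === 'Datadog'
handlerFor('Grafana')(true); // current === 'Grafana' (replaces the previous selection)
handlerFor('Grafana')(false); // current === null (unchecking the selected tool clears it)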

View File

@@ -69,7 +69,7 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
expect(screen.getByText(/welcome to signoz cloud/i)).toBeInTheDocument();
expect(screen.getByLabelText(/name of your company/i)).toBeInTheDocument();
expect(
screen.getByText(/which observability tool do you currently use/i),
).toBeInTheDocument();
@@ -86,12 +86,15 @@ describe('OnboardingQuestionaire Component', () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
const datadogCheckbox = screen.getByLabelText(/datadog/i);
await user.click(datadogCheckbox);
const otelYes = screen.getByRole('radio', { name: /yes/i });
await user.click(otelYes);
await user.click(screen.getByLabelText(/just exploring/i));
const nextButton = await screen.findByRole('button', { name: /next/i });
expect(nextButton).not.toBeDisabled();
@@ -109,38 +112,15 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});
it('shows migration timeline options only when specific observability tools are selected', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
// Initially not visible
expect(
screen.queryByText(/What is your timeline for migrating to SigNoz/i),
).not.toBeInTheDocument();
const datadogCheckbox = screen.getByLabelText(/datadog/i);
await user.click(datadogCheckbox);
expect(
await screen.findByText(/What is your timeline for migrating to SigNoz/i),
).toBeInTheDocument();
// Not visible when None is selected
const noneCheckbox = screen.getByLabelText(/none\/starting fresh/i);
await user.click(noneCheckbox);
expect(
screen.queryByText(/What is your timeline for migrating to SigNoz/i),
).not.toBeInTheDocument();
});
it('proceeds to step 2 when next is clicked', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<OnboardingQuestionaire />);
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
const nextButton = screen.getByRole('button', { name: /next/i });
await user.click(nextButton);
@@ -157,10 +137,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -176,10 +157,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
await waitFor(() => {
@@ -193,10 +175,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -220,10 +203,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -248,10 +232,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate through steps 1 and 2
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -282,10 +267,11 @@ describe('OnboardingQuestionaire Component', () => {
render(<OnboardingQuestionaire />);
// Navigate to step 3
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
await user.click(screen.getByLabelText(/just exploring/i));
await user.click(screen.getByRole('button', { name: /next/i }));
expect(
@@ -304,4 +290,40 @@ describe('OnboardingQuestionaire Component', () => {
).toBeInTheDocument();
});
});
describe('Error Handling', () => {
it('handles organization update error gracefully', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.put(EDIT_ORG_ENDPOINT, (_, res, ctx) =>
res(
ctx.status(500),
ctx.json({
error: {
code: 'INTERNAL_ERROR',
message: 'Failed to update organization',
},
}),
),
),
);
render(<OnboardingQuestionaire />);
const orgNameInput = screen.getByLabelText(/name of your company/i);
await user.clear(orgNameInput);
await user.type(orgNameInput, 'Test Company');
await user.click(screen.getByLabelText(/datadog/i));
await user.click(screen.getByRole('radio', { name: /yes/i }));
const nextButton = screen.getByRole('button', { name: /next/i });
await user.click(nextButton);
// Component should still be functional
await waitFor(() => {
expect(nextButton).not.toBeDisabled();
});
});
});
});

View File

@@ -23,7 +23,7 @@ import InviteTeamMembers from './InviteTeamMembers/InviteTeamMembers';
import OptimiseSignozNeeds, {
OptimiseSignozDetails,
} from './OptimiseSignozNeeds/OptimiseSignozNeeds';
import OrgQuestions, { OrgDetails } from './OrgQuestions/OrgQuestions';
import OrgQuestions, { OrgData, OrgDetails } from './OrgQuestions/OrgQuestions';
import './OnboardingQuestionaire.styles.scss';
@@ -37,11 +37,11 @@ export const showErrorNotification = (
};
const INITIAL_ORG_DETAILS: OrgDetails = {
organisationName: '',
usesObservability: true,
observabilityTool: '',
otherTool: '',
usesOtel: null,
migrationTimeline: null,
};
const INITIAL_SIGNOZ_DETAILS: SignozDetails = {
@@ -79,11 +79,25 @@ function OnboardingQuestionaire(): JSX.Element {
InviteTeamMembersProps[] | null
>(null);
const [currentOrgData, setCurrentOrgData] = useState<OrgData | null>(null);
const [
updatingOrgOnboardingStatus,
setUpdatingOrgOnboardingStatus,
] = useState<boolean>(false);
useEffect(() => {
if (org) {
setCurrentOrgData(org[0]);
setOrgDetails({
...orgDetails,
organisationName: org[0].displayName,
});
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [org]);
useEffect(() => {
logEvent('Org Onboarding: Started', {
org_id: org?.[0]?.id,
@@ -161,7 +175,6 @@ function OnboardingQuestionaire(): JSX.Element {
? (orgDetails?.otherTool as string)
: (orgDetails?.observabilityTool as string),
where_did_you_discover_signoz: signozDetails?.discoverSignoz as string,
timeline_for_migrating_to_signoz: orgDetails?.migrationTimeline as string,
reasons_for_interest_in_signoz: signozDetails?.interestInSignoz?.includes(
'Others',
)
@@ -195,6 +208,7 @@ function OnboardingQuestionaire(): JSX.Element {
<div className="onboarding-questionaire-content">
{currentStep === 1 && (
<OrgQuestions
currentOrgData={currentOrgData}
orgDetails={{
...orgDetails,
usesOtel: orgDetails.usesOtel ?? null,

View File

@@ -1,11 +1,11 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import BarPanel from 'container/DashboardContainer/visualization/panels/BarPanel/BarPanel';
import HistogramPanel from 'container/DashboardContainer/visualization/panels/HistogramPanel/HistogramPanel';
import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
import HistogramPanelWrapper from './HistogramPanelWrapper';
import ListPanelWrapper from './ListPanelWrapper';
import PiePanelWrapper from './PiePanelWrapper';
import TablePanelWrapper from './TablePanelWrapper';
import UplotPanelWrapper from './UplotPanelWrapper';
import ValuePanelWrapper from './ValuePanelWrapper';
export const PanelTypeVsPanelWrapper = {
@@ -16,8 +16,8 @@ export const PanelTypeVsPanelWrapper = {
[PANEL_TYPES.TRACE]: null,
[PANEL_TYPES.EMPTY_WIDGET]: null,
[PANEL_TYPES.PIE]: PiePanelWrapper,
[PANEL_TYPES.BAR]: BarPanel,
[PANEL_TYPES.HISTOGRAM]: HistogramPanel,
[PANEL_TYPES.BAR]: UplotPanelWrapper,
[PANEL_TYPES.HISTOGRAM]: HistogramPanelWrapper,
};
export const DEFAULT_BUCKET_COUNT = 30;
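This wiring points BAR and HISTOGRAM panels at the shared uPlot wrappers instead of the dedicated BarPanel/HistogramPanel components, with null entries marking panel types that have no renderer here. A minimal sketch of the lookup-map pattern, using hypothetical stubs for the wrapper components:
// Hypothetical stubs standing in for the real wrapper components.
const uplotPanelWrapper = (): string => 'uplot chart';
const histogramPanelWrapper = (): string => 'histogram chart';
// null marks panel types that intentionally have no wrapper.
const panelTypeVsWrapper: Record<string, (() => string) | null> = {
	bar: uplotPanelWrapper,
	histogram: histogramPanelWrapper,
	trace: null,
	empty_widget: null,
};
function renderPanel(panelType: string): string {
	const wrapper = panelTypeVsWrapper[panelType];
	return wrapper ? wrapper() : `no renderer for panel type "${panelType}"`;
}
renderPanel('bar'); // 'uplot chart'
renderPanel('trace'); // 'no renderer for panel type "trace"'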

View File

@@ -1,8 +0,0 @@
import { HistogramTooltipProps } from '../types';
import Tooltip from './Tooltip';
export default function HistogramTooltip(
props: HistogramTooltipProps,
): JSX.Element {
return <Tooltip {...props} showTooltipHeader={false} />;
}

View File

@@ -16,16 +16,12 @@ export default function Tooltip({
uPlotInstance,
timezone,
content,
showTooltipHeader = true,
}: TooltipProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const tooltipContent = content ?? [];
const headerTitle = useMemo(() => {
if (!showTooltipHeader) {
return null;
}
const data = uPlotInstance.data;
const cursorIdx = uPlotInstance.cursor.idx;
if (cursorIdx == null) {
@@ -34,12 +30,7 @@ export default function Tooltip({
return dayjs(data[0][cursorIdx] * 1000)
.tz(timezone)
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
}, [
timezone,
uPlotInstance.data,
uPlotInstance.cursor.idx,
showTooltipHeader,
]);
}, [timezone, uPlotInstance.data, uPlotInstance.cursor.idx]);
return (
<div
@@ -48,11 +39,9 @@ export default function Tooltip({
isDarkMode ? 'darkMode' : 'lightMode',
)}
>
{showTooltipHeader && (
<div className="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
)}
<div className="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
<div
style={{
height: Math.min(
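With the header now unconditional, headerTitle always formats the uPlot cursor timestamp (unix seconds) through dayjs. A standalone sketch of that conversion, assuming the utc/timezone plugins are registered as in the app and using a hypothetical format string in place of DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS:
import dayjs from 'dayjs';
import timezone from 'dayjs/plugin/timezone';
import utc from 'dayjs/plugin/utc';
dayjs.extend(utc);
dayjs.extend(timezone);
// uPlot x values are unix seconds; dayjs expects milliseconds.
function formatCursorTime(unixSeconds: number, tz: string): string {
	return dayjs(unixSeconds * 1000)
		.tz(tz)
		.format('MMM DD HH:mm:ss');
}
formatCursorTime(1700000000, 'Asia/Kolkata'); // 'Nov 15 03:43:20'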

View File

@@ -60,7 +60,6 @@ export interface TooltipRenderArgs {
}
export interface BaseTooltipProps {
showTooltipHeader?: boolean;
timezone: string;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;
@@ -75,14 +74,7 @@ export interface BarTooltipProps extends BaseTooltipProps, TooltipRenderArgs {
isStackedBarChart?: boolean;
}
export interface HistogramTooltipProps
extends BaseTooltipProps,
TooltipRenderArgs {}
export type TooltipProps =
| TimeSeriesTooltipProps
| BarTooltipProps
| HistogramTooltipProps;
export type TooltipProps = TimeSeriesTooltipProps | BarTooltipProps;
export enum LegendPosition {
BOTTOM = 'bottom',

View File

@@ -1,4 +1,4 @@
import { SeriesVisibilityItem } from 'container/DashboardContainer/visualization/panels/types';
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
import { getStoredSeriesVisibility } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { thresholdsDrawHook } from 'lib/uPlotV2/hooks/useThresholdsDrawHook';
@@ -238,7 +238,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
/**
* Returns stored series visibility by index from localStorage when preferences source is LOCAL_STORAGE, otherwise null.
*/
private getStoredVisibility(): SeriesVisibilityItem[] | null {
private getStoredVisibility(): SeriesVisibilityState | null {
if (
this.widgetId &&
this.selectionPreferencesSource === SelectionPreferencesSource.LOCAL_STORAGE
@@ -248,98 +248,14 @@ export class UPlotConfigBuilder extends ConfigBuilder<
return null;
}
/**
* Derive visibility resolution state from stored preferences and current series:
* - visibleStoredLabels: labels that should always be visible
* - hiddenStoredLabels: labels that should always be hidden
* - hasActivePreference: whether a "mix" preference applies to new labels
*/
// eslint-disable-next-line sonarjs/cognitive-complexity
private getVisibilityResolutionState(): {
visibleStoredLabels: Set<string>;
hiddenStoredLabels: Set<string>;
hasActivePreference: boolean;
} {
const seriesVisibilityState = this.getStoredVisibility();
if (!seriesVisibilityState || seriesVisibilityState.length === 0) {
return {
visibleStoredLabels: new Set<string>(),
hiddenStoredLabels: new Set<string>(),
hasActivePreference: false,
};
}
// Single pass over stored items to derive:
// - visibleStoredLabels: any label that is ever stored as visible
// - hiddenStoredLabels: labels that are only ever stored as hidden
// - hasMixPreference: there is at least one visible and one hidden entry
const visibleStoredLabels = new Set<string>();
const hiddenStoredLabels = new Set<string>();
let hasAnyVisible = false;
let hasAnyHidden = false;
for (const { label, show } of seriesVisibilityState) {
if (show) {
hasAnyVisible = true;
visibleStoredLabels.add(label);
// If a label is ever visible, it should not be treated as "only hidden"
if (hiddenStoredLabels.has(label)) {
hiddenStoredLabels.delete(label);
}
} else {
hasAnyHidden = true;
// Only track as hidden if we have not already seen it as visible
if (!visibleStoredLabels.has(label)) {
hiddenStoredLabels.add(label);
}
}
}
const hasMixPreference = hasAnyVisible && hasAnyHidden;
// Current series labels in this chart.
const currentSeriesLabels = this.series.map(
(s: UPlotSeriesBuilder) => s.getConfig().label ?? '',
);
// Check if any stored "visible" label exists in the current series list.
const hasVisibleIntersection =
visibleStoredLabels.size > 0 &&
currentSeriesLabels.some((label) => visibleStoredLabels.has(label));
// Active preference only when there is a mix AND at least one visible
// stored label is present in the current series list.
const hasActivePreference = hasMixPreference && hasVisibleIntersection;
// We apply stored visibility in two cases:
// - There is an active preference (mix + intersection), OR
// - There is no mix (all true or all false); preserve legacy behavior.
const shouldApplyStoredVisibility = !hasMixPreference || hasActivePreference;
if (!shouldApplyStoredVisibility) {
return {
visibleStoredLabels: new Set<string>(),
hiddenStoredLabels: new Set<string>(),
hasActivePreference,
};
}
return {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
};
}
/**
* Get legend items with visibility state restored from localStorage if available
*/
getLegendItems(): Record<number, LegendItem> {
const {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
} = this.getVisibilityResolutionState();
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
);
return this.series.reduce((acc, s: UPlotSeriesBuilder, index: number) => {
const seriesConfig = s.getConfig();
@@ -347,11 +263,11 @@ export class UPlotConfigBuilder extends ConfigBuilder<
// +1 because uPlot series 0 is x-axis/time; data series are at 1, 2, ... (also matches stored visibility[0]=time, visibility[1]=first data, ...)
const seriesIndex = index + 1;
const show = resolveSeriesVisibility({
seriesIndex,
seriesShow: seriesConfig.show,
seriesLabel: label,
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
});
acc[seriesIndex] = {
@@ -380,23 +296,22 @@ export class UPlotConfigBuilder extends ConfigBuilder<
...DEFAULT_PLOT_CONFIG,
};
const {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
} = this.getVisibilityResolutionState();
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
);
config.series = [
{ value: (): string => '', label: 'Timestamp' }, // Base series for timestamp
...this.series.map((s) => {
{ value: (): string => '' }, // Base series for timestamp
...this.series.map((s, index) => {
const series = s.getConfig();
// Stored visibility[0] is x-axis/time; data series start at visibility[1]
const visible = resolveSeriesVisibility({
seriesIndex: index + 1,
seriesShow: series.show,
seriesLabel: series.label ?? '',
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
});
return {
...series,

View File

@@ -49,7 +49,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
}: {
resolvedLineColor: string;
}): Partial<Series> {
const { lineWidth, lineStyle, lineCap, fillColor } = this.props;
const { lineWidth, lineStyle, lineCap } = this.props;
const lineConfig: Partial<Series> = {
stroke: resolvedLineColor,
width: lineWidth ?? 2,
@@ -63,12 +63,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
lineConfig.cap = lineCap;
}
if (fillColor) {
lineConfig.fill = fillColor;
} else if (this.props.panelType === PANEL_TYPES.BAR) {
if (this.props.panelType === PANEL_TYPES.BAR) {
lineConfig.fill = resolvedLineColor;
} else if (this.props.panelType === PANEL_TYPES.HISTOGRAM) {
lineConfig.fill = `${resolvedLineColor}40`;
}
return lineConfig;
@@ -151,8 +147,6 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
pointsConfig.show = false;
} else if (showPoints === VisibilityMode.Always) {
pointsConfig.show = true;
} else {
pointsConfig.show = false; // default to hidden
}
return pointsConfig;
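With fillColor dropped from SeriesProps, fill is now decided by panel type alone: bar series reuse the resolved stroke colour, and the histogram branch that appended a hex-alpha suffix (resolvedLineColor + '40', roughly 25% opacity) is removed along with the histogram panel. A minimal sketch of the remaining rule, with hypothetical type names:
type PanelType = 'bar' | 'time_series';
interface LineConfig {
	stroke: string;
	width: number;
	fill?: string;
}
// Bars are filled with the resolved line colour; other panel types
// get no fill from this helper.
function buildLineConfig(panelType: PanelType, resolvedLineColor: string): LineConfig {
	const config: LineConfig = { stroke: resolvedLineColor, width: 2 };
	if (panelType === 'bar') {
		config.fill = resolvedLineColor;
	}
	return config;
}
buildLineConfig('bar', '#4e74f8'); // { stroke: '#4e74f8', width: 2, fill: '#4e74f8' }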

View File

@@ -186,10 +186,11 @@ describe('UPlotConfigBuilder', () => {
});
it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'Requests', show: true },
{ label: 'Errors', show: false },
]);
// Index 0 = x-axis/time; indices 1,2 = data series (Requests, Errors). resolveSeriesVisibility matches by seriesIndex + seriesLabel.
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue({
labels: ['x-axis', 'Requests', 'Errors'],
visibility: [true, true, false],
});
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
@@ -201,7 +202,7 @@ describe('UPlotConfigBuilder', () => {
const legendItems = builder.getLegendItems();
// When any series is hidden, visibility is driven by stored label-based preferences
// When any series is hidden, legend visibility is driven by the stored map
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].show).toBe(false);
@@ -212,109 +213,6 @@ describe('UPlotConfigBuilder', () => {
expect(secondSeries?.show).toBe(false);
});
it('hides new series by default when there is a mixed preference and a visible label matches current series', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'Requests', show: true },
{ label: 'Errors', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
builder.addSeries(createSeriesProps({ label: 'Latency' }));
const legendItems = builder.getLegendItems();
// Stored labels: Requests (visible), Errors (hidden).
// New label "Latency" should be hidden because there is a mixed preference
// and "Requests" (a visible stored label) is present in the current series.
expect(legendItems[1].label).toBe('Requests');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('Errors');
expect(legendItems[2].show).toBe(false);
expect(legendItems[3].label).toBe('Latency');
expect(legendItems[3].show).toBe(false);
const config = builder.getConfig();
const [, firstSeries, secondSeries, thirdSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('Requests');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('Errors');
expect(secondSeries?.show).toBe(false);
expect(thirdSeries?.label).toBe('Latency');
expect(thirdSeries?.show).toBe(false);
});
it('shows all series when there is a mixed preference but no visible stored labels match current series', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'StoredVisible', show: true },
{ label: 'StoredHidden', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
// None of these labels intersect with the stored visible label "StoredVisible"
builder.addSeries(createSeriesProps({ label: 'CPU' }));
builder.addSeries(createSeriesProps({ label: 'Memory' }));
const legendItems = builder.getLegendItems();
// Mixed preference exists in storage, but since no visible labels intersect
// with current series, stored preferences are ignored and all are visible.
expect(legendItems[1].label).toBe('CPU');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('Memory');
expect(legendItems[2].show).toBe(true);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('CPU');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('Memory');
expect(secondSeries?.show).toBe(true);
});
it('treats duplicate labels as visible when any stored entry for that label is visible', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-dup',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
// Two series with the same label; both should be visible because at least
// one stored entry for "CPU" is visible.
builder.addSeries(createSeriesProps({ label: 'CPU' }));
builder.addSeries(createSeriesProps({ label: 'CPU' }));
const legendItems = builder.getLegendItems();
expect(legendItems[1].label).toBe('CPU');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('CPU');
expect(legendItems[2].show).toBe(true);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('CPU');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('CPU');
expect(secondSeries?.show).toBe(true);
});
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',

View File

@@ -175,7 +175,6 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
pointsBuilder?: Series.Points.Show;
show?: boolean;
spanGaps?: boolean;
fillColor?: string;
isDarkMode?: boolean;
stepInterval?: number;
}

View File

@@ -11,6 +11,22 @@ import { Threshold } from '../hooks/types';
import { findMinMaxThresholdValues } from './threshold';
import { LogScaleLimits, RangeFunctionParams } from './types';
/**
* Rounds a number down to the nearest multiple of incr.
* Used for linear scale min so the axis starts on a clean tick.
*/
export function incrRoundDn(num: number, incr: number): number {
return Math.floor(num / incr) * incr;
}
/**
* Rounds a number up to the nearest multiple of incr.
* Used for linear scale max so the axis ends on a clean tick.
*/
export function incrRoundUp(num: number, incr: number): number {
return Math.ceil(num / incr) * incr;
}
/**
* Snaps min/max/softMin/softMax to valid log-scale values (powers of logBase).
* Only applies when distribution is logarithmic; otherwise returns limits unchanged.
@@ -197,6 +213,25 @@ function getLogScaleRange(
);
}
/**
* Snaps linear scale min down and max up to whole numbers so axis bounds are clean.
*/
function roundLinearRange(minMax: Range.MinMax): Range.MinMax {
const [currentMin, currentMax] = minMax;
let roundedMin = currentMin;
let roundedMax = currentMax;
if (roundedMin != null) {
roundedMin = incrRoundDn(roundedMin, 1);
}
if (roundedMax != null) {
roundedMax = incrRoundUp(roundedMax, 1);
}
return [roundedMin, roundedMax];
}
/**
* Snaps log-scale [min, max] to exact powers of logBase (nearest magnitude below/above).
* If min and max would be equal after snapping, max is increased by one magnitude so the range is valid.
@@ -295,6 +330,7 @@ export function createRangeFunction(
if (scale.distr === 1) {
minMax = getLinearScaleRange(minMax, params, dataMin, dataMax);
minMax = roundLinearRange(minMax);
} else if (scale.distr === 3) {
minMax = getLogScaleRange(minMax, params, dataMin, dataMax, logBase);
const logFn = scale.log === 2 ? Math.log2 : Math.log10;
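incrRoundDn and incrRoundUp snap a value to the nearest multiple of incr (down for the min, up for the max), and roundLinearRange applies them with incr = 1 so a linear axis starts and ends on whole numbers. A quick worked example:
function incrRoundDn(num: number, incr: number): number {
	return Math.floor(num / incr) * incr;
}
function incrRoundUp(num: number, incr: number): number {
	return Math.ceil(num / incr) * incr;
}
// roundLinearRange uses incr = 1: a raw range of [0.42, 9.2] becomes [0, 10].
incrRoundDn(0.42, 1); // 0
incrRoundUp(9.2, 1); // 10
// Coarser increments snap to coarser ticks.
incrRoundDn(7.3, 5); // 5
incrRoundUp(7.3, 5); // 10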

View File

@@ -1,44 +1,25 @@
/**
* Resolve the visibility of a single series based on:
* - Stored per-series visibility (when applicable)
* - Whether there is an "active preference" (mix of visible/hidden that matches current series)
* - The series' own default show flag
*/
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
export function resolveSeriesVisibility({
seriesIndex,
seriesShow,
seriesLabel,
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
}: {
seriesIndex: number;
seriesShow: boolean | undefined | null;
seriesLabel: string;
visibleStoredLabels: Set<string> | null;
hiddenStoredLabels: Set<string> | null;
hasActivePreference: boolean;
seriesVisibilityState: SeriesVisibilityState | null;
isAnySeriesHidden: boolean;
}): boolean {
const isStoredVisible = !!visibleStoredLabels?.has(seriesLabel);
const isStoredHidden = !!hiddenStoredLabels?.has(seriesLabel);
// If the label is explicitly stored as visible, always show it.
if (isStoredVisible) {
return true;
if (
isAnySeriesHidden &&
seriesVisibilityState?.visibility &&
seriesVisibilityState.labels.length > seriesIndex &&
seriesVisibilityState.labels[seriesIndex] === seriesLabel
) {
return seriesVisibilityState.visibility[seriesIndex] ?? false;
}
// If the label is explicitly stored as hidden (and never stored as visible),
// always hide it.
if (isStoredHidden) {
return false;
}
// "Active preference" means:
// - There is a mix of visible/hidden in storage, AND
// - At least one stored *visible* label exists in the current series list.
// For such a preference, any new/unknown series should be hidden by default.
if (hasActivePreference) {
return false;
}
// Otherwise fall back to the series' own config or show by default.
return seriesShow ?? true;
}
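The rewritten resolver matches purely by position and label: a stored entry is honoured only when at least one stored series is hidden and the series at that index still carries the same label; everything else falls back to the series' own show flag. A self-contained sketch of the rule with a worked example:
interface SeriesVisibilityState {
	labels: string[];
	visibility: boolean[];
}
function resolveVisibility(
	seriesIndex: number,
	seriesLabel: string,
	seriesShow: boolean | undefined,
	stored: SeriesVisibilityState | null,
): boolean {
	const isAnySeriesHidden = !!stored?.visibility.some((show) => !show);
	// Stored visibility applies only when index and label both line up.
	if (
		isAnySeriesHidden &&
		stored &&
		stored.labels.length > seriesIndex &&
		stored.labels[seriesIndex] === seriesLabel
	) {
		return stored.visibility[seriesIndex] ?? false;
	}
	return seriesShow ?? true;
}
// Index 0 is the x-axis/time series; data series start at index 1.
const stored = { labels: ['x-axis', 'Requests', 'Errors'], visibility: [true, true, false] };
resolveVisibility(1, 'Requests', undefined, stored); // true
resolveVisibility(2, 'Errors', undefined, stored); // false
resolveVisibility(2, 'Latency', undefined, stored); // true (label mismatch, fall back to default)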

View File

@@ -204,78 +204,6 @@ describe('dashboardVariablesStore', () => {
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=undefined as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: undefined,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty string selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty array selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: [] as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',

View File

@@ -76,7 +76,7 @@ export function getVariableDependencyContext(): VariableFetchContext {
(variable) => {
if (
variable.type === 'DYNAMIC' &&
(variable.selectedValue === null || isEmpty(variable.selectedValue)) &&
variable.selectedValue === null &&
variable.allSelected === true
) {
return true;
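The predicate is narrowed: a DYNAMIC variable with allSelected=true now counts as having a value only when selectedValue is strictly null, which is why the empty-string and empty-array tests above are deleted. A minimal sketch of the narrowed check (the non-DYNAMIC fallback here is a simplifying assumption):
interface DashboardVariable {
	type: 'DYNAMIC' | 'QUERY' | 'CUSTOM';
	selectedValue: string | string[] | null;
	allSelected?: boolean;
}
function hasValueSelected(variable: DashboardVariable): boolean {
	// DYNAMIC + allSelected counts as "has a value" only for a strict null.
	if (
		variable.type === 'DYNAMIC' &&
		variable.selectedValue === null &&
		variable.allSelected === true
	) {
		return true;
	}
	// Simplifying assumption: anything non-empty counts for other cases.
	return variable.selectedValue !== null && variable.selectedValue.length > 0;
}
hasValueSelected({ type: 'DYNAMIC', selectedValue: null, allSelected: true }); // true
hasValueSelected({ type: 'DYNAMIC', selectedValue: '', allSelected: true }); // false (previously true)
hasValueSelected({ type: 'DYNAMIC', selectedValue: [], allSelected: true }); // false (previously true)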

View File

@@ -7,5 +7,4 @@ export interface UpdateProfileProps {
number_of_services: number;
number_of_hosts: number;
where_did_you_discover_signoz: string;
timeline_for_migrating_to_signoz: string;
}

View File

@@ -3,33 +3,3 @@
* Example: 1.5 → 2, 1.49 → 1
*/
export const roundHalfUp = (value: number): number => Math.floor(value + 0.5);
/**
* Rounds a number down to the nearest multiple of incr.
* Used for linear scale min so the axis starts on a clean tick.
*/
export function incrRoundDn(num: number, incr: number): number {
return Math.floor(num / incr) * incr;
}
/**
* Rounds a number up to the nearest multiple of incr.
* Used for linear scale max so the axis ends on a clean tick.
*/
export function incrRoundUp(num: number, incr: number): number {
return Math.ceil(num / incr) * incr;
}
/**
* Rounds a number to the nearest multiple of 10^dec.
* Used for decimal precision.
*/
export function roundDecimals(val: number, dec = 0): number {
if (Number.isInteger(val)) {
return val;
}
const p = 10 ** dec;
const n = val * p * (1 + Number.EPSILON);
return Math.round(n) / p;
}

View File

@@ -4073,16 +4073,6 @@
"@radix-ui/react-primitive" "1.0.3"
"@radix-ui/react-slot" "1.0.2"
"@radix-ui/react-collection@1.1.7":
version "1.1.7"
resolved "https://registry.yarnpkg.com/@radix-ui/react-collection/-/react-collection-1.1.7.tgz#d05c25ca9ac4695cc19ba91f42f686e3ea2d9aec"
integrity sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==
dependencies:
"@radix-ui/react-compose-refs" "1.1.2"
"@radix-ui/react-context" "1.1.2"
"@radix-ui/react-primitive" "2.1.3"
"@radix-ui/react-slot" "1.2.3"
"@radix-ui/react-compose-refs@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-compose-refs/-/react-compose-refs-1.0.0.tgz#37595b1f16ec7f228d698590e78eeed18ff218ae"
@@ -4169,11 +4159,6 @@
dependencies:
"@babel/runtime" "^7.13.10"
"@radix-ui/react-direction@1.1.1":
version "1.1.1"
resolved "https://registry.yarnpkg.com/@radix-ui/react-direction/-/react-direction-1.1.1.tgz#39e5a5769e676c753204b792fbe6cf508e550a14"
integrity sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==
"@radix-ui/react-dismissable-layer@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.0.0.tgz#35b7826fa262fd84370faef310e627161dffa76b"
@@ -4402,22 +4387,6 @@
dependencies:
"@radix-ui/react-slot" "1.2.4"
"@radix-ui/react-radio-group@^1.3.4":
version "1.3.8"
resolved "https://registry.yarnpkg.com/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz#93f102b5b948d602c2f2adb1bc5c347cbaf64bd9"
integrity sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==
dependencies:
"@radix-ui/primitive" "1.1.3"
"@radix-ui/react-compose-refs" "1.1.2"
"@radix-ui/react-context" "1.1.2"
"@radix-ui/react-direction" "1.1.1"
"@radix-ui/react-presence" "1.1.5"
"@radix-ui/react-primitive" "2.1.3"
"@radix-ui/react-roving-focus" "1.1.11"
"@radix-ui/react-use-controllable-state" "1.2.2"
"@radix-ui/react-use-previous" "1.1.1"
"@radix-ui/react-use-size" "1.1.1"
"@radix-ui/react-roving-focus@1.0.4":
version "1.0.4"
resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.0.4.tgz#e90c4a6a5f6ac09d3b8c1f5b5e81aab2f0db1974"
@@ -4434,21 +4403,6 @@
"@radix-ui/react-use-callback-ref" "1.0.1"
"@radix-ui/react-use-controllable-state" "1.0.1"
"@radix-ui/react-roving-focus@1.1.11":
version "1.1.11"
resolved "https://registry.yarnpkg.com/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz#ef54384b7361afc6480dcf9907ef2fedb5080fd9"
integrity sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==
dependencies:
"@radix-ui/primitive" "1.1.3"
"@radix-ui/react-collection" "1.1.7"
"@radix-ui/react-compose-refs" "1.1.2"
"@radix-ui/react-context" "1.1.2"
"@radix-ui/react-direction" "1.1.1"
"@radix-ui/react-id" "1.1.1"
"@radix-ui/react-primitive" "2.1.3"
"@radix-ui/react-use-callback-ref" "1.1.1"
"@radix-ui/react-use-controllable-state" "1.2.2"
"@radix-ui/react-slot@1.0.0":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.0.0.tgz#7fa805b99891dea1e862d8f8fbe07f4d6d0fd698"
@@ -5121,20 +5075,6 @@
tailwind-merge "^2.5.2"
tailwindcss-animate "^1.0.7"
"@signozhq/radio-group@0.0.2":
version "0.0.2"
resolved "https://registry.yarnpkg.com/@signozhq/radio-group/-/radio-group-0.0.2.tgz#4b13567bfee2645226f2cf41f261bbb288e1be4b"
integrity sha512-ahykmA5hPujOC964CFveMlQ12tWSyut2CUiFRqT1QxRkOLS2R44Qn2hh2psqJJ18JMX/24ZYCAIh9Bdd5XW+7g==
dependencies:
"@radix-ui/react-icons" "^1.3.0"
"@radix-ui/react-radio-group" "^1.3.4"
"@radix-ui/react-slot" "^1.1.0"
class-variance-authority "^0.7.0"
clsx "^2.1.1"
lucide-react "^0.445.0"
tailwind-merge "^2.5.2"
tailwindcss-animate "^1.0.7"
"@signozhq/resizable@0.0.0":
version "0.0.0"
resolved "https://registry.yarnpkg.com/@signozhq/resizable/-/resizable-0.0.0.tgz#a517818b9f9bcdaeafc55ae134be86522bc90e9f"

View File

@@ -3,8 +3,9 @@ package flagger
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
var (
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
)
func MustNewRegistry() featuretypes.Registry {
@@ -17,6 +18,14 @@ func MustNewRegistry() featuretypes.Registry {
DefaultVariant: featuretypes.MustNewName("disabled"),
Variants: featuretypes.NewBooleanVariants(),
},
&featuretypes.Feature{
Name: FeatureInterpolationEnabled,
Kind: featuretypes.KindBoolean,
Stage: featuretypes.StageExperimental,
Description: "Controls whether to enable interpolation",
DefaultVariant: featuretypes.MustNewName("disabled"),
Variants: featuretypes.NewBooleanVariants(),
},
&featuretypes.Feature{
Name: FeatureKafkaSpanEval,
Kind: featuretypes.KindBoolean,

View File

@@ -1,31 +0,0 @@
package identity
import (
"context"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Module interface {
// GetRoles gets all roles for an identity
GetRoles(ctx context.Context, identityID valuer.UUID) ([]types.Role, error)
// GetRolesForIdentities gets roles for multiple identities (batch)
GetRolesForIdentities(ctx context.Context, identityIDs []valuer.UUID) (map[valuer.UUID][]types.Role, error)
// CountByRoleAndOrgID counts identities with a specific role in an org
CountByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) (int64, error)
// CreateIdentityWithRoles creates an identity and assigns roles to it
CreateIdentityWithRoles(ctx context.Context, id valuer.UUID, orgID valuer.UUID, roles []types.Role) error
// AddRole adds a role to an identity
AddRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error
// RemoveRole removes a role from an identity
RemoveRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error
// DeleteIdentity deletes an identity and its associated roles
DeleteIdentity(ctx context.Context, identityID valuer.UUID) error
}

View File

@@ -1,111 +0,0 @@
package implidentity
import (
"context"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/identitytypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type module struct {
store *store
}
func NewModule(sqlstore sqlstore.SQLStore) identity.Module {
return &module{
store: newStore(sqlstore),
}
}
func (m *module) CreateIdentityWithRoles(ctx context.Context, id valuer.UUID, orgID valuer.UUID, roles []types.Role) error {
return m.store.RunInTx(ctx, func(ctx context.Context) error {
// Create identity
ident := identitytypes.NewStorableIdentity(id, orgID)
if err := m.store.CreateIdentity(ctx, ident); err != nil {
return err
}
// Create identity_role mappings for each role
for _, role := range roles {
roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
identityRole := identitytypes.NewStorableIdentityRole(id, roleName)
if err := m.store.CreateIdentityRole(ctx, identityRole); err != nil {
return err
}
}
return nil
})
}
func (m *module) AddRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error {
roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
identityRole := identitytypes.NewStorableIdentityRole(identityID, roleName)
return m.store.CreateIdentityRole(ctx, identityRole)
}
func (m *module) RemoveRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error {
roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
return m.store.DeleteIdentityRole(ctx, identityID, roleName)
}
func (m *module) GetRoles(ctx context.Context, identityID valuer.UUID) ([]types.Role, error) {
roleNames, err := m.store.GetRolesByIdentityID(ctx, identityID)
if err != nil {
return nil, err
}
roles := make([]types.Role, 0, len(roleNames))
for _, roleName := range roleNames {
roles = append(roles, roletypes.GetRoleFromManagedRoleName(roleName))
}
return roles, nil
}
func (m *module) GetRolesForIdentities(ctx context.Context, identityIDs []valuer.UUID) (map[valuer.UUID][]types.Role, error) {
roleNamesMap, err := m.store.GetRolesForIdentityIDs(ctx, identityIDs)
if err != nil {
return nil, err
}
result := make(map[valuer.UUID][]types.Role)
for _, id := range identityIDs {
if roleNames, ok := roleNamesMap[id.StringValue()]; ok {
roles := make([]types.Role, 0, len(roleNames))
for _, roleName := range roleNames {
roles = append(roles, roletypes.GetRoleFromManagedRoleName(roleName))
}
result[id] = roles
} else {
result[id] = []types.Role{}
}
}
return result, nil
}
func (m *module) DeleteIdentity(ctx context.Context, identityID valuer.UUID) error {
return m.store.RunInTx(ctx, func(ctx context.Context) error {
// Delete identity_role first (FK constraint)
if err := m.store.DeleteIdentityRoles(ctx, identityID); err != nil {
return err
}
// Delete identity
if err := m.store.DeleteIdentity(ctx, identityID); err != nil {
return err
}
return nil
})
}
func (m *module) CountByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) (int64, error) {
roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
return m.store.CountByRoleNameAndOrgID(ctx, roleName, orgID)
}

View File

@@ -1,152 +0,0 @@
package implidentity
import (
"context"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/types/identitytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
type store struct {
sqlstore sqlstore.SQLStore
}
func newStore(sqlstore sqlstore.SQLStore) *store {
return &store{sqlstore: sqlstore}
}
func (s *store) CreateIdentity(ctx context.Context, identity *identitytypes.StorableIdentity) error {
_, err := s.
sqlstore.
BunDBCtx(ctx).
NewInsert().
Model(identity).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create identity")
}
return nil
}
func (s *store) CreateIdentityRole(ctx context.Context, identityRole *identitytypes.StorableIdentityRole) error {
_, err := s.
sqlstore.
BunDBCtx(ctx).
NewInsert().
Model(identityRole).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create identity role")
}
return nil
}
func (s *store) DeleteIdentityRole(ctx context.Context, identityID valuer.UUID, roleName string) error {
_, err := s.
sqlstore.
BunDBCtx(ctx).
NewDelete().
Model(&identitytypes.StorableIdentityRole{}).
Where("identity_id = ?", identityID).
Where("role_name = ?", roleName).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity role")
}
return nil
}
func (s *store) DeleteIdentityRoles(ctx context.Context, identityID valuer.UUID) error {
_, err := s.
sqlstore.
BunDBCtx(ctx).
NewDelete().
Model(&identitytypes.StorableIdentityRole{}).
Where("identity_id = ?", identityID).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity roles")
}
return nil
}
func (s *store) DeleteIdentity(ctx context.Context, identityID valuer.UUID) error {
_, err := s.
sqlstore.
BunDBCtx(ctx).
NewDelete().
Model(&identitytypes.StorableIdentity{}).
Where("id = ?", identityID).
Exec(ctx)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity")
}
return nil
}
func (s *store) GetRolesByIdentityID(ctx context.Context, identityID valuer.UUID) ([]string, error) {
var roleNames []string
err := s.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model((*identitytypes.StorableIdentityRole)(nil)).
Column("role_name").
Where("identity_id = ?", identityID).
Scan(ctx, &roleNames)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get roles for identity %s", identityID)
}
return roleNames, nil
}
func (s *store) GetRolesForIdentityIDs(ctx context.Context, identityIDs []valuer.UUID) (map[string][]string, error) {
if len(identityIDs) == 0 {
return map[string][]string{}, nil
}
var results []identitytypes.StorableIdentityRole
err := s.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(&results).
Column("identity_id", "role_name").
Where("identity_id IN (?)", bun.In(identityIDs)).
Scan(ctx)
if err != nil {
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get roles for identities")
}
roleMap := make(map[string][]string)
for _, r := range results {
roleMap[r.IdentityID.StringValue()] = append(roleMap[r.IdentityID.StringValue()], r.RoleName)
}
return roleMap, nil
}
func (s *store) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
return s.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
return cb(ctx)
})
}
func (s *store) CountByRoleNameAndOrgID(ctx context.Context, roleName string, orgID valuer.UUID) (int64, error) {
count, err := s.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model((*identitytypes.StorableIdentityRole)(nil)).
Join("JOIN identity ON identity.id = identity_role.identity_id").
Where("identity_role.role_name = ?", roleName).
Where("identity.org_id = ?", orgID).
Count(ctx)
if err != nil {
return 0, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to count identities by role")
}
return int64(count), nil
}

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/session"
"github.com/SigNoz/signoz/pkg/modules/user"
@@ -29,10 +28,9 @@ type module struct {
authDomain authdomain.Module
tokenizer tokenizer.Tokenizer
orgGetter organization.Getter
identity identity.Module
}
func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter, identity identity.Module) session.Module {
func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter) session.Module {
return &module{
settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/session/implsession"),
authNs: authNs,
@@ -41,7 +39,6 @@ func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.A
authDomain: authDomain,
tokenizer: tokenizer,
orgGetter: orgGetter,
identity: identity,
}
}
@@ -144,33 +141,21 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
roleMapping := authDomain.AuthDomainConfig().RoleMapping
role := roleMapping.NewRoleFromCallbackIdentity(callbackIdentity)
newUser, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, callbackIdentity.OrgID)
user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID)
if err != nil {
return "", err
}
createdUser, err := module.user.GetOrCreateUser(ctx, newUser, user.WithRole(role))
user, err = module.user.GetOrCreateUser(ctx, user)
if err != nil {
return "", err
}
if err := createdUser.ErrIfRoot(); err != nil {
if err := user.ErrIfRoot(); err != nil {
return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
}
// Get roles from identity module
userRoles, err := module.identity.GetRoles(ctx, createdUser.IdentityID)
if err != nil {
return "", err
}
// Use first role for token creation (backward compatibility with current token system)
var primaryRole types.Role
if len(userRoles) > 0 {
primaryRole = userRoles[0]
}
token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(createdUser.ID, createdUser.OrgID, createdUser.Email, primaryRole), map[string]string{})
token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
if err != nil {
return "", err
}

View File

@@ -18,11 +18,7 @@ type RootConfig struct {
Enabled bool `mapstructure:"enabled"`
Email valuer.Email `mapstructure:"email"`
Password string `mapstructure:"password"`
Org OrgConfig `mapstructure:"org"`
}
type OrgConfig struct {
Name string `mapstructure:"name"`
OrgName string `mapstructure:"org_name"`
}
type PasswordConfig struct {
@@ -48,9 +44,7 @@ func newConfig() factory.Config {
},
Root: RootConfig{
Enabled: false,
Org: OrgConfig{
Name: "default",
},
OrgName: "default",
},
}
}

View File

@@ -12,7 +12,6 @@ import (
"github.com/SigNoz/signoz/pkg/emailing"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/user"
root "github.com/SigNoz/signoz/pkg/modules/user"
@@ -34,11 +33,10 @@ type Module struct {
authz authz.AuthZ
analytics analytics.Analytics
config user.Config
identity identity.Module
}
// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config, identity identity.Module) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
return &Module{
store: store,
@@ -49,7 +47,6 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
analytics: analytics,
authz: authz,
config: config,
identity: identity,
}
}
@@ -59,7 +56,7 @@ func (m *Module) AcceptInvite(ctx context.Context, token string, password string
return nil, err
}
user, err := types.NewUser(invite.Name, invite.Email, invite.OrgID)
user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID)
if err != nil {
return nil, err
}
@@ -69,7 +66,7 @@ func (m *Module) AcceptInvite(ctx context.Context, token string, password string
return nil, err
}
err = m.CreateUser(ctx, user, root.WithFactorPassword(factorPassword), root.WithRole(invite.Role))
err = m.CreateUser(ctx, user, root.WithFactorPassword(factorPassword))
if err != nil {
return nil, err
}
@@ -175,21 +172,14 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID)
func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
createUserOpts := root.NewCreateUserOptions(opts...)
role := createUserOpts.Role
// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
if err != nil {
return err
}
if err := module.store.RunInTx(ctx, func(ctx context.Context) error {
// Create identity with roles via identity module
if err := module.identity.CreateIdentityWithRoles(ctx, input.ID, input.OrgID, []types.Role{role}); err != nil {
return err
}
// Create user
if err := module.store.CreateUser(ctx, input); err != nil {
return err
}
@@ -205,7 +195,7 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
return err
}
traitsOrProperties := types.NewTraitsFromUser(input, []types.Role{role})
traitsOrProperties := types.NewTraitsFromUser(input)
module.analytics.IdentifyUser(ctx, input.OrgID.String(), input.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, input.OrgID.String(), input.ID.String(), "User Created", traitsOrProperties)
@@ -222,7 +212,40 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
return nil, errors.WithAdditionalf(err, "cannot update root user")
}
existingUser.Update(user.DisplayName)
requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
if err != nil {
return nil, err
}
if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
}
// Make sure that the request is not demoting the last admin user.
if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
if err != nil {
return nil, err
}
if len(adminUsers) == 1 {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot demote the last admin")
}
}
if user.Role != "" && user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil),
)
if err != nil {
return nil, err
}
}
existingUser.Update(user.DisplayName, user.Role)
if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return nil, err
}
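
Condensed, the guards added above enforce three rules: a role change is a no-op when the role is unchanged, only admins may change roles, and the last admin cannot be demoted. A self-contained sketch with hypothetical local types:

```go
package main

import (
	"errors"
	"fmt"
)

type Role string

const RoleAdmin Role = "admin"

// canChangeRole condenses the guard order from UpdateUser above.
// adminCount is the number of admins in the org (cf. GetUsersByRoleAndOrgID).
func canChangeRole(requestor, old, next Role, adminCount int) error {
	if next == "" || next == old {
		return nil // nothing to change
	}
	if requestor != RoleAdmin {
		return errors.New("only admins can change roles")
	}
	if old == RoleAdmin && adminCount == 1 {
		return errors.New("cannot demote the last admin")
	}
	return nil
}

func main() {
	fmt.Println(canChangeRole(RoleAdmin, RoleAdmin, "viewer", 1))
	// Output: cannot demote the last admin
}
```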
@@ -235,9 +258,7 @@ func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user
return err
}
// Get roles from identity module for analytics
roles, _ := module.identity.GetRoles(ctx, user.IdentityID)
traits := types.NewTraitsFromUser(user, roles)
traits := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
@@ -262,37 +283,19 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}
// Get user's roles from identity module
userRoles, err := module.identity.GetRoles(ctx, user.IdentityID)
// don't allow deleting the last admin user
adminUsers, err := module.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
if err != nil {
return err
}
// Check if user has admin role
hasAdminRole := slices.Contains(userRoles, types.RoleAdmin)
// don't allow deleting the last admin user
if hasAdminRole {
adminCount, err := module.identity.CountByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
if err != nil {
return err
}
if adminCount == 1 {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}
if len(adminUsers) == 1 && user.Role == types.RoleAdmin {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
}
// Revoke all roles for the user
for _, userRole := range userRoles {
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(userRole), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
}
// Delete identity and identity_role records
if err := module.identity.DeleteIdentity(ctx, user.IdentityID); err != nil {
// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
if err != nil {
return err
}
@@ -308,15 +311,6 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
}
func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID valuer.UUID) (*types.ResetPasswordToken, error) {
user, err := module.store.GetUser(ctx, userID)
if err != nil {
return nil, err
}
if err := user.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot reset password for root user")
}
password, err := module.store.GetPasswordByUserID(ctx, userID)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
@@ -542,7 +536,7 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
return err
}
err = module.createUserWithoutGrant(ctx, user, root.WithFactorPassword(password), root.WithRole(types.RoleAdmin))
err = module.createUserWithoutGrant(ctx, user, root.WithFactorPassword(password))
if err != nil {
return err
}
@@ -572,15 +566,7 @@ func (module *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
func (module *Module) createUserWithoutGrant(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
createUserOpts := root.NewCreateUserOptions(opts...)
role := createUserOpts.Role
if err := module.store.RunInTx(ctx, func(ctx context.Context) error {
// Create identity with roles via identity module
if err := module.identity.CreateIdentityWithRoles(ctx, input.ID, input.OrgID, []types.Role{role}); err != nil {
return err
}
// Create user
if err := module.store.CreateUser(ctx, input); err != nil {
return err
}
@@ -596,7 +582,7 @@ func (module *Module) createUserWithoutGrant(ctx context.Context, input *types.U
return err
}
traitsOrProperties := types.NewTraitsFromUser(input, []types.Role{role})
traitsOrProperties := types.NewTraitsFromUser(input)
module.analytics.IdentifyUser(ctx, input.OrgID.String(), input.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, input.OrgID.String(), input.ID.String(), "User Created", traitsOrProperties)

View File

@@ -2,13 +2,11 @@ package impluser
import (
"context"
"slices"
"time"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
@@ -23,7 +21,6 @@ type service struct {
module user.Module
orgGetter organization.Getter
authz authz.AuthZ
identity identity.Module
config user.RootConfig
stopC chan struct{}
}
@@ -34,7 +31,6 @@ func NewService(
module user.Module,
orgGetter organization.Getter,
authz authz.AuthZ,
identity identity.Module,
config user.RootConfig,
) user.Service {
return &service{
@@ -43,7 +39,6 @@ func NewService(
module: module,
orgGetter: orgGetter,
authz: authz,
identity: identity,
config: config,
stopC: make(chan struct{}),
}
@@ -83,10 +78,10 @@ func (s *service) Stop(ctx context.Context) error {
}
func (s *service) reconcile(ctx context.Context) error {
org, err := s.orgGetter.GetByName(ctx, s.config.Org.Name)
org, err := s.orgGetter.GetByName(ctx, s.config.OrgName)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
newOrg := types.NewOrganization(s.config.Org.Name, s.config.Org.Name)
newOrg := types.NewOrganizationWithName(s.config.OrgName)
_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
return err
}
@@ -117,39 +112,20 @@ func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID
}
if existingUser != nil {
// Get user's current roles from identity module
userRoles, err := s.identity.GetRoles(ctx, existingUser.IdentityID)
if err != nil {
return err
}
oldRole := existingUser.Role
existingUser.PromoteToRoot()
if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return err
}
// If user doesn't have admin role, modify the grant and update identity roles
if !slices.Contains(userRoles, types.RoleAdmin) {
// Update authz grants and identity roles
for _, oldRole := range userRoles {
// Update authz grant
if err := s.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
); err != nil {
return err
}
// Update identity role
if err := s.identity.RemoveRole(ctx, existingUser.IdentityID, orgID, oldRole); err != nil {
return err
}
}
// Add admin role to identity
if err := s.identity.AddRole(ctx, existingUser.IdentityID, orgID, types.RoleAdmin); err != nil {
if oldRole != types.RoleAdmin {
if err := s.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
); err != nil {
return err
}
}
@@ -168,7 +144,7 @@ func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID
return err
}
return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword), user.WithRole(types.RoleAdmin))
return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
}
func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {

View File

@@ -192,6 +192,24 @@ func (store *store) GetUserByEmailAndOrgID(ctx context.Context, email valuer.Ema
return user, nil
}
func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) ([]*types.User, error) {
var users []*types.User
err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(&users).
Where("org_id = ?", orgID).
Where("role = ?", role).
Scan(ctx)
if err != nil {
return nil, err
}
return users, nil
}
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
_, err := store.
sqlstore.
@@ -200,6 +218,7 @@ func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *typ
Model(user).
Column("display_name").
Column("email").
Column("role").
Column("is_root").
Column("updated_at").
Where("org_id = ?", orgID).
@@ -600,7 +619,6 @@ func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
}
return user, nil
}

View File

@@ -7,7 +7,6 @@ import (
type createUserOptions struct {
FactorPassword *types.FactorPassword
Role types.Role
}
type CreateUserOption func(*createUserOptions)
@@ -18,16 +17,9 @@ func WithFactorPassword(factorPassword *types.FactorPassword) CreateUserOption {
}
}
func WithRole(role types.Role) CreateUserOption {
return func(o *createUserOptions) {
o.Role = role
}
}
func NewCreateUserOptions(opts ...CreateUserOption) *createUserOptions {
o := &createUserOptions{
FactorPassword: nil,
Role: types.RoleViewer, // default role
}
for _, opt := range opts {
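
With `WithRole` removed, what remains is the standard functional-options idiom. A standalone sketch mirroring the shape above (`FactorPassword` is reduced to a stub):

```go
package main

import "fmt"

// FactorPassword is a stub standing in for types.FactorPassword.
type FactorPassword struct{ Hash string }

type createUserOptions struct {
	FactorPassword *FactorPassword
}

type CreateUserOption func(*createUserOptions)

func WithFactorPassword(fp *FactorPassword) CreateUserOption {
	return func(o *createUserOptions) { o.FactorPassword = fp }
}

func NewCreateUserOptions(opts ...CreateUserOption) *createUserOptions {
	o := &createUserOptions{FactorPassword: nil}
	for _, opt := range opts {
		opt(o)
	}
	return o
}

func main() {
	o := NewCreateUserOptions(WithFactorPassword(&FactorPassword{Hash: "bcrypt:…"}))
	fmt.Println(o.FactorPassword != nil) // true
}
```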

View File

@@ -2050,7 +2050,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
return
}
organization := types.NewOrganization(req.OrgDisplayName, req.OrgName)
organization := types.NewOrganization(req.OrgDisplayName)
user, errv2 := aH.Signoz.Modules.User.CreateFirstUser(r.Context(), organization, req.Name, req.Email, req.Password)
if errv2 != nil {
render.Error(w, errv2)

View File

@@ -43,11 +43,11 @@ var (
// FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationConverter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataConverter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateConverter
case "percent", "percentunit", "%":
return PercentConverter
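
A hedged usage sketch of the dispatcher above, written as an in-package example; it assumes only the `Convert(Value, Unit) Value` shape seen in this package's tests and that `Value.F`/`Value.U` print as shown:

```go
package converter

import "fmt"

// ExampleFromUnit routes a value through the converter picked by FromUnit.
func ExampleFromUnit() {
	c := FromUnit("ms")
	v := c.Convert(Value{F: 1500, U: "ms"}, "s")
	fmt.Println(v.F, v.U)
	// Output: 1.5 s
}
```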

View File

@@ -58,80 +58,36 @@ func (*dataConverter) Name() string {
return "data"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataUnit(u Unit) float64 {
switch u {
case "bytes", "By": // base 2
return Byte
case "decbytes": // base 10
return Byte
case "bits", "bit": // base 2
case "bits": // base 2
return Bit
case "decbits": // base 10
return Bit
case "kbytes", "KiBy": // base 2
case "kbytes", "kBy": // base 2
return Kibibyte
case "decKbytes", "deckbytes", "kBy": // base 10
case "decKbytes", "deckbytes": // base 10
return Kilobyte
case "mbytes", "MiBy": // base 2
case "mbytes", "MBy": // base 2
return Mebibyte
case "decMbytes", "decmbytes", "MBy": // base 10
case "decMbytes", "decmbytes": // base 10
return Megabyte
case "gbytes", "GiBy": // base 2
case "gbytes", "GBy": // base 2
return Gibibyte
case "decGbytes", "decgbytes", "GBy": // base 10
case "decGbytes", "decgbytes": // base 10
return Gigabyte
case "tbytes", "TiBy": // base 2
case "tbytes", "TBy": // base 2
return Tebibyte
case "decTbytes", "dectbytes", "TBy": // base 10
case "decTbytes", "dectbytes": // base 10
return Terabyte
case "pbytes", "PiBy": // base 2
case "pbytes", "PBy": // base 2
return Pebibyte
case "decPbytes", "decpbytes", "PBy": // base 10
case "decPbytes", "decpbytes": // base 10
return Petabyte
case "EBy": // base 10
return Exabyte
case "ZBy": // base 10
return Zettabyte
case "YBy": // base 10
return Yottabyte
case "Kibit": // base 2
return Kibibit
case "Mibit": // base 2
return Mebibit
case "Gibit": // base 2
return Gibibit
case "Tibit": // base 2
return Tebibit
case "Pibit": // base 2
return Pebibit
case "EiBy": // base 2
return Exbibyte
case "ZiBy": // base 2
return Zebibyte
case "YiBy": // base 2
return Yobibyte
case "kbit": // base 10
return Kilobit
case "Mbit": // base 10
return Megabit
case "Gbit": // base 10
return Gigabit
case "Tbit": // base 10
return Terabit
case "Pbit": // base 10
return Petabit
case "Ebit": // base 10
return Exabit
case "Zbit": // base 10
return Zettabit
case "Ybit": // base 10
return Yottabit
default:
return 1
}
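
The factor table above only works because every unit is expressed against one common base, so any pair converts as `value * factor(from) / factor(to)`. A standalone sketch with assumed factors (bits as the base, values chosen for the sketch only):

```go
package main

import "fmt"

// Assumed base factors: everything is expressed in bits, so
// byte = 8, kibibyte = 8*1024, kilobyte = 8*1000, and so on.
const (
	Bit      = 1.0
	Byte     = 8 * Bit
	Kibibyte = 1024 * Byte
	Kilobyte = 1000 * Byte
)

// convert scales a value through the common base unit, which is all a
// factor table like FromDataUnit needs to support every unit pair.
func convert(v, fromFactor, toFactor float64) float64 {
	return v * fromFactor / toFactor
}

func main() {
	fmt.Println(convert(1024, Byte, Kibibyte)) // 1
	fmt.Println(convert(1, Kilobyte, Byte))    // 1000
}
```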

View File

@@ -54,12 +54,6 @@ func (*dataRateConverter) Name() string {
return "data_rate"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataRateUnit(u Unit) float64 {
// See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
switch u {
@@ -71,70 +65,46 @@ func FromDataRateUnit(u Unit) float64 {
return BitPerSecond
case "bps", "bit/s": // bits/sec(SI)
return BitPerSecond
case "KiBs", "KiBy/s": // kibibytes/sec
case "KiBs": // kibibytes/sec
return KibibytePerSecond
case "Kibits", "Kibit/s": // kibibits/sec
case "Kibits": // kibibits/sec
return KibibitPerSecond
case "KBs", "kBy/s": // kilobytes/sec
return KilobytePerSecond
case "Kbits", "kbit/s": // kilobits/sec
return KilobitPerSecond
case "MiBs", "MiBy/s": // mebibytes/sec
case "MiBs": // mebibytes/sec
return MebibytePerSecond
case "Mibits", "Mibit/s": // mebibits/sec
case "Mibits": // mebibits/sec
return MebibitPerSecond
case "MBs", "MBy/s": // megabytes/sec
return MegabytePerSecond
case "Mbits", "Mbit/s": // megabits/sec
return MegabitPerSecond
case "GiBs", "GiBy/s": // gibibytes/sec
case "GiBs": // gibibytes/sec
return GibibytePerSecond
case "Gibits", "Gibit/s": // gibibits/sec
case "Gibits": // gibibits/sec
return GibibitPerSecond
case "GBs", "GBy/s": // gigabytes/sec
return GigabytePerSecond
case "Gbits", "Gbit/s": // gigabits/sec
return GigabitPerSecond
case "TiBs", "TiBy/s": // tebibytes/sec
case "TiBs": // tebibytes/sec
return TebibytePerSecond
case "Tibits", "Tibit/s": // tebibits/sec
case "Tibits": // tebibits/sec
return TebibitPerSecond
case "TBs", "TBy/s": // terabytes/sec
return TerabytePerSecond
case "Tbits", "Tbit/s": // terabits/sec
return TerabitPerSecond
case "PiBs", "PiBy/s": // pebibytes/sec
case "PiBs": // pebibytes/sec
return PebibytePerSecond
case "Pibits", "Pibit/s": // pebibits/sec
case "Pibits": // pebibits/sec
return PebibitPerSecond
case "PBs", "PBy/s": // petabytes/sec
return PetabytePerSecond
case "Pbits", "Pbit/s": // petabits/sec
return PetabitPerSecond
case "EBy/s": // exabytes/sec
return ExabytePerSecond
case "Ebit/s": // exabits/sec
return ExabitPerSecond
case "EiBy/s": // exbibytes/sec
return ExbibytePerSecond
case "Eibit/s": // exbibits/sec
return ExbibitPerSecond
case "ZBy/s": // zettabytes/sec
return ZettabytePerSecond
case "Zbit/s": // zettabits/sec
return ZettabitPerSecond
case "ZiBy/s": // zebibytes/sec
return ZebibytePerSecond
case "Zibit/s": // zebibits/sec
return ZebibitPerSecond
case "YBy/s": // yottabytes/sec
return YottabytePerSecond
case "Ybit/s": // yottabits/sec
return YottabitPerSecond
case "YiBy/s": // yobibytes/sec
return YobibytePerSecond
case "Yibit/s": // yobibits/sec
return YobibitPerSecond
default:
return 1
}

View File

@@ -75,83 +75,3 @@ func TestDataRate(t *testing.T) {
// 1024 * 1024 * 1024 bytes = 1 gbytes
assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
}
func TestDataRateConversionUCUMUnit(t *testing.T) {
dataRateConverter := NewDataRateConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Binary byte scaling
{name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
{name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
{name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
{name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
{name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
{name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
{name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
{name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
{name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
{name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
{name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
{name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
{name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
{name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
{name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
// Unit alias
{name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
{name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
{name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
{name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
{name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
{name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
{name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
{name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
{name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
{name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
{name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
{name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
{name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
{name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
{name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
{name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
// Binary bit scaling (Exbi, Zebi, Yobi)
{name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
{name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
{name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
{name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
{name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
{name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
// Bytes to bits (Exbi, Zebi, Yobi)
{name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
{name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
{name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
// 1024 bytes = 1 kbytes
assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
// 1 mbytes = 1024 kbytes
@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
// 1024 kbytes = 1 mbytes
assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
// 1 mbytes = 1024 * 1024 bytes
assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
// 1024 mbytes = 1 gbytes
@@ -45,90 +45,10 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
// 1024 tbytes = 1 PiBy
assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
// 1 pbytes = 1024 tbytes
assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
// 1024 TiBy = 1 pbytes
assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
}
func TestDataConversionUCUMUnit(t *testing.T) {
dataConverter := NewDataConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Bits to bytes
{name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
{name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
// Binary byte scaling
{name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
{name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
{name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
{name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
{name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
{name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
{name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
{name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
{name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
{name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
// SI bit scaling
{name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
{name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
{name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
{name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
{name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
{name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
{name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
{name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
{name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
{name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
{name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
{name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
{name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
{name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
{name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
{name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
{name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
{name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
{name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
{name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
{name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
{name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
{name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
{name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
{name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
{name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
{name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
{name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
{name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
{name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
{name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
{name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
{name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
{name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
{name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
{name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
// 1 pbytes = 1024 TBy
assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
}

View File

@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
return Hour
case "d":
return Day
case "w", "wk":
case "w":
return Week
default:
return Second
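
After dropping the `wk` alias, only `w` maps to `Week`, and any unrecognized unit falls through to `Second`. A standalone sketch of that fallback, with assumed second-based factors:

```go
package main

import "fmt"

type Duration float64

// Assumed factors (in seconds) for the sketch.
const (
	Second Duration = 1
	Week   Duration = 7 * 24 * 60 * 60
)

// fromTimeUnit mirrors the switch above after the "wk" alias was dropped:
// anything unrecognized falls through to Second.
func fromTimeUnit(u string) Duration {
	switch u {
	case "w":
		return Week
	default:
		return Second
	}
}

func main() {
	fmt.Println(fromTimeUnit("w"), fromTimeUnit("wk")) // 604800 1
}
```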

View File

@@ -54,13 +54,4 @@ func TestDurationConvert(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
// 1000000000 ns = 1 s
assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))
// 7 d = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
// 1 wk = 7 d
assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
// 1 wk = 168 h
assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
// 604800 s = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
}

View File

@@ -24,62 +24,30 @@ func (f *dataFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value))
case "decbytes":
return humanize.Bytes(uint64(value))
case "bits", "bit":
// humanize.IBytes/Bytes doesn't support bits
// and returns 0 B for values less than a byte
if value < 8 {
return fmt.Sprintf("%v b", value)
}
return humanize.IBytes(uint64(value / 8))
case "bits":
return humanize.IBytes(uint64(value * converter.Bit))
case "decbits":
if value < 8 {
return fmt.Sprintf("%v b", value)
}
return humanize.Bytes(uint64(value / 8))
case "kbytes", "KiBy":
return humanize.Bytes(uint64(value * converter.Bit))
case "kbytes", "kBy":
return humanize.IBytes(uint64(value * converter.Kibibit))
case "Kibit":
return humanize.IBytes(uint64(value * converter.Kibibit / 8))
case "decKbytes", "deckbytes", "kBy":
return humanize.Bytes(uint64(value * converter.Kilobit))
case "kbit":
return humanize.Bytes(uint64(value * converter.Kilobit / 8))
case "mbytes", "MiBy":
case "decKbytes", "deckbytes":
return humanize.IBytes(uint64(value * converter.Kilobit))
case "mbytes", "MBy":
return humanize.IBytes(uint64(value * converter.Mebibit))
case "Mibit":
return humanize.IBytes(uint64(value * converter.Mebibit / 8))
case "decMbytes", "decmbytes", "MBy":
case "decMbytes", "decmbytes":
return humanize.Bytes(uint64(value * converter.Megabit))
case "Mbit":
return humanize.Bytes(uint64(value * converter.Megabit / 8))
case "gbytes", "GiBy":
case "gbytes", "GBy":
return humanize.IBytes(uint64(value * converter.Gibibit))
case "Gibit":
return humanize.IBytes(uint64(value * converter.Gibibit / 8))
case "decGbytes", "decgbytes", "GBy":
case "decGbytes", "decgbytes":
return humanize.Bytes(uint64(value * converter.Gigabit))
case "Gbit":
return humanize.Bytes(uint64(value * converter.Gigabit / 8))
case "tbytes", "TiBy":
case "tbytes", "TBy":
return humanize.IBytes(uint64(value * converter.Tebibit))
case "Tibit":
return humanize.IBytes(uint64(value * converter.Tebibit / 8))
case "decTbytes", "dectbytes", "TBy":
case "decTbytes", "dectbytes":
return humanize.Bytes(uint64(value * converter.Terabit))
case "Tbit":
return humanize.Bytes(uint64(value * converter.Terabit / 8))
case "pbytes", "PiBy":
case "pbytes", "PBy":
return humanize.IBytes(uint64(value * converter.Pebibit))
case "Pbit":
return humanize.Bytes(uint64(value * converter.Petabit / 8))
case "decPbytes", "decpbytes", "PBy":
case "decPbytes", "decpbytes":
return humanize.Bytes(uint64(value * converter.Petabit))
case "EiBy":
return humanize.IBytes(uint64(value * converter.Exbibit))
case "Ebit":
return humanize.Bytes(uint64(value * converter.Exabit / 8))
case "EBy":
return humanize.Bytes(uint64(value * converter.Exabit))
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)
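
The formatter's core split is between `humanize.IBytes` (IEC) and `humanize.Bytes` (SI), with each case first scaling the value into bytes; the data-rate formatter below follows the same split and appends `/s`. A minimal standalone illustration of the two output styles:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	fmt.Println(humanize.IBytes(1536)) // 1.5 KiB (IEC, powers of 1024)
	fmt.Println(humanize.Bytes(1500))  // 1.5 kB (SI, powers of 1000)
}
```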

View File

@@ -25,66 +25,49 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
case "Bps", "By/s":
return humanize.Bytes(uint64(value)) + "/s"
case "binbps":
// humanize.IBytes/Bytes doesn't support bits
// and returns 0 B for values less than a byte
if value < 8 {
return fmt.Sprintf("%v b/s", value)
}
return humanize.IBytes(uint64(value/8)) + "/s"
return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
case "bps", "bit/s":
if value < 8 {
return fmt.Sprintf("%v b/s", value)
}
return humanize.Bytes(uint64(value/8)) + "/s"
case "KiBs", "KiBy/s":
return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
case "KiBs":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
case "Kibits", "Kibit/s":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond/8)) + "/s"
case "Kibits":
return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
case "KBs", "kBy/s":
return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
case "Kbits", "kbit/s":
return humanize.Bytes(uint64(value*converter.KilobitPerSecond/8)) + "/s"
case "MiBs", "MiBy/s":
return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
case "MiBs":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
case "Mibits", "Mibit/s":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond/8)) + "/s"
case "Mibits":
return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
case "MBs", "MBy/s":
return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
case "Mbits", "Mbit/s":
return humanize.Bytes(uint64(value*converter.MegabitPerSecond/8)) + "/s"
case "GiBs", "GiBy/s":
return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
case "GiBs":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
case "Gibits", "Gibit/s":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond/8)) + "/s"
case "Gibits":
return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
case "GBs", "GBy/s":
return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
case "Gbits", "Gbit/s":
return humanize.Bytes(uint64(value*converter.GigabitPerSecond/8)) + "/s"
case "TiBs", "TiBy/s":
return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
case "TiBs":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
case "Tibits", "Tibit/s":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond/8)) + "/s"
case "Tibits":
return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
case "TBs", "TBy/s":
return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
case "Tbits", "Tbit/s":
return humanize.Bytes(uint64(value*converter.TerabitPerSecond/8)) + "/s"
case "PiBs", "PiBy/s":
return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
case "PiBs":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
case "Pibits", "Pibit/s":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond/8)) + "/s"
case "Pibits":
return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
case "PBs", "PBy/s":
return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
case "Pbits", "Pbit/s":
return humanize.Bytes(uint64(value*converter.PetabitPerSecond/8)) + "/s"
// Exa units
case "EBy/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
case "Ebit/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond/8)) + "/s"
case "EiBy/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
case "Eibit/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond/8)) + "/s"
return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)

View File

@@ -1,150 +0,0 @@
package formatter
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestDataRateFormatterComprehensive(t *testing.T) {
dataRateFormatter := NewDataRateFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Bits/sec - binbps, bps
{name: "binbps as Bps", value: 7, unit: "binbps", expected: "7 b/s"},
{name: "100 binbps as 12 Bps", value: 100, unit: "binbps", expected: "12 B/s"},
{name: "binbps as 23 GiBs", value: 8 * 1024 * 1024 * 1024 * 23, unit: "binbps", expected: "23 GiB/s"},
// SI Bits/sec - bps, bit/s
{name: "bps as Bps", value: 5, unit: "bps", expected: "5 b/s"},
{name: "200 bitps as 25 Bps", value: 200, unit: "bit/s", expected: "25 B/s"},
{name: "bitps as MBs", value: 8 * 1000 * 1000 * 7, unit: "bit/s", expected: "7.0 MB/s"},
// IEC Base bytes/sec - binBps
{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},
// SI Base bytes/sec - Bps, By/s
{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},
// Kibibytes/sec - KiBs, KiBy/s
{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},
// Kibibits/sec - Kibits, Kibit/s
{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "128 B/s"},
{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "5.3 MiB/s"},
{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"},
// Kilobytes/sec (SI) - KBs, kBy/s
{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},
// Kilobits/sec (SI) - Kbits, kbit/s
{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "125 B/s"},
{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "125 B/s"},
// Mebibytes/sec - MiBs, MiBy/s
{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},
// Mebibits/sec - Mibits, Mibit/s
{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "5.0 MiB/s"},
{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "1.3 MiB/s"},
// Megabytes/sec (SI) - MBs, MBy/s
{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},
// Megabits/sec (SI) - Mbits, Mbit/s
{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "125 kB/s"},
{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "125 kB/s"},
// Gibibytes/sec - GiBs, GiBy/s
{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},
// Gibibits/sec - Gibits, Gibit/s
{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "5.3 TiB/s"},
{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "5.3 TiB/s"},
// Gigabytes/sec (SI) - GBs, GBy/s
{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},
// Gigabits/sec (SI) - Gbits, Gbit/s
{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "5.3 TB/s"},
{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "5.3 TB/s"},
// Tebibytes/sec - TiBs, TiBy/s
{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},
// Tebibits/sec - Tibits, Tibit/s
{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "5.3 PiB/s"},
{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "5.3 PiB/s"},
// Terabytes/sec (SI) - TBs, TBy/s
{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},
// Terabits/sec (SI) - Tbits, Tbit/s
{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "5.3 PB/s"},
{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "5.3 PB/s"},
// Pebibytes/sec - PiBs, PiBy/s
{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},
// Pebibits/sec - Pibits, Pibit/s
{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "1.3 EiB/s"},
{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "1.3 EiB/s"},
// Petabytes/sec (SI) - PBs, PBy/s
{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},
// Petabits/sec (SI) - Pbits, Pbit/s
{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "5.3 PB/s"},
{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "5.3 PB/s"},
// Exabytes/sec (SI) - EBy/s
{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},
// Exabits/sec (SI) - Ebit/s
{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "1.3 EB/s"},
// Exbibytes/sec (IEC) - EiBy/s
{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},
// Exbibits/sec (IEC) - Eibit/s
{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "1.3 EiB/s"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -14,174 +14,21 @@ func TestData(t *testing.T) {
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}
func TestDataFormatterComprehensive(t *testing.T) {
dataFormatter := NewDataFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Bits - bits, bit
{name: "bits: 1", value: 1, unit: "bits", expected: "1 b"},
{name: "bits: 7", value: 7, unit: "bit", expected: "7 b"},
{name: "bits: to MiB = 8 * 1024 * 1024 * 9", value: 8 * 1024 * 1024 * 9, unit: "bits", expected: "9.0 MiB"},
// SI Bits - decbits, bit
{name: "decbits: 1", value: 1, unit: "decbits", expected: "1 b"},
{name: "decbits: 7", value: 7, unit: "decbits", expected: "7 b"},
{name: "decbits: to MB = 8 * 1000 * 1000 * 4", value: 8 * 1000 * 1000 * 4, unit: "decbits", expected: "4.0 MB"},
// IEC Base bytes - bytes, By
{name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
{name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
{name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
{name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
{name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
{name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
{name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
{name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
{name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},
// SI Base bytes - decbytes
{name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
{name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
{name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
{name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},
// Kibibytes - kbytes, KiBy (IEC)
{name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
{name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
{name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
{name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
{name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
{name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
{name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
{name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
{name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},
// SI Kilobytes - decKbytes, deckbytes, kBy
{name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
{name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
{name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
{name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
{name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},
// Mebibytes - mbytes, MiBy (IEC)
{name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
{name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
{name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
{name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
{name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
{name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
{name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
{name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},
// SI Megabytes - decMbytes, decmbytes, MBy
{name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
{name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
{name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
{name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},
// Gibibytes - gbytes, GiBy (IEC)
{name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
{name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
{name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},
// SI Gigabytes - decGbytes, decgbytes, GBy
{name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
{name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},
// Tebibytes - tbytes, TiBy (IEC)
{name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
{name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
{name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},
// SI Terabytes - decTbytes, dectbytes, TBy
{name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
{name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
{name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},
// Pebibytes - pbytes, PiBy (IEC)
{name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
{name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},
// SI Petabytes - decPbytes, decpbytes, PBy
{name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
{name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
{name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},
// Exbibytes - EiBy (IEC)
{name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},
// Exabytes - EBy (SI)
{name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},
// Kibibits - Kibit (IEC): 1 Kibit = 1024 bits = 128 bytes
{name: "Kibit: 1 = 128 B", value: 1, unit: "Kibit", expected: "128 B"},
{name: "Kibit: 1024 = 128 KiB", value: 1024, unit: "Kibit", expected: "128 KiB"},
// Mebibits - Mibit (IEC): 1 Mibit = 1024 Kibit = 128 KiB
{name: "Mibit: 1 = 128 KiB", value: 1, unit: "Mibit", expected: "128 KiB"},
{name: "Mibit: 1024 = 128 MiB", value: 1024, unit: "Mibit", expected: "128 MiB"},
// Gibibits - Gibit (IEC): 1 Gibit = 1024 Mibit = 128 MiB
{name: "Gibit: 1 = 128 MiB", value: 1, unit: "Gibit", expected: "128 MiB"},
{name: "Gibit: 42*1024 = 5.3 TiB", value: 42 * 1024, unit: "Gibit", expected: "5.3 TiB"},
// Tebibits - Tibit (IEC): 1 Tibit = 1024 Gibit = 128 GiB
{name: "Tibit: 1 = 128 GiB", value: 1, unit: "Tibit", expected: "128 GiB"},
{name: "Tibit: 42*1024 = 5.3 PiB", value: 42 * 1024, unit: "Tibit", expected: "5.3 PiB"},
// Kilobits - kbit (SI): 1 kbit = 1000 bits = 125 bytes
{name: "kbit: 1 = 125 B", value: 1, unit: "kbit", expected: "125 B"},
{name: "kbit: 1000 = 125 kB", value: 1000, unit: "kbit", expected: "125 kB"},
// Megabits - Mbit (SI): 1 Mbit = 1000 kbit = 125 kB
{name: "Mbit: 1 = 125 kB", value: 1, unit: "Mbit", expected: "125 kB"},
{name: "Mbit: 1000 = 125 MB", value: 1000, unit: "Mbit", expected: "125 MB"},
// Gigabits - Gbit (SI): 1 Gbit = 1000 Mbit = 125 MB
{name: "Gbit: 1 = 125 MB", value: 1, unit: "Gbit", expected: "125 MB"},
{name: "Gbit: 42*1000 = 5.3 TB", value: 42 * 1000, unit: "Gbit", expected: "5.3 TB"},
// Terabits - Tbit (SI): 1 Tbit = 1000 Gbit = 125 GB
{name: "Tbit: 1 = 125 GB", value: 1, unit: "Tbit", expected: "125 GB"},
{name: "Tbit: 42*1000 = 5.3 PB", value: 42 * 1000, unit: "Tbit", expected: "5.3 PB"},
// Petabits - Pbit (SI): 1 Pbit = 1000 Tbit = 125 TB
{name: "Pbit: 1 = 125 TB", value: 1, unit: "Pbit", expected: "125 TB"},
{name: "Pbit: 42 = 5.3 PB", value: 42, unit: "Pbit", expected: "5.3 PB"},
// Exabits - Ebit (SI): 1 Ebit = 1000 Pbit = 125 PB
{name: "Ebit: 1 = 125 PB", value: 1, unit: "Ebit", expected: "125 PB"},
{name: "Ebit: 10 = 1.3 EB", value: 10, unit: "Ebit", expected: "1.3 EB"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
}
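For reference, the arithmetic the table exercises: IEC units scale by 1024, SI units scale by 1000, and bit units are divided by 8 before byte formatting applies. A minimal, self-contained sketch of that conversion (an illustration, not the formatter's actual code):

```go
package main

import "fmt"

func main() {
	// IEC bit units scale by 1024 and divide by 8 to reach bytes:
	// 1 Kibit = 1024 bits = 128 B, matching the Kibit cases above.
	fmt.Println(1.0 * 1024 / 8) // 128

	// SI bit units scale by 1000 instead:
	// 1 kbit = 1000 bits = 125 B.
	fmt.Println(1.0 * 1000 / 8) // 125

	// Mixed magnitudes from the table: 1 Gibit = 128 MiB, so
	// 42*1024 Gibit = 42*128/1024 TiB = 5.25 TiB, rendered as "5.3 TiB".
	fmt.Println(42.0 * 1024 * 128 / 1024 / 1024) // 5.25
}
```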

View File

@@ -18,11 +18,11 @@ var (
func FromUnit(u string) Formatter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationFormatter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataFormatter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateFormatter
case "percent", "percentunit", "%":
return PercentFormatter

View File

@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
return toHours(value)
case "d":
return toDays(value)
case "w", "wk":
case "w":
return toWeeks(value)
}
// When unit is not matched, return the value as it is.

View File

@@ -205,7 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
key.Indexes = matchingKey.Indexes
key.Materialized = matchingKey.Materialized
key.JSONPlan = matchingKey.JSONPlan
return actions
} else {
// multiple matching keys, set materialized only if all the keys are materialized

View File

@@ -483,22 +483,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
value1 := v.Visit(values[0])
value2 := v.Visit(values[1])
switch value1.(type) {
case float64:
if _, ok := value2.(float64); !ok {
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
return ""
}
case string:
if _, ok := value2.(string); !ok {
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
return ""
}
default:
v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
return ""
}
var conds []string
for _, key := range keys {
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
@@ -871,7 +855,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
// 1. either the user meant `key` (this is already handled above in fieldKeysForName)
// 2. or the user meant `attribute.key`; we look up in the map all possible field keys with the name 'attribute.key'
// Note:
// If the user only wants to search `attribute.key`, they have to use `attribute.attribute.key`
// If the user only wants to search `key`, they have to use `key`
// If the user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
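A small illustration (mine, not repo code) of the three lookup rules described in the note above:

```go
package main

import "fmt"

func main() {
	// how a key in a filter expression resolves, per the note above
	examples := [][2]string{
		{"attribute.attribute.key", "only the field named 'attribute.key'"},
		{"key", "only the field named 'key'"},
		{"attribute.key", "ambiguous: both candidates are searched"},
	}
	for _, e := range examples {
		fmt.Printf("%-25s -> %s\n", e[0], e[1])
	}
}
```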

View File

@@ -375,6 +375,13 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
}
if os.Getenv("INTERPOLATION_ENABLED") != "" {
logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
if config.Flagger.Config.Boolean == nil {
config.Flagger.Config.Boolean = make(map[string]bool)
}
config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
}
}
func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {
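The hunk above follows a common deprecation pattern: honor the old variable only when it is set, lazily initialize the target map, and coerce the string to a boolean. A runnable sketch of just that pattern (the flag key string here is a stand-in, not the real `flagger` constant):

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	var flags map[string]bool // may be nil, as in the config struct

	os.Setenv("INTERPOLATION_ENABLED", "true")
	if v := os.Getenv("INTERPOLATION_ENABLED"); v != "" {
		// deprecated variable is set: warn (elided here) and apply it
		if flags == nil {
			flags = make(map[string]bool)
		}
		flags["interpolation_enabled"] = v == "true" // any other value disables
	}
	fmt.Println(flags["interpolation_enabled"]) // true
}
```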

View File

@@ -13,8 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/modules/authdomain"
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
"github.com/SigNoz/signoz/pkg/modules/dashboard"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/modules/identity/implidentity"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -56,7 +54,6 @@ type Modules struct {
Preference preference.Module
User user.Module
UserGetter user.Getter
Identity identity.Module
SavedView savedview.Module
Apdex apdex.Module
Dashboard dashboard.Module
@@ -91,8 +88,7 @@ func NewModules(
) Modules {
quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
identityModule := implidentity.NewModule(sqlstore)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User, identityModule)
user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -105,12 +101,11 @@ func NewModules(
Dashboard: dashboard,
User: user,
UserGetter: userGetter,
Identity: identityModule,
QuickFilter: quickfilter,
TraceFunnel: impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
RawDataExport: implrawdataexport.NewModule(querier),
AuthDomain: implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter, identityModule),
Session: implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
SpanPercentile: implspanpercentile.NewModule(querier, providerSettings),
Services: implservices.NewModule(querier, telemetryStore),
MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),

View File

@@ -23,7 +23,6 @@ import (
"github.com/SigNoz/signoz/pkg/global"
"github.com/SigNoz/signoz/pkg/global/signozglobal"
"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
@@ -169,10 +168,6 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
sqlmigration.NewAddIdentityFactory(sqlstore, sqlschema),
sqlmigration.NewUpdateUserIdentity(sqlstore, sqlschema),
sqlmigration.NewMigrateUserRolesFactory(sqlstore, sqlschema),
)
}
@@ -224,9 +219,9 @@ func NewSharderProviderFactories() factory.NamedMap[factory.ProviderFactory[shar
)
}
func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, identityModule identity.Module, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] {
func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] {
return factory.MustNewNamedMap(
analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, identityModule, tokenizer, build, analyticsConfig),
analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, tokenizer, build, analyticsConfig),
noopstatsreporter.NewFactory(),
)
}

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/modules/identity/implidentity"
"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
"github.com/SigNoz/signoz/pkg/modules/user/impluser"
"github.com/SigNoz/signoz/pkg/queryparser"
@@ -76,12 +75,10 @@ func TestNewProviderFactories(t *testing.T) {
})
assert.NotPanics(t, func() {
sqlStore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)
userGetter := impluser.NewGetter(impluser.NewStore(sqlStore, instrumentationtest.New().ToProviderSettings()))
orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), nil)
identityModule := implidentity.NewModule(sqlStore)
userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings()))
orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil)
telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual)
NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, identityModule, tokenizertest.NewMockTokenizer(t), version.Build{}, analytics.Config{Enabled: true})
NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, tokenizertest.NewMockTokenizer(t), version.Build{}, analytics.Config{Enabled: true})
})
assert.NotPanics(t, func() {

View File

@@ -389,7 +389,7 @@ func New(
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, modules.Identity, config.User.Root)
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
@@ -424,7 +424,7 @@ func New(
ctx,
providerSettings,
config.StatsReporter,
NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, modules.Identity, tokenizer, version.Info, config.Analytics),
NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, tokenizer, version.Info, config.Analytics),
config.StatsReporter.Provider(),
)
if err != nil {

View File

@@ -1,61 +0,0 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type addUserEmailOrgIDIndex struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
func NewAddUserEmailOrgIDIndexFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_user_email_org_id_index"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return &addUserEmailOrgIDIndex{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
})
}
func (migration *addUserEmailOrgIDIndex) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addUserEmailOrgIDIndex) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
sqls := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"email", "org_id"}})
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
return nil
}
func (migration *addUserEmailOrgIDIndex) Down(ctx context.Context, db *bun.DB) error {
return nil
}

View File

@@ -1,111 +0,0 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type addIdentity struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
func NewAddIdentityFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_identity"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return &addIdentity{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
})
}
func (migration *addIdentity) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addIdentity) Up(ctx context.Context, db *bun.DB) error {
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
sqls := [][]byte{}
tableSQLs := migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
Name: "identity",
Columns: []*sqlschema.Column{
{Name: "id", DataType: sqlschema.DataTypeText, Nullable: false},
{Name: "status", DataType: sqlschema.DataTypeText, Nullable: false},
{Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
{Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
{Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false},
},
PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{
ColumnNames: []sqlschema.ColumnName{"id"},
},
ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
{
ReferencingColumnName: sqlschema.ColumnName("org_id"),
ReferencedTableName: sqlschema.TableName("organizations"),
ReferencedColumnName: sqlschema.ColumnName("id"),
},
},
})
sqls = append(sqls, tableSQLs...)
tableSQLs = migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
Name: "identity_role",
Columns: []*sqlschema.Column{
{Name: "id", DataType: sqlschema.DataTypeText, Nullable: false},
{Name: "identity_id", DataType: sqlschema.DataTypeText, Nullable: false},
{Name: "role_name", DataType: sqlschema.DataTypeText, Nullable: false},
{Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
{Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
},
PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{
ColumnNames: []sqlschema.ColumnName{"id"},
},
ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
{
ReferencingColumnName: sqlschema.ColumnName("identity_id"),
ReferencedTableName: sqlschema.TableName("identity"),
ReferencedColumnName: sqlschema.ColumnName("id"),
},
},
})
sqls = append(sqls, tableSQLs...)
indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{
TableName: "identity_role",
ColumnNames: []sqlschema.ColumnName{"identity_id", "role_name"},
})
sqls = append(sqls, indexSQLs...)
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
return nil
}
func (migration *addIdentity) Down(context.Context, *bun.DB) error {
return nil
}

View File

@@ -1,132 +0,0 @@
package sqlmigration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type updateUserIdentity struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
func NewUpdateUserIdentity(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("update_user_identity"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return &updateUserIdentity{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
})
}
func (migration *updateUserIdentity) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *updateUserIdentity) Up(ctx context.Context, db *bun.DB) error {
// 1. Disable FK enforcement
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
// 2. Fetch existing users
type existingUser struct {
bun.BaseModel `bun:"table:users"`
ID string `bun:"id"`
OrgID string `bun:"org_id"`
}
var users []*existingUser
if err := tx.NewSelect().Model(&users).Scan(ctx); err != nil {
return err
}
// 3. Create identity records for each user
now := time.Now()
for _, user := range users {
if _, err := tx.NewInsert().
Table("identity").
Value("id", "?", user.ID).
Value("status", "?", "active").
Value("org_id", "?", user.OrgID).
Value("created_at", "?", now).
Value("updated_at", "?", now).
Exec(ctx); err != nil {
return err
}
}
// 4. Get current table structure
table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
if err != nil {
return err
}
// 5. Get existing indices to preserve them after recreation
indices, err := migration.sqlschema.GetIndices(ctx, sqlschema.TableName("users"))
if err != nil {
return err
}
// 6. Build all SQL statements
sqls := [][]byte{}
// Add identity_id column using AddColumn with ColumnName("id") as value
identityIdColumn := &sqlschema.Column{
Name: "identity_id",
DataType: sqlschema.DataTypeText,
Nullable: false,
}
sqls = append(sqls, migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, identityIdColumn, sqlschema.ColumnName("id"))...)
// Add FK constraint to table definition
table.ForeignKeyConstraints = append(table.ForeignKeyConstraints, &sqlschema.ForeignKeyConstraint{
ReferencingColumnName: sqlschema.ColumnName("identity_id"),
ReferencedTableName: sqlschema.TableName("identity"),
ReferencedColumnName: sqlschema.ColumnName("id"),
})
// Recreate table to apply FK constraint
sqls = append(sqls, migration.sqlschema.Operator().RecreateTable(table, uniqueConstraints)...)
// Recreate indices that were lost during table recreation
for _, index := range indices {
sqls = append(sqls, migration.sqlschema.Operator().CreateIndex(index)...)
}
// 7. Execute all SQL statements
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
// 8. Re-enable FK enforcement
return migration.sqlschema.ToggleFKEnforcement(ctx, db, true)
}
func (migration *updateUserIdentity) Down(context.Context, *bun.DB) error {
return nil
}

View File

@@ -1,112 +0,0 @@
package sqlmigration
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
// Role name mapping from existing roles to managed role names
var existingRoleToManagedRole = map[string]string{
"ADMIN": "signoz-admin",
"EDITOR": "signoz-editor",
"VIEWER": "signoz-viewer",
}
type migrateUserRoles struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
func NewMigrateUserRolesFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("migrate_user_roles"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return &migrateUserRoles{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
})
}
func (migration *migrateUserRoles) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *migrateUserRoles) Up(ctx context.Context, db *bun.DB) error {
// 1. Disable FK enforcement
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
// 2. Fetch existing users with their roles
type existingUser struct {
bun.BaseModel `bun:"table:users"`
ID string `bun:"id"`
Role string `bun:"role"`
}
var users []*existingUser
if err := tx.NewSelect().Model(&users).Scan(ctx); err != nil {
return err
}
// 3. Create identity_role records for each user using role_name directly
now := time.Now()
for _, user := range users {
roleName := existingRoleToManagedRole[user.Role]
if _, err := tx.NewInsert().
Table("identity_role").
Value("id", "?", valuer.GenerateUUID().StringValue()).
Value("identity_id", "?", user.ID).
Value("role_name", "?", roleName).
Value("created_at", "?", now).
Value("updated_at", "?", now).
Exec(ctx); err != nil {
return err
}
}
// 4. Get current table structure
table, _, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
if err != nil {
return err
}
// 5. Drop role column
roleColumn := &sqlschema.Column{Name: "role"}
dropColumnSQLs := migration.sqlschema.Operator().DropColumn(table, roleColumn)
for _, sql := range dropColumnSQLs {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
// 6. Re-enable FK enforcement
return migration.sqlschema.ToggleFKEnforcement(ctx, db, true)
}
func (migration *migrateUserRoles) Down(context.Context, *bun.DB) error {
return nil
}

View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/analytics/segmentanalytics"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/identity"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/statsreporter"
@@ -40,9 +39,6 @@ type provider struct {
// used to get users
userGetter user.Getter
// used to get identity roles
identity identity.Module
// used to get tokenizer
tokenizer tokenizer.Tokenizer
@@ -59,9 +55,9 @@ type provider struct {
stopC chan struct{}
}
func NewFactory(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, identity identity.Module, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config] {
func NewFactory(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config] {
return factory.NewProviderFactory(factory.MustNewName("analytics"), func(ctx context.Context, settings factory.ProviderSettings, config statsreporter.Config) (statsreporter.StatsReporter, error) {
return New(ctx, settings, config, telemetryStore, collectors, orgGetter, userGetter, identity, tokenizer, build, analyticsConfig)
return New(ctx, settings, config, telemetryStore, collectors, orgGetter, userGetter, tokenizer, build, analyticsConfig)
})
}
@@ -73,7 +69,6 @@ func New(
collectors []statsreporter.StatsCollector,
orgGetter organization.Getter,
userGetter user.Getter,
identity identity.Module,
tokenizer tokenizer.Tokenizer,
build version.Build,
analyticsConfig analytics.Config,
@@ -92,7 +87,6 @@ func New(
collectors: collectors,
orgGetter: orgGetter,
userGetter: userGetter,
identity: identity,
analytics: analytics,
tokenizer: tokenizer,
build: build,
@@ -172,17 +166,6 @@ func (provider *provider) Report(ctx context.Context) error {
continue
}
// Get roles for all users in batch
identityIDs := make([]valuer.UUID, len(users))
for i, user := range users {
identityIDs[i] = user.IdentityID
}
rolesMap, err := provider.identity.GetRolesForIdentities(ctx, identityIDs)
if err != nil {
provider.settings.Logger().WarnContext(ctx, "failed to get roles", "error", err, "org_id", org.ID)
rolesMap = make(map[valuer.UUID][]types.Role)
}
maxLastObservedAtPerUserID, err := provider.tokenizer.ListMaxLastObservedAtByOrgID(ctx, org.ID)
if err != nil {
provider.settings.Logger().WarnContext(ctx, "failed to list max last observed at per user id", "error", err, "org_id", org.ID)
@@ -190,8 +173,7 @@ func (provider *provider) Report(ctx context.Context) error {
}
for _, user := range users {
roles := rolesMap[user.IdentityID]
traits := types.NewTraitsFromUser(user, roles)
traits := types.NewTraitsFromUser(user)
if maxLastObservedAt, ok := maxLastObservedAtPerUserID[user.ID]; ok {
traits["auth_token.last_observed_at.max.time"] = maxLastObservedAt.UTC()
traits["auth_token.last_observed_at.max.time_unix"] = maxLastObservedAt.Unix()

View File

@@ -5,7 +5,6 @@ import (
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -74,7 +73,7 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
cteArgs [][]any
)
if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
if b.metricsStatementBuilder.CanShortCircuitDelta(query) {
// spatial_aggregation_cte directly for certain delta queries
if frag, args, err := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables); err != nil {
return nil, err
@@ -92,9 +91,8 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -124,16 +122,13 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
for _, g := range query.GroupBy {
col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
if err != nil {
return "", nil, err
return "", []any{}, err
}
sb.SelectMore(col)
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -155,7 +150,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
Variables: variables,
}, start, end)
if err != nil {
return "", nil, err
return "", []any{}, err
}
}
if filterWhere != nil {
@@ -213,11 +208,8 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -286,10 +278,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -326,23 +315,25 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -357,15 +348,7 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
)
}
) (string, []any) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
@@ -382,5 +365,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}

View File

@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
},
expectedErr: nil,
@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
},
expectedErr: nil,
@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
},
expectedErr: nil,
@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
},
expectedErr: nil,

View File

@@ -3,7 +3,6 @@ package telemetrymeter
import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)
@@ -64,7 +63,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) (string, error) {
) string {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -191,13 +190,5 @@ func AggregationColumnForSamplesTable(
}
}
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
return aggregationColumn
}

View File

@@ -29,7 +29,13 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
if operator.IsStringSearchOperator() {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
@@ -38,18 +44,6 @@ func (c *conditionBuilder) conditionFor(
return "", err
}
// TODO(srikanthccv): use the same data type collision handling when metrics schemas are updated
switch v := value.(type) {
case float64:
tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
case []any:
if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
if _, ok := v[0].(float64); ok {
tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
}
}
}
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(tblFieldName, value), nil

View File

@@ -5,27 +5,67 @@ import (
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/flagger"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/types/featuretypes"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/slices"
)
const (
RateTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))`
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
IncreaseTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value, per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)`
RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s, ((%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
RateMultiTemporalityTmpl = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1) OVER rate_window), (%s - lagInFrame(%s, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))) AS per_series_value`
RateWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Assume linear growth to next point
(leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected
per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
ELSE
-- Normal case: calculate rate
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
(ts - lagInFrame(ts, 1) OVER rate_window)
END`
IncreaseMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
IncreaseWithInterpolation = `
CASE
WHEN row_number() OVER rate_window = 1 THEN
-- First row: try to interpolate using next value
CASE
WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
-- Calculate the interpolated increase for this interval
((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
(leadInFrame(ts, 1) OVER rate_window - ts)) *
(leadInFrame(ts, 1) OVER rate_window - ts)
ELSE
0 -- No next value either, can't interpolate
END
WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
-- Counter reset detected: the increase is the current value
per_series_value
ELSE
-- Normal case: calculate increase
(per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
END`
)
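To make the window templates above concrete, here is a plain-Go rendering (an illustration, not repo code) of the counter-reset rule that `RateWithoutNegative` encodes: when the cumulative value drops, the series is assumed to have reset and the current value is taken as the delta; for the first row, the previous timestamp defaults to the query start.

```go
package main

import "fmt"

// rateWithoutNegative mirrors the SQL template's semantics in plain Go.
func rateWithoutNegative(prevVal, curVal, prevTS, curTS float64) float64 {
	delta := curVal - prevVal
	if delta < 0 { // counter reset: the whole current value is the increase
		delta = curVal
	}
	return delta / (curTS - prevTS)
}

func main() {
	fmt.Println(rateWithoutNegative(100, 110, 0, 60)) // normal growth: ~0.1667/s
	fmt.Println(rateWithoutNegative(110, 5, 60, 120)) // reset: 5/60 ≈ 0.0833/s
}
```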
type MetricQueryStatementBuilder struct {
@@ -107,6 +147,54 @@ func (b *MetricQueryStatementBuilder) Build(
return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
}
// Fastpath (no fingerprint grouping)
// CanShortCircuitDelta reports whether the optimized query can be used
// for the given query.
// It skips the per-fingerprint GROUP BY, which improves performance for
// certain queries.
// Cases where we can short-circuit:
// 1. time aggregation = (rate|increase) and space aggregation = sum
//    - rate = sum(value)/step, increase = sum(value); a sum of sums equals the sum of all values
//
// 2. time aggregation = sum and space aggregation = sum
//    - a sum of sums equals the sum of all values
//
// 3. time aggregation = min and space aggregation = min
//    - a min of mins equals the min of all values
//
// 4. time aggregation = max and space aggregation = max
//    - a max of maxes equals the max of all values
//
// 5. special case: exponential histograms need no per-series/fingerprint
//    aggregation; we can use the quantilesDDMerge function directly
//
// All of this holds only for delta metrics.
func (b *MetricQueryStatementBuilder) CanShortCircuitDelta(q qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) bool {
if q.Aggregations[0].Temporality != metrictypes.Delta {
return false
}
ta := q.Aggregations[0].TimeAggregation
sa := q.Aggregations[0].SpaceAggregation
if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
return true
}
if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
return true
}
if q.Aggregations[0].Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
return true
}
return false
}
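As a sanity check on cases 1–4, the property being exploited is just associativity of the aggregate; a toy illustration (not repo code):

```go
package main

import "fmt"

func sum(xs []float64) float64 {
	t := 0.0
	for _, x := range xs {
		t += x
	}
	return t
}

func main() {
	// two delta series, as if grouped by fingerprint
	a := []float64{1, 2, 3}
	b := []float64{4, 5, 6}

	sumOfSums := sum(a) + sum(b)                           // per-series, then spatial
	total := sum(append(append([]float64{}, a...), b...))  // all points at once

	fmt.Println(sumOfSums == total) // true: the fingerprint GROUP BY can be skipped
}
```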
func (b *MetricQueryStatementBuilder) buildPipelineStatement(
ctx context.Context,
start, end uint64,
@@ -168,11 +256,10 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
return nil, err
}
if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
if b.CanShortCircuitDelta(query) {
// spatial_aggregation_cte directly for certain delta queries
if frag, args, err := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); err != nil {
return nil, err
} else if frag != "" {
frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -186,9 +273,8 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}
// spatial_aggregation_cte
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -208,7 +294,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
timeSeriesCTEArgs []any,
) (string, []any, error) {
) (string, []any) {
stepSec := int64(query.StepInterval.Seconds())
sb := sqlbuilder.NewSelectBuilder()
@@ -221,15 +307,11 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol, err := AggregationColumnForSamplesTable(
aggCol := AggregationColumnForSamplesTable(
start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -252,7 +334,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}
func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
@@ -355,12 +437,8 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}
@@ -383,7 +461,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
}
func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
_ context.Context,
ctx context.Context,
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
@@ -401,10 +479,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
baseSb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))
tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
@@ -421,25 +496,36 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
// ! TODO (balanikaran) Get OrgID via function parameter instead of valuer.GenerateUUID()
interpolationEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureInterpolationEnabled, featuretypes.NewFlaggerEvaluationContext(valuer.GenerateUUID()))
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
if interpolationEnabled {
rateExpr = RateWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", RateTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
if interpolationEnabled {
incExpr = IncreaseWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", IncreaseTmpl))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -448,6 +534,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
}
// Because rate interpolation is not enabled anywhere yet (there are gaps in the logic around cache handling), it has not been considered for the multi-temporality path.
func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
_ context.Context,
start, end uint64,
@@ -466,32 +553,18 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
aggForDeltaTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggForCumulativeTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggForDeltaTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggForCumulativeTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggForDeltaTemporality = fmt.Sprintf("%s/%d", aggForDeltaTemporality, stepSec)
}
switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateMultiTemporalityTmpl,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, aggForCumulativeTemporality, aggForCumulativeTemporality, start)
sb.SelectMore(rateExpr)
case metrictypes.TimeAggregationIncrease:
increaseExpr := fmt.Sprintf(IncreaseMultiTemporality,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, start)
sb.SelectMore(increaseExpr)
default:
expr := fmt.Sprintf(OthersMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality)
@@ -519,14 +592,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
) (string, []any) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("ts")
@@ -543,7 +609,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
}
func (b *MetricQueryStatementBuilder) BuildFinalSelect(
@@ -575,7 +641,9 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
quantile,
))
sb.From("__spatial_aggregation_cte")
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
for _, g := range query.GroupBy {
sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
@@ -591,8 +659,6 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.Where(rewrittenExpr)
}
}
sb.OrderBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("ts")
q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return &qbtypes.Statement{Query: combined + q, Args: append(args, a...)}, nil


@@ -50,7 +50,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -83,7 +83,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -116,7 +116,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
@@ -148,7 +148,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
@@ -181,7 +181,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -210,7 +210,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,


@@ -3,7 +3,6 @@ package telemetrymetrics
import (
"time"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)
@@ -169,7 +168,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) (string, error) {
) string {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -299,12 +298,5 @@ func AggregationColumnForSamplesTable(
}
}
}
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
return aggregationColumn
}


@@ -35,7 +35,13 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {
if operator.IsStringSearchOperator() {
switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
value = querybuilder.FormatValueForContains(value)
}
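`querybuilder.FormatValueForContains` itself is not part of this diff; the sketch below is a purely hypothetical stand-in to illustrate why the explicit switch matters: only LIKE-style operators should get this escaping, while regexp operators (dropped from `IsStringSearchOperator`, see the operator change below) must receive the raw pattern.

```go
package querybuilder_sketch

import "strings"

// Hypothetical stand-in for querybuilder.FormatValueForContains; the real
// implementation is not shown in this diff. It escapes LIKE metacharacters
// so a literal "%" or "_" in a contains/like search term does not behave
// as a wildcard.
func formatValueForContains(value any) any {
	s, ok := value.(string)
	if !ok {
		return value
	}
	s = strings.ReplaceAll(s, `\`, `\\`)
	s = strings.ReplaceAll(s, "%", `\%`)
	s = strings.ReplaceAll(s, "_", `\_`)
	return s
}
```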


@@ -1,62 +0,0 @@
package identitytypes
import (
"time"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)
// IdentityStatus represents the status of an identity
type IdentityStatus string
const (
IdentityStatusActive IdentityStatus = "active"
IdentityStatusInactive IdentityStatus = "inactive"
)
// StorableIdentity represents the database entity for a user's identity
type StorableIdentity struct {
bun.BaseModel `bun:"table:identity"`
ID valuer.UUID `bun:"id,pk,type:text" json:"id"`
Status IdentityStatus `bun:"status" json:"status"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
CreatedAt time.Time `bun:"created_at" json:"createdAt"`
UpdatedAt time.Time `bun:"updated_at" json:"updatedAt"`
}
// StorableIdentityRole represents the relationship between identity and role in the database
type StorableIdentityRole struct {
bun.BaseModel `bun:"table:identity_role"`
ID valuer.UUID `bun:"id,pk,type:text" json:"id"`
IdentityID valuer.UUID `bun:"identity_id" json:"identityId"`
RoleName string `bun:"role_name" json:"roleName"`
CreatedAt time.Time `bun:"created_at" json:"createdAt"`
UpdatedAt time.Time `bun:"updated_at" json:"updatedAt"`
}
// NewStorableIdentity creates a new storable identity
func NewStorableIdentity(id valuer.UUID, orgID valuer.UUID) *StorableIdentity {
now := time.Now()
return &StorableIdentity{
ID: id,
Status: IdentityStatusActive,
OrgID: orgID,
CreatedAt: now,
UpdatedAt: now,
}
}
// NewStorableIdentityRole creates a new identity-role mapping
func NewStorableIdentityRole(identityID valuer.UUID, roleName string) *StorableIdentityRole {
now := time.Now()
return &StorableIdentityRole{
ID: valuer.GenerateUUID(),
IdentityID: identityID,
RoleName: roleName,
CreatedAt: now,
UpdatedAt: now,
}
}


@@ -25,7 +25,23 @@ type Organization struct {
DisplayName string `bun:"display_name,type:text,notnull" json:"displayName"`
}
func NewOrganization(displayName string, name string) *Organization {
func NewOrganization(displayName string) *Organization {
id := valuer.GenerateUUID()
return &Organization{
Identifiable: Identifiable{
ID: id,
},
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
// Name: "default/main", TODO: take the call and uncomment this later
DisplayName: displayName,
Key: NewOrganizationKey(id),
}
}
func NewOrganizationWithName(name string) *Organization {
id := valuer.GenerateUUID()
return &Organization{
Identifiable: Identifiable{
@@ -36,7 +52,7 @@ func NewOrganization(displayName string, name string) *Organization {
UpdatedAt: time.Now(),
},
Name: name,
DisplayName: displayName,
DisplayName: name,
Key: NewOrganizationKey(id),
}
}
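A usage sketch of the two constructors after this split; the `github.com/SigNoz/signoz/pkg/types` import path is an assumption. Interactive setup passes a human-readable display name, while env-based root-user provisioning passes a machine name that doubles as the display name.

```go
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types"
)

func main() {
	// Interactive setup: caller supplies only a display name; Name stays unset.
	org := types.NewOrganization("My Company")

	// Env-based provisioning: the name is also used as the display name.
	rootOrg := types.NewOrganizationWithName("default")

	fmt.Println(org.DisplayName, rootOrg.Name, rootOrg.DisplayName)
}
```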


@@ -152,9 +152,7 @@ func (f FilterOperator) IsStringSearchOperator() bool {
FilterOperatorILike,
FilterOperatorNotILike,
FilterOperatorLike,
FilterOperatorNotLike,
FilterOperatorRegexp,
FilterOperatorNotRegexp:
FilterOperatorNotLike:
return true
default:
return false
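A minimal hypothetical regression test for the narrowed set (not part of this PR), asserting that regexp operators no longer flow through the LIKE-escaping path:

```go
package querybuildertypesv5

import "testing"

// Hypothetical test, not from this PR: after the change above, regexp
// operators must not be classified as string-search (LIKE) operators,
// otherwise their patterns would be escaped like contains/like values.
func TestRegexpIsNotStringSearch(t *testing.T) {
	if FilterOperatorRegexp.IsStringSearchOperator() {
		t.Fatal("FilterOperatorRegexp must not be a string-search operator")
	}
	if !FilterOperatorContains.IsStringSearchOperator() {
		t.Fatal("FilterOperatorContains should remain a string-search operator")
	}
}
```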


@@ -3,7 +3,6 @@ package querybuildertypesv5
import (
"fmt"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -175,54 +174,3 @@ func (q *QueryBuilderQuery[T]) Normalize() {
}
}
// Fastpath (no fingerprint grouping)
// CanShortCircuitDelta returns true if we can use the optimized query
// for the given query.
// This is used to avoid the group by fingerprint, thus improving performance
// for certain queries.
// Cases where we can short circuit:
// 1. time aggregation = (rate|increase) and space aggregation = sum
//    - rate = sum(value)/step, increase = sum(value); a sum of sums is the same as the sum of all values
//
// 2. time aggregation = sum and space aggregation = sum
//    - a sum of sums is the same as the sum of all values
//
// 3. time aggregation = min and space aggregation = min
//    - a min of mins is the same as the min of all values
//
// 4. time aggregation = max and space aggregation = max
//    - a max of maxes is the same as the max of all values
//
// 5. special case for exponential histograms: there is no need for per-series/fingerprint aggregation,
//    we can directly use the quantilesDDMerge function
//
// All of this holds only for delta metrics.
func CanShortCircuitDelta(metricAgg MetricAggregation) bool {
if metricAgg.Temporality != metrictypes.Delta {
return false
}
ta := metricAgg.TimeAggregation
sa := metricAgg.SpaceAggregation
if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) &&
sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
return true
}
if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
return true
}
if metricAgg.Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
return true
}
return false
}
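This helper appears to have moved onto the statement builder (see `b.CanShortCircuitDelta(query)` earlier in the diff). The equivalences it relies on are easy to sanity-check; a toy, self-contained sketch with made-up series values, not SigNoz code:

```go
package main

import "fmt"

// Toy check of the equivalence behind the delta fast path: when both the
// time and space aggregations are sum, summing per-series sums equals
// summing every sample in one pass, so the per-fingerprint grouping step
// can be skipped entirely.
func main() {
	series := map[string][]float64{
		"fingerprint-a": {1, 2, 3},
		"fingerprint-b": {4, 5},
	}

	perSeriesThenAcross := 0.0 // slow path: aggregate per series, then across series
	direct := 0.0              // fast path: aggregate all samples at once
	for _, samples := range series {
		sum := 0.0
		for _, v := range samples {
			sum += v
			direct += v
		}
		perSeriesThenAcross += sum
	}

	fmt.Println(perSeriesThenAcross == direct) // true
}
```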


@@ -12,7 +12,6 @@ import (
var (
ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
ErrBetweenValuesType = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values of the number type")
ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
)


@@ -75,7 +75,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypeFormula:
var spec QueryBuilderFormula
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "formula spec"); err != nil {
return wrapUnmarshalError(err, "invalid formula spec: %v", err)
}
@@ -83,7 +83,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypeJoin:
var spec QueryBuilderJoin
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "join spec"); err != nil {
return wrapUnmarshalError(err, "invalid join spec: %v", err)
}
@@ -98,7 +98,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypePromQL:
var spec PromQuery
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for PromQuery
// TODO: use json.Unmarshal here after implementing custom unmarshaler for PromQuery
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "PromQL spec"); err != nil {
return wrapUnmarshalError(err, "invalid PromQL spec: %v", err)
}
@@ -106,7 +106,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
case QueryTypeClickHouseSQL:
var spec ClickHouseQuery
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
// TODO: use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "ClickHouse SQL spec"); err != nil {
return wrapUnmarshalError(err, "invalid ClickHouse SQL spec: %v", err)
}
@@ -439,7 +439,7 @@ func (r *QueryRangeRequest) GetQueriesSupportingZeroDefault() map[string]bool {
expr = strings.ToLower(expr)
// only pure additive/counting operations should default to zero,
// while statistical/analytical operations should show gaps when there's no data to analyze.
// TODO(srikanthccv): use newExprVisitor for getting the function used in the expression
// TODO: use newExprVisitor for getting the function used in the expression
if strings.HasPrefix(expr, "count(") ||
strings.HasPrefix(expr, "count_distinct(") ||
strings.HasPrefix(expr, "sum(") ||


@@ -21,12 +21,3 @@ var (
// []Bucket (struct{Lower,Upper,Count float64}), example: histogram
RequestTypeDistribution = RequestType{valuer.NewString("distribution")}
)
// IsAggregation returns true for request types that produce aggregated results
// (time_series, scalar, distribution). For these types, fields like groupBy,
// having, aggregations, and orderBy (with aggregation key validation) are meaningful.
// For non-aggregation types (raw, raw_stream, trace), those fields are ignored
// and don't need to be validated.
func (r RequestType) IsAggregation() bool {
return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}
