Mirror of https://github.com/SigNoz/signoz.git — synced 2026-02-18 23:12:36 +00:00

Compare commits: 12 commits, feat/cross … platform-p
| Author | SHA1 | Date |
|---|---|---|
| | 2abd3ddd8b | |
| | 9e7f97976b | |
| | c3f35c8ddf | |
| | ba4e93050e | |
| | 7f1d350ffe | |
| | 1d3134959d | |
| | b86bd24dd9 | |
| | 4c49d45cbf | |
| | 9b3d3453b1 | |
| | 9d981d8a13 | |
| | 6de4520a95 | |
| | f566909320 | |
@@ -176,25 +176,6 @@ We have published benchmarks comparing Loki with SigNoz. Take a look…
 
 We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) before you start contributing to SigNoz.
 
 Not sure how to get started? Just ping us on the #contributing channel in our [slack community](https://signoz.io/slack)
 
-### Our project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-
-#### Frontend
-
-- [Palash Gupta](https://github.com/palashgdev)
-- [Yunus M](https://github.com/YounixM)
-- [Rajat Dabade](https://github.com/Rajat-Dabade)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-
 <br /><br />
 
 ## Documentation
28 README.md
@@ -221,34 +221,6 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING
 
 Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)
 
-### Project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
-- [Ekansh Gupta](https://github.com/eKuG)
-- [Aniket Agarwal](https://github.com/aniketio-ctrl)
-
-#### Frontend
-
-- [Yunus M](https://github.com/YounixM)
-- [Vikrant Gupta](https://github.com/vikrantgupta25)
-- [Sagar Rajput](https://github.com/SagarRajput-7)
-- [Shaheer Kochai](https://github.com/ahmadshaheer)
-- [Amlan Kumar Nandy](https://github.com/amlannandy)
-- [Sahil Khan](https://github.com/sawhil)
-- [Aditya Singh](https://github.com/aks07)
-- [Abhi Kumar](https://github.com/ahrefabhi)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-- [Vibhu Pandey](https://github.com/therealpandey)
-
 <br /><br />
@@ -187,25 +187,6 @@ Jaeger is only a distributed tracing system, but SigNoz provides metric…
 
 Not sure how to get started? Just reach out to us on the `#contributing` channel in our [slack community](https://signoz.io/slack).
 
-### Project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-
-#### Frontend
-
-- [Palash Gupta](https://github.com/palashgdev)
-- [Yunus M](https://github.com/YounixM)
-- [Rajat Dabade](https://github.com/Rajat-Dabade)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-
 <br /><br />
 
 ## Documentation
@@ -294,7 +294,6 @@ flagger:
  config:
    boolean:
      use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:
@@ -309,3 +308,14 @@ user:
     allow_self: true
     # The duration within which a user can reset their password.
     max_token_lifetime: 6h
+root:
+  # Whether to enable the root user. When enabled, a root user is provisioned
+  # on startup using the email and password below. The root user cannot be
+  # deleted, updated, or have their password changed through the UI.
+  enabled: false
+  # The email address of the root user.
+  email: ""
+  # The password of the root user. Must meet password requirements.
+  password: ""
+  # The name of the organization to create or look up for the root user.
+  org_name: default
@@ -4678,6 +4678,10 @@ components:
         type: string
       id:
         type: string
+      identityId:
+        type: string
+      isRoot:
+        type: boolean
       orgId:
         type: string
       role:
@@ -45,7 +45,7 @@ type APIHandler struct {
 }
 
 // NewAPIHandler returns an APIHandler
-func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
   baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
     Reader:      opts.DataConnector,
     RuleManager: opts.RulesManager,
@@ -58,7 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
     Signoz:         signoz,
     QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
     QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
-  })
+  }, config)
 
   if err != nil {
     return nil, err
@@ -175,7 +175,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
     GlobalConfig: config.Global,
   }
 
-  apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
+  apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
   if err != nil {
     return nil, err
   }
@@ -1542,6 +1542,10 @@ export interface TypesUserDTO {
    * @type string
    */
   id?: string;
+  /**
+   * @type boolean
+   */
+  isRoot?: boolean;
   /**
    * @type string
    */
@@ -9,74 +9,6 @@
   padding: 0px;
 }
 
-.dashboard-header {
-  border-bottom: 1px solid var(--bg-slate-400);
-  display: flex;
-  justify-content: space-between;
-  gap: 16px;
-  align-items: center;
-  padding: 0 8px;
-  box-sizing: border-box;
-
-  .dashboard-breadcrumbs {
-    width: 100%;
-    height: 48px;
-    display: flex;
-    gap: 6px;
-    align-items: center;
-    max-width: 80%;
-
-    .dashboard-btn {
-      display: flex;
-      align-items: center;
-      color: var(--bg-vanilla-400);
-      font-family: Inter;
-      font-size: 14px;
-      font-style: normal;
-      font-weight: 400;
-      line-height: 20px; /* 142.857% */
-      letter-spacing: -0.07px;
-      padding: 0px;
-      height: 20px;
-    }
-
-    .dashboard-btn:hover {
-      background-color: unset;
-    }
-
-    .id-btn {
-      display: flex;
-      align-items: center;
-      padding: 0px 2px;
-      border-radius: 2px;
-      background: rgba(113, 144, 249, 0.1);
-      color: var(--bg-robin-400);
-      font-family: Inter;
-      font-size: 14px;
-      font-style: normal;
-      font-weight: 400;
-      line-height: 20px; /* 142.857% */
-      height: 20px;
-
-      max-width: calc(100% - 120px);
-
-      span {
-        white-space: nowrap;
-        overflow: hidden;
-        text-overflow: ellipsis;
-      }
-
-      .ant-btn-icon {
-        margin-inline-end: 4px;
-      }
-    }
-    .id-btn:hover {
-      background: rgba(113, 144, 249, 0.1);
-      color: var(--bg-robin-300);
-    }
-  }
-}
-
 .dashboard-details {
   display: flex;
   justify-content: space-between;
@@ -535,15 +467,6 @@
 .dashboard-description-container {
   color: var(--bg-ink-400);
 
-  .dashboard-header {
-    border-bottom: 1px solid var(--bg-vanilla-300);
-    .dashboard-breadcrumbs {
-      .dashboard-btn {
-        color: var(--bg-ink-400);
-      }
-    }
-  }
-
   .dashboard-details {
     .left-section {
       .dashboard-title {
@@ -16,9 +16,7 @@ import {
 } from 'antd';
 import logEvent from 'api/common/logEvent';
 import ConfigureIcon from 'assets/Integrations/ConfigureIcon';
-import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
 import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
-import ROUTES from 'constants/routes';
 import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
 import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
 import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
@@ -27,7 +25,6 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
 import useComponentPermission from 'hooks/useComponentPermission';
 import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
 import { useNotifications } from 'hooks/useNotifications';
-import { useSafeNavigate } from 'hooks/useSafeNavigate';
 import { isEmpty } from 'lodash-es';
 import {
   Check,
@@ -37,7 +34,6 @@ import {
   FolderKanban,
   Fullscreen,
   Globe,
-  LayoutGrid,
   LockKeyhole,
   PenLine,
   X,
@@ -51,6 +47,7 @@ import { ROLES, USER_ROLES } from 'types/roles';
 import { ComponentTypes } from 'utils/permission';
 import { v4 as uuid } from 'uuid';
 
+import DashboardHeader from '../components/DashboardHeader/DashboardHeader';
 import DashboardGraphSlider from '../ComponentsSlider';
 import DashboardSettings from '../DashboardSettings';
 import { Base64Icons } from '../DashboardSettings/General/utils';
@@ -71,7 +68,6 @@ interface DashboardDescriptionProps {
 
 // eslint-disable-next-line sonarjs/cognitive-complexity
 function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
-  const { safeNavigate } = useSafeNavigate();
   const { handle } = props;
   const {
     selectedDashboard,
@@ -80,7 +76,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
     layouts,
     setLayouts,
     isDashboardLocked,
-    listSortOrder,
     setSelectedDashboard,
     handleToggleDashboardSlider,
     setSelectedRowWidgetId,
@@ -292,17 +287,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
     });
   }
 
-  function goToListPage(): void {
-    const urlParams = new URLSearchParams();
-    urlParams.set('columnKey', listSortOrder.columnKey as string);
-    urlParams.set('order', listSortOrder.order as string);
-    urlParams.set('page', listSortOrder.pagination as string);
-    urlParams.set('search', listSortOrder.search as string);
-
-    const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
-    safeNavigate(generatedUrl);
-  }
-
   const {
     data: publicDashboardResponse,
     isLoading: isLoadingPublicDashboardData,
@@ -351,32 +335,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
 
   return (
     <Card className="dashboard-description-container">
-      <div className="dashboard-header">
-        <section className="dashboard-breadcrumbs">
-          <Button
-            type="text"
-            icon={<LayoutGrid size={14} />}
-            className="dashboard-btn"
-            onClick={(): void => goToListPage()}
-          >
-            Dashboard /
-          </Button>
-          <Button type="text" className="id-btn dashboard-name-btn">
-            <img
-              src={image}
-              alt="dashboard-icon"
-              style={{ height: '14px', width: '14px' }}
-            />
-            {title}
-          </Button>
-        </section>
-
-        <HeaderRightSection
-          enableAnnouncements={false}
-          enableShare
-          enableFeedback
-        />
-      </div>
+      <DashboardHeader />
       <section className="dashboard-details">
         <div className="left-section">
           <img src={image} alt="dashboard-img" className="dashboard-img" />
@@ -1,7 +1,6 @@
 .overview-content {
   display: flex;
   flex-direction: column;
   gap: 16px;
 
   .overview-settings {
     border-radius: 3px;
@@ -56,35 +55,6 @@
     border: 1px solid var(--bg-slate-400);
     background: var(--bg-ink-300);
   }
 
-  .cross-panel-sync-section {
-    display: flex;
-    align-items: flex-start;
-    justify-content: space-between;
-    gap: 16px;
-    flex-wrap: wrap;
-
-    .cross-panel-sync-info {
-      display: flex;
-      flex-direction: column;
-      flex: 1;
-      min-width: 200px;
-
-      .cross-panel-sync-title {
-        color: var(--bg-vanilla-400);
-        font-size: 14px;
-      }
-
-      .cross-panel-sync-description {
-        color: var(--bg-vanilla-400);
-        font-size: 12px;
-      }
-    }
-
-    .ant-radio-group {
-      flex-shrink: 0;
-    }
-  }
 }
 
 .overview-settings-footer {
@@ -198,15 +168,6 @@
   border: 1px solid var(--bg-vanilla-300);
   background: var(--bg-vanilla-300);
 }
 
-.cross-panel-sync-section {
-  .cross-panel-sync-title {
-    color: var(--bg-ink-400);
-  }
-  .cross-panel-sync-description {
-    color: var(--bg-ink-300);
-  }
-}
 }
 
 .overview-settings-footer {
@@ -1,15 +1,11 @@
-import { useEffect, useMemo, useState } from 'react';
+import { useEffect, useState } from 'react';
 import { useTranslation } from 'react-i18next';
-import { Col, Input, Radio, Select, Space, Typography } from 'antd';
+import { Col, Input, Select, Space, Typography } from 'antd';
 import AddTags from 'container/DashboardContainer/DashboardSettings/General/AddTags';
 import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
 import { isEqual } from 'lodash-es';
 import { Check, X } from 'lucide-react';
 import { useDashboard } from 'providers/Dashboard/Dashboard';
-import {
-  CROSS_PANEL_SYNC_OPTIONS,
-  CrossPanelSync,
-} from 'types/api/dashboard/getAll';
 
 import { Button } from './styles';
 import { Base64Icons } from './utils';
@@ -25,13 +21,8 @@ function GeneralDashboardSettings(): JSX.Element {
 
   const selectedData = selectedDashboard?.data;
 
-  const {
-    title = '',
-    tags = [],
-    description = '',
-    image = Base64Icons[0],
-    crossPanelSync = 'NONE',
-  } = selectedData || {};
+  const { title = '', tags = [], description = '', image = Base64Icons[0] } =
+    selectedData || {};
 
   const [updatedTitle, setUpdatedTitle] = useState<string>(title);
   const [updatedTags, setUpdatedTags] = useState<string[]>(tags || []);
@@ -39,10 +30,6 @@ function GeneralDashboardSettings(): JSX.Element {
     description || '',
   );
   const [updatedImage, setUpdatedImage] = useState<string>(image);
-  const [
-    updatedCrossPanelSync,
-    setUpdatedCrossPanelSync,
-  ] = useState<CrossPanelSync>(crossPanelSync);
   const [numberOfUnsavedChanges, setNumberOfUnsavedChanges] = useState<number>(
     0,
   );
@@ -63,7 +50,6 @@ function GeneralDashboardSettings(): JSX.Element {
         tags: updatedTags,
         title: updatedTitle,
         image: updatedImage,
-        crossPanelSync: updatedCrossPanelSync,
       },
     },
     {
@@ -79,13 +65,12 @@ function GeneralDashboardSettings(): JSX.Element {
 
   useEffect(() => {
     let numberOfUnsavedChanges = 0;
-    const initialValues = [title, description, tags, image, crossPanelSync];
+    const initialValues = [title, description, tags, image];
     const updatedValues = [
       updatedTitle,
       updatedDescription,
       updatedTags,
       updatedImage,
-      updatedCrossPanelSync,
     ];
     initialValues.forEach((val, index) => {
       if (!isEqual(val, updatedValues[index])) {
@@ -94,38 +79,21 @@ function GeneralDashboardSettings(): JSX.Element {
     });
     setNumberOfUnsavedChanges(numberOfUnsavedChanges);
   }, [
-    crossPanelSync,
     description,
     image,
     tags,
     title,
-    updatedCrossPanelSync,
     updatedDescription,
     updatedImage,
     updatedTags,
     updatedTitle,
   ]);
 
-  const crossPanelSyncOptions = useMemo(() => {
-    return CROSS_PANEL_SYNC_OPTIONS.map((value) => {
-      const sanitizedValue = value.toLowerCase();
-      const label =
-        sanitizedValue === 'none'
-          ? 'No Sync'
-          : sanitizedValue.charAt(0).toUpperCase() + sanitizedValue.slice(1);
-      return {
-        label,
-        value,
-      };
-    });
-  }, []);
-
   const discardHandler = (): void => {
     setUpdatedTitle(title);
     setUpdatedImage(image);
     setUpdatedTags(tags);
     setUpdatedDescription(description);
-    setUpdatedCrossPanelSync(crossPanelSync);
   };
 
   return (
@@ -188,28 +156,6 @@ function GeneralDashboardSettings(): JSX.Element {
         </div>
       </Space>
     </Col>
-    <Col className="overview-settings">
-      <div className="cross-panel-sync-section">
-        <div className="cross-panel-sync-info">
-          <Typography className="cross-panel-sync-title">
-            Cross-Panel Sync
-          </Typography>
-          <Typography.Text className="cross-panel-sync-description">
-            Sync crosshair and tooltip across all the dashboard panels
-          </Typography.Text>
-        </div>
-        <Radio.Group
-          value={updatedCrossPanelSync}
-          onChange={(e): void =>
-            setUpdatedCrossPanelSync(e.target.value as CrossPanelSync)
-          }
-          optionType="button"
-          buttonStyle="solid"
-          options={crossPanelSyncOptions}
-          data-testid="cross-panel-sync"
-        />
-      </div>
-    </Col>
     {numberOfUnsavedChanges > 0 && (
       <div className="overview-settings-footer">
         <div className="unsaved">
@@ -0,0 +1,71 @@
.dashboard-breadcrumbs {
  width: 100%;
  height: 48px;
  display: flex;
  gap: 6px;
  align-items: center;
  max-width: 80%;

  .dashboard-btn {
    display: flex;
    align-items: center;
    color: var(--bg-vanilla-400);
    font-family: Inter;
    font-size: 14px;
    font-style: normal;
    font-weight: 400;
    line-height: 20px; /* 142.857% */
    letter-spacing: -0.07px;
    padding: 0px;
    height: 20px;
  }

  .dashboard-btn:hover {
    background-color: unset;
  }

  .id-btn {
    display: flex;
    align-items: center;
    gap: 4px;
    padding: 0px 2px;
    border-radius: 2px;
    background: rgba(113, 144, 249, 0.1);
    color: var(--bg-robin-400);
    font-family: Inter;
    font-size: 14px;
    font-style: normal;
    font-weight: 400;
    line-height: 20px; /* 142.857% */
    height: 20px;

    max-width: calc(100% - 120px);

    span {
      white-space: nowrap;
      overflow: hidden;
      text-overflow: ellipsis;
    }

    .ant-btn-icon {
      margin-inline-end: 4px;
    }
  }
  .id-btn:hover {
    background: rgba(113, 144, 249, 0.1);
    color: var(--bg-robin-300);
  }

  .dashboard-icon-image {
    height: 14px;
    width: 14px;
  }
}

.lightMode {
  .dashboard-breadcrumbs {
    .dashboard-btn {
      color: var(--bg-ink-400);
    }
  }
}
@@ -0,0 +1,55 @@
import { useCallback } from 'react';
import { Button } from 'antd';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { LayoutGrid } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { DashboardData } from 'types/api/dashboard/getAll';

import { Base64Icons } from '../../DashboardSettings/General/utils';

import './DashboardBreadcrumbs.styles.scss';

function DashboardBreadcrumbs(): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const { selectedDashboard, listSortOrder } = useDashboard();

  const selectedData = selectedDashboard
    ? {
        ...selectedDashboard.data,
        uuid: selectedDashboard.id,
      }
    : ({} as DashboardData);

  const { title = '', image = Base64Icons[0] } = selectedData || {};

  const goToListPage = useCallback(() => {
    const urlParams = new URLSearchParams();
    urlParams.set('columnKey', listSortOrder.columnKey as string);
    urlParams.set('order', listSortOrder.order as string);
    urlParams.set('page', listSortOrder.pagination as string);
    urlParams.set('search', listSortOrder.search as string);

    const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
    safeNavigate(generatedUrl);
  }, [listSortOrder, safeNavigate]);

  return (
    <div className="dashboard-breadcrumbs">
      <Button
        type="text"
        icon={<LayoutGrid size={14} />}
        className="dashboard-btn"
        onClick={goToListPage}
      >
        Dashboard /
      </Button>
      <Button type="text" className="id-btn dashboard-name-btn">
        <img src={image} alt="dashboard-icon" className="dashboard-icon-image" />
        {title}
      </Button>
    </div>
  );
}

export default DashboardBreadcrumbs;
@@ -0,0 +1,15 @@
.dashboard-header {
  border-bottom: 1px solid var(--bg-slate-400);
  display: flex;
  justify-content: space-between;
  gap: 16px;
  align-items: center;
  padding: 0 8px;
  box-sizing: border-box;
}

.lightMode {
  .dashboard-header {
    border-bottom: 1px solid var(--bg-vanilla-300);
  }
}
@@ -0,0 +1,17 @@
import { memo } from 'react';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';

import DashboardBreadcrumbs from './DashboardBreadcrumbs';

import './DashboardHeader.styles.scss';

function DashboardHeader(): JSX.Element {
  return (
    <div className="dashboard-header">
      <DashboardBreadcrumbs />
      <HeaderRightSection enableAnnouncements={false} enableShare enableFeedback />
    </div>
  );
}

export default memo(DashboardHeader);
@@ -23,6 +23,7 @@ export default function ChartWrapper({
   width: containerWidth,
   height: containerHeight,
   showTooltip = true,
+  showLegend = true,
   canPinTooltip = false,
   syncMode,
   syncKey,
@@ -36,6 +37,9 @@ export default function ChartWrapper({
 
   const legendComponent = useCallback(
     (averageLegendWidth: number): React.ReactNode => {
+      if (!showLegend) {
+        return null;
+      }
       return (
         <Legend
           config={config}
@@ -44,7 +48,7 @@ export default function ChartWrapper({
         />
       );
     },
-    [config, legendConfig.position],
+    [config, legendConfig.position, showLegend],
   );
 
   const renderTooltipCallback = useCallback(
@@ -60,6 +64,7 @@ export default function ChartWrapper({
   return (
     <PlotContextProvider>
       <ChartLayout
+        showLegend={showLegend}
         config={config}
         containerWidth={containerWidth}
         containerHeight={containerHeight}
@@ -0,0 +1,55 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
  HistogramTooltipProps,
  TooltipRenderArgs,
} from 'lib/uPlotV2/components/types';

import { HistogramChartProps } from '../types';

export default function Histogram(props: HistogramChartProps): JSX.Element {
  const {
    children,
    renderTooltip: customRenderTooltip,
    isQueriesMerged,
    ...rest
  } = props;

  const renderTooltip = useCallback(
    (props: TooltipRenderArgs): React.ReactNode => {
      if (customRenderTooltip) {
        return customRenderTooltip(props);
      }
      const content = buildTooltipContent({
        data: props.uPlotInstance.data,
        series: props.uPlotInstance.series,
        dataIndexes: props.dataIndexes,
        activeSeriesIndex: props.seriesIndex,
        uPlotInstance: props.uPlotInstance,
        yAxisUnit: rest.yAxisUnit ?? '',
        decimalPrecision: rest.decimalPrecision,
      });
      const tooltipProps: HistogramTooltipProps = {
        ...props,
        timezone: rest.timezone,
        yAxisUnit: rest.yAxisUnit,
        decimalPrecision: rest.decimalPrecision,
        content,
      };
      return <HistogramTooltip {...tooltipProps} />;
    },
    [customRenderTooltip, rest.timezone, rest.yAxisUnit, rest.decimalPrecision],
  );

  return (
    <ChartWrapper
      showLegend={!isQueriesMerged}
      {...rest}
      renderTooltip={renderTooltip}
    >
      {children}
    </ChartWrapper>
  );
}
@@ -1,12 +1,13 @@
 import { PrecisionOption } from 'components/Graph/types';
 import { LegendConfig, TooltipRenderArgs } from 'lib/uPlotV2/components/types';
 import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
-import { CrossPanelSync } from 'types/api/dashboard/getAll';
+import { DashboardCursorSync } from 'lib/uPlotV2/plugins/TooltipPlugin/types';
 
 interface BaseChartProps {
   width: number;
   height: number;
   showTooltip?: boolean;
+  showLegend?: boolean;
   timezone: string;
   canPinTooltip?: boolean;
   yAxisUnit?: string;
@@ -17,7 +18,8 @@ interface BaseChartProps {
 interface UPlotBasedChartProps {
   config: UPlotConfigBuilder;
   data: uPlot.AlignedData;
-  syncMode?: CrossPanelSync;
+  legendConfig: LegendConfig;
+  syncMode?: DashboardCursorSync;
   syncKey?: string;
   plotRef?: (plot: uPlot | null) => void;
   onDestroy?: (plot: uPlot) => void;
@@ -26,14 +28,20 @@ interface UPlotBasedChartProps {
 }
 
 export interface TimeSeriesChartProps
   extends BaseChartProps,
     UPlotBasedChartProps {}
 
+export interface HistogramChartProps
+  extends BaseChartProps,
+    UPlotBasedChartProps {
+  legendConfig: LegendConfig;
+  isQueriesMerged?: boolean;
+}
+
 export interface BarChartProps extends BaseChartProps, UPlotBasedChartProps {
   legendConfig: LegendConfig;
   isStackedBarChart?: boolean;
 }
 
-export type ChartProps = TimeSeriesChartProps | BarChartProps;
+export type ChartProps =
+  | TimeSeriesChartProps
+  | BarChartProps
+  | HistogramChartProps;
@@ -0,0 +1,69 @@
import { renderHook } from '@testing-library/react';
import { useDashboard } from 'providers/Dashboard/Dashboard';

import { useScrollWidgetIntoView } from '../useScrollWidgetIntoView';

jest.mock('providers/Dashboard/Dashboard');

type MockHTMLElement = {
  scrollIntoView: jest.Mock;
  focus: jest.Mock;
};

function createMockElement(): MockHTMLElement {
  return {
    scrollIntoView: jest.fn(),
    focus: jest.fn(),
  };
}

describe('useScrollWidgetIntoView', () => {
  const mockedUseDashboard = useDashboard as jest.MockedFunction<
    typeof useDashboard
  >;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('scrolls into view and focuses when toScrollWidgetId matches widget id', () => {
    const setToScrollWidgetId = jest.fn();
    const mockElement = createMockElement();
    const ref = ({
      current: mockElement,
    } as unknown) as React.RefObject<HTMLDivElement>;

    mockedUseDashboard.mockReturnValue(({
      toScrollWidgetId: 'widget-id',
      setToScrollWidgetId,
    } as unknown) as ReturnType<typeof useDashboard>);

    renderHook(() => useScrollWidgetIntoView('widget-id', ref));

    expect(mockElement.scrollIntoView).toHaveBeenCalledWith({
      behavior: 'smooth',
      block: 'center',
    });
    expect(mockElement.focus).toHaveBeenCalled();
    expect(setToScrollWidgetId).toHaveBeenCalledWith('');
  });

  it('does nothing when toScrollWidgetId does not match widget id', () => {
    const setToScrollWidgetId = jest.fn();
    const mockElement = createMockElement();
    const ref = ({
      current: mockElement,
    } as unknown) as React.RefObject<HTMLDivElement>;

    mockedUseDashboard.mockReturnValue(({
      toScrollWidgetId: 'other-widget',
      setToScrollWidgetId,
    } as unknown) as ReturnType<typeof useDashboard>);

    renderHook(() => useScrollWidgetIntoView('widget-id', ref));

    expect(mockElement.scrollIntoView).not.toHaveBeenCalled();
    expect(mockElement.focus).not.toHaveBeenCalled();
    expect(setToScrollWidgetId).not.toHaveBeenCalled();
  });
});
@@ -0,0 +1,26 @@
import { RefObject, useEffect } from 'react';
import { useDashboard } from 'providers/Dashboard/Dashboard';

/**
 * Scrolls the given widget container into view when the dashboard
 * requests it via `toScrollWidgetId`.
 *
 * Intended for use in panel components that render a single widget.
 */
export function useScrollWidgetIntoView<T extends HTMLElement>(
  widgetId: string,
  widgetContainerRef: RefObject<T>,
): void {
  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();

  useEffect(() => {
    if (toScrollWidgetId === widgetId) {
      widgetContainerRef.current?.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
      widgetContainerRef.current?.focus();
      setToScrollWidgetId('');
    }
  }, [toScrollWidgetId, setToScrollWidgetId, widgetId, widgetContainerRef]);
}
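For reference, a minimal sketch of how a panel would consume this hook, mirroring the BarPanel/TimeSeriesPanel changes later in this diff (the `ExamplePanel` component and its prop are hypothetical; the import path and hook signature come from the files above):

```tsx
import { useRef } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';

// Hypothetical minimal panel; the real panels in this diff wire the hook
// the same way against their widget id and container ref.
function ExamplePanel({ widgetId }: { widgetId: string }): JSX.Element {
  const graphRef = useRef<HTMLDivElement>(null);

  // When the dashboard sets toScrollWidgetId to this widget's id, the
  // container scrolls into view, takes focus, and the id is cleared.
  useScrollWidgetIntoView(widgetId, graphRef);

  // tabIndex is needed for focus() to land on a plain div.
  return <div className="panel-container" ref={graphRef} tabIndex={-1} />;
}
```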
@@ -1,12 +1,14 @@
 import { useMemo } from 'react';
 import cx from 'classnames';
 import { calculateChartDimensions } from 'container/DashboardContainer/visualization/charts/utils';
+import { MAX_LEGEND_WIDTH } from 'lib/uPlotV2/components/Legend/Legend';
 import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
 import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
 
 import './ChartLayout.styles.scss';
 
 export interface ChartLayoutProps {
+  showLegend?: boolean;
   legendComponent: (legendPerSet: number) => React.ReactNode;
   children: (props: {
     chartWidth: number;
@@ -20,6 +22,7 @@ export interface ChartLayoutProps {
   config: UPlotConfigBuilder;
 }
 export default function ChartLayout({
+  showLegend = true,
   legendComponent,
   children,
   layoutChildren,
@@ -30,6 +33,15 @@ export default function ChartLayout({
 }: ChartLayoutProps): JSX.Element {
   const chartDimensions = useMemo(
     () => {
+      if (!showLegend) {
+        return {
+          width: containerWidth,
+          height: containerHeight,
+          legendWidth: 0,
+          legendHeight: 0,
+          averageLegendWidth: MAX_LEGEND_WIDTH,
+        };
+      }
       const legendItemsMap = config.getLegendItems();
       const seriesLabels = Object.values(legendItemsMap)
         .map((item) => item.label)
@@ -42,7 +54,7 @@ export default function ChartLayout({
       });
     },
     // eslint-disable-next-line react-hooks/exhaustive-deps
-    [containerWidth, containerHeight, legendConfig],
+    [containerWidth, containerHeight, legendConfig, showLegend],
   );
 
   return (
@@ -60,15 +72,17 @@ export default function ChartLayout({
         averageLegendWidth: chartDimensions.averageLegendWidth,
       })}
       </div>
-      <div
-        className="chart-layout__legend-wrapper"
-        style={{
-          height: chartDimensions.legendHeight,
-          width: chartDimensions.legendWidth,
-        }}
-      >
-        {legendComponent(chartDimensions.averageLegendWidth)}
-      </div>
+      {showLegend && (
+        <div
+          className="chart-layout__legend-wrapper"
+          style={{
+            height: chartDimensions.legendHeight,
+            width: chartDimensions.legendWidth,
+          }}
+        >
+          {legendComponent(chartDimensions.averageLegendWidth)}
+        </div>
+      )}
     </div>
     {layoutChildren}
   </div>
@@ -1,10 +1,10 @@
 import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
+import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
 import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useResizeObserver } from 'hooks/useDimensions';
 import { LegendPosition } from 'lib/uPlotV2/components/types';
 import ContextMenu from 'periscope/components/ContextMenu';
-import { useDashboard } from 'providers/Dashboard/Dashboard';
 import { useTimezone } from 'providers/Timezone';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
 import uPlot from 'uplot';
@@ -13,7 +13,6 @@ import { getTimeRange } from 'utils/getTimeRange';
 import BarChart from '../../charts/BarChart/BarChart';
 import ChartManager from '../../components/ChartManager/ChartManager';
 import { usePanelContextMenu } from '../../hooks/usePanelContextMenu';
-import { PanelMode } from '../types';
 import { prepareBarPanelConfig, prepareBarPanelData } from './utils';
 
 import '../Panel.styles.scss';
@@ -28,11 +27,6 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
     onToggleModelHandler,
   } = props;
   const uPlotRef = useRef<uPlot | null>(null);
-  const {
-    toScrollWidgetId,
-    setToScrollWidgetId,
-    selectedDashboard,
-  } = useDashboard();
   const graphRef = useRef<HTMLDivElement>(null);
   const [minTimeScale, setMinTimeScale] = useState<number>();
   const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -41,16 +35,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
   const isDarkMode = useIsDarkMode();
   const { timezone } = useTimezone();
 
-  useEffect(() => {
-    if (toScrollWidgetId === widget.id) {
-      graphRef.current?.scrollIntoView({
-        behavior: 'smooth',
-        block: 'center',
-      });
-      graphRef.current?.focus();
-      setToScrollWidgetId('');
-    }
-  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
+  useScrollWidgetIntoView(widget.id, graphRef);
 
   useEffect((): void => {
     const { startTime, endTime } = getTimeRange(queryResponse);
@@ -122,15 +107,6 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
     onToggleModelHandler,
   ]);
 
-  const crossPanelSync = selectedDashboard?.data?.crossPanelSync ?? 'NONE';
-
-  const cursorSyncMode = useMemo(() => {
-    if (panelMode !== PanelMode.DASHBOARD_VIEW) {
-      return 'NONE';
-    }
-    return crossPanelSync;
-  }, [panelMode, crossPanelSync]);
-
   const onPlotDestroy = useCallback(() => {
     uPlotRef.current = null;
   }, []);
@@ -151,7 +127,6 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
       onDestroy={onPlotDestroy}
      yAxisUnit={widget.yAxisUnit}
      decimalPrecision={widget.decimalPrecision}
-     syncMode={cursorSyncMode}
      timezone={timezone.value}
      data={chartData as uPlot.AlignedData}
      width={containerDimensions.width}
@@ -0,0 +1,114 @@
import { useMemo, useRef } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { DashboardCursorSync } from 'lib/uPlotV2/plugins/TooltipPlugin/types';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';

import Histogram from '../../charts/Histogram/Histogram';
import ChartManager from '../../components/ChartManager/ChartManager';
import {
  prepareHistogramPanelConfig,
  prepareHistogramPanelData,
} from './utils';

import '../Panel.styles.scss';

function HistogramPanel(props: PanelWrapperProps): JSX.Element {
  const {
    panelMode,
    queryResponse,
    widget,
    isFullViewMode,
    onToggleModelHandler,
  } = props;
  const uPlotRef = useRef<uPlot | null>(null);
  const graphRef = useRef<HTMLDivElement>(null);
  const containerDimensions = useResizeObserver(graphRef);

  const isDarkMode = useIsDarkMode();
  const { timezone } = useTimezone();

  useScrollWidgetIntoView(widget.id, graphRef);

  const config = useMemo(() => {
    return prepareHistogramPanelConfig({
      widget,
      isDarkMode,
      apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
      panelMode,
    });
  }, [widget, isDarkMode, queryResponse?.data?.payload, panelMode]);

  const chartData = useMemo(() => {
    if (!queryResponse?.data?.payload) {
      return [];
    }
    return prepareHistogramPanelData({
      apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
      bucketWidth: widget?.bucketWidth,
      bucketCount: widget?.bucketCount,
      mergeAllActiveQueries: widget?.mergeAllActiveQueries,
    });
  }, [
    queryResponse?.data?.payload,
    widget?.bucketWidth,
    widget?.bucketCount,
    widget?.mergeAllActiveQueries,
  ]);

  const layoutChildren = useMemo(() => {
    if (!isFullViewMode || widget.mergeAllActiveQueries) {
      return null;
    }
    return (
      <ChartManager
        config={config}
        alignedData={chartData}
        yAxisUnit={widget.yAxisUnit}
        onCancel={onToggleModelHandler}
      />
    );
  }, [
    isFullViewMode,
    config,
    chartData,
    widget.yAxisUnit,
    onToggleModelHandler,
    widget.mergeAllActiveQueries,
  ]);

  return (
    <div className="panel-container" ref={graphRef}>
      {containerDimensions.width > 0 && containerDimensions.height > 0 && (
        <Histogram
          config={config}
          legendConfig={{
            position: widget?.legendPosition ?? LegendPosition.BOTTOM,
          }}
          plotRef={(plot: uPlot | null): void => {
            uPlotRef.current = plot;
          }}
          onDestroy={(): void => {
            uPlotRef.current = null;
          }}
          isQueriesMerged={widget.mergeAllActiveQueries}
          yAxisUnit={widget.yAxisUnit}
          decimalPrecision={widget.decimalPrecision}
          syncMode={DashboardCursorSync.Crosshair}
          timezone={timezone.value}
          data={chartData as uPlot.AlignedData}
          width={containerDimensions.width}
          height={containerDimensions.height}
          layoutChildren={layoutChildren}
        />
      )}
    </div>
  );
}

export default HistogramPanel;
@@ -0,0 +1,223 @@
/* eslint-disable simple-import-sort/imports */
import type { UseQueryResult } from 'react-query';
import { render, screen } from 'tests/test-utils';

import { PanelMode } from 'container/DashboardContainer/visualization/panels/types';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { Widgets } from 'types/api/dashboard/getAll';
import {
  MetricQueryRangeSuccessResponse,
  MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';

import HistogramPanel from '../HistogramPanel';
import { HistogramChartProps } from 'container/DashboardContainer/visualization/charts/types';

jest.mock('hooks/useDimensions', () => ({
  useResizeObserver: jest.fn().mockReturnValue({ width: 800, height: 400 }),
}));

jest.mock('hooks/useDarkMode', () => ({
  useIsDarkMode: jest.fn().mockReturnValue(false),
}));

jest.mock('providers/Timezone', () => ({
  __esModule: true,
  // Provide a no-op provider component so AllTheProviders can render
  default: ({ children }: { children: React.ReactNode }): JSX.Element => (
    <>{children}</>
  ),
  // And mock the hook used by HistogramPanel
  useTimezone: jest.fn().mockReturnValue({
    timezone: { value: 'UTC' },
  }),
}));

jest.mock(
  'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView',
  () => ({
    useScrollWidgetIntoView: jest.fn(),
  }),
);

jest.mock(
  'container/DashboardContainer/visualization/charts/Histogram/Histogram',
  () => ({
    __esModule: true,
    default: (props: HistogramChartProps): JSX.Element => (
      <div data-testid="histogram-chart">
        <div data-testid="histogram-props">
          {JSON.stringify({
            legendPosition: props.legendConfig?.position,
            isQueriesMerged: props.isQueriesMerged,
            yAxisUnit: props.yAxisUnit,
            decimalPrecision: props.decimalPrecision,
          })}
        </div>
        {props.layoutChildren}
      </div>
    ),
  }),
);

jest.mock(
  'container/DashboardContainer/visualization/components/ChartManager/ChartManager',
  () => ({
    __esModule: true,
    default: (): JSX.Element => (
      <div data-testid="chart-manager">ChartManager</div>
    ),
  }),
);

function createQueryResponse(
  payloadOverrides: Partial<MetricRangePayloadProps> = {},
): { data: { payload: MetricRangePayloadProps } } {
  const basePayload: MetricRangePayloadProps = {
    data: {
      result: [
        {
          metric: {},
          queryName: 'A',
          legend: 'Series A',
          values: [
            [1, '10'],
            [2, '20'],
          ],
        },
      ],
      resultType: 'matrix',
      newResult: {
        data: {
          result: [],
          resultType: 'matrix',
        },
      },
    },
  };

  return {
    data: {
      payload: {
        ...basePayload,
        ...payloadOverrides,
      },
    },
  };
}

type WidgetLike = {
  id: string;
  yAxisUnit: string;
  decimalPrecision: number;
  legendPosition: LegendPosition;
  mergeAllActiveQueries: boolean;
};

function createWidget(overrides: Partial<WidgetLike> = {}): WidgetLike {
  return {
    id: 'widget-id',
    yAxisUnit: 'ms',
    decimalPrecision: 2,
    legendPosition: LegendPosition.BOTTOM,
    mergeAllActiveQueries: false,
    ...overrides,
  };
}

describe('HistogramPanel', () => {
  it('renders Histogram when container has dimensions', () => {
    const widget = (createWidget() as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode={false}
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    expect(screen.getByTestId('histogram-chart')).toBeInTheDocument();
  });

  it('passes legend position and other props to Histogram', () => {
    const widget = (createWidget({
      legendPosition: LegendPosition.RIGHT,
    }) as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode={false}
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    const propsJson = screen.getByTestId('histogram-props').textContent || '{}';
    const parsed = JSON.parse(propsJson);

    expect(parsed.legendPosition).toBe(LegendPosition.RIGHT);
    expect(parsed.yAxisUnit).toBe('ms');
    expect(parsed.decimalPrecision).toBe(2);
  });

  it('renders ChartManager in full view when queries are not merged', () => {
    const widget = (createWidget({
      mergeAllActiveQueries: false,
    }) as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    expect(screen.getByTestId('chart-manager')).toBeInTheDocument();
  });

  it('does not render ChartManager when queries are merged', () => {
    const widget = (createWidget({
      mergeAllActiveQueries: true,
    }) as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    expect(screen.queryByTestId('chart-manager')).not.toBeInTheDocument();
  });
});
@@ -0,0 +1,231 @@
import { histogramBucketSizes } from '@grafana/data';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import { DrawStyle } from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { AlignedData } from 'uplot';
import { incrRoundDn, roundDecimals } from 'utils/round';

import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';
import {
  buildHistogramBuckets,
  mergeAlignedDataTables,
  prependNullBinToFirstHistogramSeries,
  replaceUndefinedWithNullInAlignedData,
} from '../utils/histogram';

export interface PrepareHistogramPanelDataParams {
  apiResponse: MetricRangePayloadProps;
  bucketWidth?: number;
  bucketCount?: number;
  mergeAllActiveQueries?: boolean;
}

const BUCKET_OFFSET = 0;
const HIST_SORT = (a: number, b: number): number => a - b;

function extractNumericValues(
  result: MetricRangePayloadProps['data']['result'],
): number[] {
  const values: number[] = [];
  for (const item of result) {
    for (const [, valueStr] of item.values) {
      values.push(Number.parseFloat(valueStr) || 0);
    }
  }
  return values;
}

function computeSmallestDelta(sortedValues: number[]): number {
  if (sortedValues.length <= 1) {
    return 0;
  }
  let smallest = Infinity;
  for (let i = 1; i < sortedValues.length; i++) {
    const delta = sortedValues[i] - sortedValues[i - 1];
    if (delta > 0) {
      smallest = Math.min(smallest, delta);
    }
  }
  return smallest === Infinity ? 0 : smallest;
}

function selectBucketSize({
  range,
  bucketCount,
  smallestDelta,
  bucketWidthOverride,
}: {
  range: number;
  bucketCount: number;
  smallestDelta: number;
  bucketWidthOverride?: number;
}): number {
  if (bucketWidthOverride != null && bucketWidthOverride > 0) {
    return bucketWidthOverride;
  }
  const targetSize = range / bucketCount;
  for (const candidate of histogramBucketSizes) {
    if (targetSize < candidate && candidate >= smallestDelta) {
      return candidate;
    }
  }
  return 0;
}

function buildFrames(
  result: MetricRangePayloadProps['data']['result'],
  mergeAllActiveQueries: boolean,
): number[][] {
  const frames: number[][] = result.map((item) =>
    item.values.map(([, valueStr]) => Number.parseFloat(valueStr) || 0),
  );
  if (mergeAllActiveQueries && frames.length > 1) {
    const first = frames[0];
    for (let i = 1; i < frames.length; i++) {
      first.push(...frames[i]);
      frames[i] = [];
    }
  }
  return frames;
}

export function prepareHistogramPanelData({
  apiResponse,
  bucketWidth,
  bucketCount: bucketCountProp = DEFAULT_BUCKET_COUNT,
  mergeAllActiveQueries = false,
}: PrepareHistogramPanelDataParams): AlignedData {
  const bucketCount = bucketCountProp ?? DEFAULT_BUCKET_COUNT;
  const result = apiResponse.data.result;

  const seriesValues = extractNumericValues(result);
  if (seriesValues.length === 0) {
    return [[]];
  }

  const sorted = [...seriesValues].sort((a, b) => a - b);
  const min = sorted[0];
  const max = sorted[sorted.length - 1];
  const range = max - min;
  const smallestDelta = computeSmallestDelta(sorted);
  let bucketSize = selectBucketSize({
    range,
    bucketCount,
    smallestDelta,
    bucketWidthOverride: bucketWidth,
  });
  if (bucketSize <= 0) {
    bucketSize = range > 0 ? range / bucketCount : 1;
  }

  const getBucket = (v: number): number =>
    roundDecimals(incrRoundDn(v - BUCKET_OFFSET, bucketSize) + BUCKET_OFFSET, 9);

  const frames = buildFrames(result, mergeAllActiveQueries);
  const histogramsPerSeries: AlignedData[] = frames
    .filter((frame) => frame.length > 0)
    .map((frame) => buildHistogramBuckets(frame, getBucket, HIST_SORT));

  if (histogramsPerSeries.length === 0) {
    return [[]];
  }

  const mergedHistogramData = mergeAlignedDataTables(histogramsPerSeries);
  replaceUndefinedWithNullInAlignedData(mergedHistogramData);
  prependNullBinToFirstHistogramSeries(mergedHistogramData, bucketSize);
  return mergedHistogramData;
}

export function prepareHistogramPanelConfig({
  widget,
  apiResponse,
  panelMode,
  isDarkMode,
}: {
  widget: Widgets;
  apiResponse: MetricRangePayloadProps;
  panelMode: PanelMode;
  isDarkMode: boolean;
}): UPlotConfigBuilder {
  const builder = buildBaseConfig({
    widget,
    isDarkMode,
    apiResponse,
    panelMode,
    panelType: PANEL_TYPES.HISTOGRAM,
  });
  builder.setCursor({
    drag: {
      x: false,
      y: false,
      setScale: true,
    },
    focus: {
      prox: 1e3,
    },
  });

  builder.addScale({
    scaleKey: 'x',
    time: false,
    auto: true,
  });
  builder.addScale({
    scaleKey: 'y',
    time: false,
    auto: true,
    min: 0,
  });

  const currentQuery = widget.query;
  const mergeAllActiveQueries = widget?.mergeAllActiveQueries ?? false;

  // When merged, data has only one y column; add one series to match. Otherwise add one per result.
  if (mergeAllActiveQueries) {
    builder.addSeries({
      label: '',
      scaleKey: 'y',
      drawStyle: DrawStyle.Bar,
      panelType: PANEL_TYPES.HISTOGRAM,
      colorMapping: widget.customLegendColors ?? {},
      spanGaps: false,
      barWidthFactor: 1,
      pointSize: 5,
      lineColor: '#3f5ecc',
      fillColor: '#4E74F8',
      isDarkMode,
    });
  } else {
    apiResponse.data.result.forEach((series) => {
      const baseLabelName = getLabelName(
        series.metric,
        series.queryName || '', // query
        series.legend || '',
      );

      const label = currentQuery
        ? getLegend(series, currentQuery, baseLabelName)
        : baseLabelName;

      builder.addSeries({
        label,
        scaleKey: 'y',
        drawStyle: DrawStyle.Bar,
        panelType: PANEL_TYPES.HISTOGRAM,
        colorMapping: widget.customLegendColors ?? {},
        spanGaps: false,
        barWidthFactor: 1,
        pointSize: 5,
        isDarkMode,
      });
    });
  }

  return builder;
}
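For intuition, a small worked sketch of the bucketing arithmetic in `prepareHistogramPanelData`. The implementations of `incrRoundDn` and `roundDecimals` are not shown in this diff, so the stand-ins below are assumptions (round-down to a multiple of the increment, and round to n decimals):

```ts
// Hypothetical stand-ins for utils/round helpers (not from this diff).
const incrRoundDn = (v: number, incr: number): number =>
  Math.floor(v / incr) * incr;
const roundDecimals = (v: number, dec: number): number =>
  Number(v.toFixed(dec));

const BUCKET_OFFSET = 0;
const bucketSize = 10;

// Mirrors the getBucket closure above: values map to the left edge of a
// left-closed bucket [0, 10), [10, 20), ...
const getBucket = (v: number): number =>
  roundDecimals(incrRoundDn(v - BUCKET_OFFSET, bucketSize) + BUCKET_OFFSET, 9);

console.log(getBucket(7)); // 0
console.log(getBucket(10)); // 10
console.log(getBucket(36)); // 30
```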
@@ -2,19 +2,18 @@ import { useEffect, useMemo, useRef, useState } from 'react';
 import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
 import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
 import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
+import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
 import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 import { useResizeObserver } from 'hooks/useDimensions';
 import { LegendPosition } from 'lib/uPlotV2/components/types';
 import { ContextMenu } from 'periscope/components/ContextMenu';
-import { useDashboard } from 'providers/Dashboard/Dashboard';
 import { useTimezone } from 'providers/Timezone';
 import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
 import uPlot from 'uplot';
 import { getTimeRange } from 'utils/getTimeRange';
 
 import { prepareChartData, prepareUPlotConfig } from '../TimeSeriesPanel/utils';
-import { PanelMode } from '../types';
 
 import '../Panel.styles.scss';
 
@@ -27,11 +26,6 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
   isFullViewMode,
   onToggleModelHandler,
 } = props;
-const {
-  toScrollWidgetId,
-  setToScrollWidgetId,
-  selectedDashboard,
-} = useDashboard();
 const graphRef = useRef<HTMLDivElement>(null);
 const [minTimeScale, setMinTimeScale] = useState<number>();
 const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -40,16 +34,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
 const isDarkMode = useIsDarkMode();
 const { timezone } = useTimezone();
 
-useEffect(() => {
-  if (toScrollWidgetId === widget.id) {
-    graphRef.current?.scrollIntoView({
-      behavior: 'smooth',
-      block: 'center',
-    });
-    graphRef.current?.focus();
-    setToScrollWidgetId('');
-  }
-}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
+useScrollWidgetIntoView(widget.id, graphRef);
 
 useEffect((): void => {
   const { startTime, endTime } = getTimeRange(queryResponse);
@@ -101,15 +86,6 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
   timezone,
 ]);
 
-const crossPanelSync = selectedDashboard?.data?.crossPanelSync ?? 'NONE';
-
-const cursorSyncMode = useMemo(() => {
-  if (panelMode !== PanelMode.DASHBOARD_VIEW) {
-    return 'NONE';
-  }
-  return crossPanelSync;
-}, [panelMode, crossPanelSync]);
-
 const layoutChildren = useMemo(() => {
   if (!isFullViewMode) {
     return null;
@@ -140,7 +116,6 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
     }}
     yAxisUnit={widget.yAxisUnit}
     decimalPrecision={widget.decimalPrecision}
-    syncMode={cursorSyncMode}
     timezone={timezone.value}
     data={chartData as uPlot.AlignedData}
    width={containerDimensions.width}
@@ -19,9 +19,9 @@ export interface BaseConfigBuilderProps {
  widget: Widgets;
  apiResponse: MetricRangePayloadProps;
  isDarkMode: boolean;
  onClick: OnClickPluginOpts['onClick'];
  onDragSelect: (startTime: number, endTime: number) => void;
  timezone: Timezone;
  onClick?: OnClickPluginOpts['onClick'];
  onDragSelect?: (startTime: number, endTime: number) => void;
  timezone?: Timezone;
  panelMode: PanelMode;
  panelType: PANEL_TYPES;
  minTimeScale?: number;
@@ -40,8 +40,10 @@ export function buildBaseConfig({
  minTimeScale,
  maxTimeScale,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
  const tzDate = (timestamp: number): Date =>
    uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
  const tzDate = timezone
    ? (timestamp: number): Date =>
        uPlot.tzDate(new Date(timestamp * 1e3), timezone.value)
    : undefined;

  const builder = new UPlotConfigBuilder({
    onDragSelect,
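Two details in the hunk above are easy to miss: the `* 1e3` converts epoch-second timestamps to the milliseconds `Date` expects, and the whole `tzDate` becomes `undefined` when no `timezone` is supplied, in which case uPlot falls back to its default local-time date handling. A minimal sketch (the zone string is illustrative, not from the diff):

    import uPlot from 'uplot';

    // Re-interpret an epoch-seconds timestamp in a fixed IANA timezone
    // so axis ticks and cursor readouts render in that zone.
    const tzDate = (ts: number): Date =>
      uPlot.tzDate(new Date(ts * 1e3), 'America/New_York');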
@@ -0,0 +1,225 @@
import {
  NULL_EXPAND,
  NULL_REMOVE,
  NULL_RETAIN,
} from 'container/PanelWrapper/constants';
import { AlignedData } from 'uplot';

/**
 * Expands contiguous runs of `null` values to the left and right of their
 * original positions so that visual gaps in the series are continuous.
 *
 * This is used when `NULL_EXPAND` mode is selected while joining series.
 */
function propagateNullsAcrossNeighbors(
  seriesValues: Array<number | null>,
  nullIndices: number[],
  alignedLength: number,
): void {
  for (
    let i = 0, currentIndex, lastExpandedNullIndex = -1;
    i < nullIndices.length;
    i++
  ) {
    const nullIndex = nullIndices[i];

    if (nullIndex > lastExpandedNullIndex) {
      // expand left until we hit a non-null value
      currentIndex = nullIndex - 1;
      while (currentIndex >= 0 && seriesValues[currentIndex] == null) {
        seriesValues[currentIndex--] = null;
      }

      // expand right until we hit a non-null value
      currentIndex = nullIndex + 1;
      while (currentIndex < alignedLength && seriesValues[currentIndex] == null) {
        seriesValues[(lastExpandedNullIndex = currentIndex++)] = null;
      }
    }
  }
}

/**
 * Merges multiple uPlot `AlignedData` tables into a single aligned table.
 *
 * - Merges and sorts all distinct x-values from each table.
 * - Re-aligns every series onto the merged x-axis.
 * - Applies per-series null handling (`NULL_REMOVE`, `NULL_RETAIN`, `NULL_EXPAND`).
 */
/* eslint-disable sonarjs/cognitive-complexity */
export function mergeAlignedDataTables(
  alignedTables: AlignedData[],
  nullModes?: number[][],
): AlignedData {
  let mergedXValues: Set<number>;

  // eslint-disable-next-line prefer-const
  mergedXValues = new Set();

  // Collect all unique x-values from every table.
  for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
    const table = alignedTables[tableIndex];
    const xValues = table[0];
    const xLength = xValues.length;

    for (let i = 0; i < xLength; i++) {
      mergedXValues.add(xValues[i]);
    }
  }

  // Sorted, merged x-axis used by the final result.
  const alignedData: (number | null | undefined)[][] = [
    Array.from(mergedXValues).sort((a, b) => a - b),
  ];

  const alignedLength = alignedData[0].length;

  // Map from x-value to its index in the merged x-axis.
  const xValueToIndexMap = new Map<number, number>();

  for (let i = 0; i < alignedLength; i++) {
    xValueToIndexMap.set(alignedData[0][i] as number, i);
  }

  // Re-align all series from all tables onto the merged x-axis.
  for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
    const table = alignedTables[tableIndex];
    const xValues = table[0];

    for (let seriesIndex = 1; seriesIndex < table.length; seriesIndex++) {
      const seriesValues = table[seriesIndex];

      const alignedSeriesValues = Array(alignedLength).fill(undefined);

      const nullHandlingMode = nullModes
        ? nullModes[tableIndex][seriesIndex]
        : NULL_RETAIN;

      const nullIndices: number[] = [];

      for (let i = 0; i < seriesValues.length; i++) {
        const valueAtPoint = seriesValues[i];
        const alignedIndex = xValueToIndexMap.get(xValues[i]);

        if (alignedIndex == null) {
          continue;
        }

        if (valueAtPoint === null) {
          if (nullHandlingMode !== NULL_REMOVE) {
            alignedSeriesValues[alignedIndex] = valueAtPoint;

            if (nullHandlingMode === NULL_EXPAND) {
              nullIndices.push(alignedIndex);
            }
          }
        } else {
          alignedSeriesValues[alignedIndex] = valueAtPoint;
        }
      }

      // Optionally expand nulls to visually preserve gaps.
      propagateNullsAcrossNeighbors(
        alignedSeriesValues,
        nullIndices,
        alignedLength,
      );

      alignedData.push(alignedSeriesValues);
    }
  }

  return alignedData as AlignedData;
}

/**
 * Builds histogram buckets from raw values.
 *
 * - Each value is mapped into a bucket via `getBucketForValue`.
 * - Counts how many values fall into each bucket.
 * - Optionally sorts buckets using the provided comparator.
 */
export function buildHistogramBuckets(
  values: number[],
  getBucketForValue: (value: number) => number,
  sortBuckets?: ((a: number, b: number) => number) | null,
): AlignedData {
  const bucketMap = new Map<number, { value: number; count: number }>();

  for (let i = 0; i < values.length; i++) {
    let value = values[i];

    if (value != null) {
      value = getBucketForValue(value);
    }

    const bucket = bucketMap.get(value);

    if (bucket) {
      bucket.count++;
    } else {
      bucketMap.set(value, { value, count: 1 });
    }
  }

  const buckets = [...bucketMap.values()];

  // eslint-disable-next-line @typescript-eslint/no-unused-expressions
  sortBuckets && buckets.sort((a, b) => sortBuckets(a.value, b.value));

  const bucketValues = Array(buckets.length);
  const bucketCounts = Array(buckets.length);

  for (let i = 0; i < buckets.length; i++) {
    bucketValues[i] = buckets[i].value;
    bucketCounts[i] = buckets[i].count;
  }

  return [bucketValues, bucketCounts];
}

/**
 * Mutates an `AlignedData` instance, replacing all `undefined` entries
 * with explicit `null` values so uPlot treats them as gaps.
 */
export function replaceUndefinedWithNullInAlignedData(
  data: AlignedData,
): AlignedData {
  const seriesList = data as (number | null | undefined)[][];
  for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex++) {
    for (
      let pointIndex = 0;
      pointIndex < seriesList[seriesIndex].length;
      pointIndex++
    ) {
      if (seriesList[seriesIndex][pointIndex] === undefined) {
        seriesList[seriesIndex][pointIndex] = null;
      }
    }
  }
  return data;
}

/**
 * Ensures the first histogram series has a leading "empty" bin so that
 * all series line up visually when rendered as bars.
 *
 * - Prepends a new x-value (first x - `bucketSize`) to the first series.
 * - Prepends `null` to all subsequent series at the same index.
 */
export function prependNullBinToFirstHistogramSeries(
  alignedData: AlignedData,
  bucketSize: number,
): void {
  const seriesList = alignedData as (number | null)[][];
  if (
    seriesList.length > 0 &&
    seriesList[0].length > 0 &&
    seriesList[0][0] !== null
  ) {
    seriesList[0].unshift(seriesList[0][0] - bucketSize);
    for (let seriesIndex = 1; seriesIndex < seriesList.length; seriesIndex++) {
      seriesList[seriesIndex].unshift(null);
    }
  }
}
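To make the merge and bucketing semantics concrete, a small worked example (values invented; `NULL_RETAIN` is the default when `nullModes` is omitted):

    import { AlignedData } from 'uplot';

    const a: AlignedData = [[1, 2, 3], [10, null, 30]];
    const b: AlignedData = [[2, 3, 4], [5, 6, 7]];

    const merged = mergeAlignedDataTables([a, b]);
    // merged[0] -> [1, 2, 3, 4]              (sorted union of x-values)
    // merged[1] -> [10, null, 30, undefined] (x=4 is absent from table a)
    // merged[2] -> [undefined, 5, 6, 7]      (x=1 is absent from table b)
    // replaceUndefinedWithNullInAlignedData(merged) then converts the
    // undefined slots to explicit nulls so uPlot draws them as gaps.

    // Width-10 buckets; the mapper plays the role incrRoundDn(v, 10) would:
    const [bins, counts] = buildHistogramBuckets(
      [3, 7, 12, 15, 41],
      (v) => Math.floor(v / 10) * 10,
      (x, y) => x - y,
    );
    // bins   -> [0, 10, 40]
    // counts -> [2, 2, 1]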
@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import BarPanel from 'container/DashboardContainer/visualization/panels/BarPanel/BarPanel';
import HistogramPanel from 'container/DashboardContainer/visualization/panels/HistogramPanel/HistogramPanel';

import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
import HistogramPanelWrapper from './HistogramPanelWrapper';
import ListPanelWrapper from './ListPanelWrapper';
import PiePanelWrapper from './PiePanelWrapper';
import TablePanelWrapper from './TablePanelWrapper';
@@ -17,7 +17,7 @@ export const PanelTypeVsPanelWrapper = {
  [PANEL_TYPES.EMPTY_WIDGET]: null,
  [PANEL_TYPES.PIE]: PiePanelWrapper,
  [PANEL_TYPES.BAR]: BarPanel,
  [PANEL_TYPES.HISTOGRAM]: HistogramPanelWrapper,
  [PANEL_TYPES.HISTOGRAM]: HistogramPanel,
};

export const DEFAULT_BUCKET_COUNT = 30;
@@ -0,0 +1,8 @@
import { HistogramTooltipProps } from '../types';
import Tooltip from './Tooltip';

export default function HistogramTooltip(
  props: HistogramTooltipProps,
): JSX.Element {
  return <Tooltip {...props} showTooltipHeader={false} />;
}
@@ -16,12 +16,16 @@ export default function Tooltip({
  uPlotInstance,
  timezone,
  content,
  showTooltipHeader = true,
}: TooltipProps): JSX.Element {
  const isDarkMode = useIsDarkMode();

  const tooltipContent = content ?? [];

  const headerTitle = useMemo(() => {
    if (!showTooltipHeader) {
      return null;
    }
    const data = uPlotInstance.data;
    const cursorIdx = uPlotInstance.cursor.idx;
    if (cursorIdx == null) {
@@ -30,7 +34,12 @@
    return dayjs(data[0][cursorIdx] * 1000)
      .tz(timezone)
      .format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
  }, [timezone, uPlotInstance.data, uPlotInstance.cursor.idx]);
  }, [
    timezone,
    uPlotInstance.data,
    uPlotInstance.cursor.idx,
    showTooltipHeader,
  ]);

  return (
    <div
@@ -39,9 +48,11 @@
      isDarkMode ? 'darkMode' : 'lightMode',
    )}
  >
    <div className="uplot-tooltip-header">
      <span>{headerTitle}</span>
    </div>
    {showTooltipHeader && (
      <div className="uplot-tooltip-header">
        <span>{headerTitle}</span>
      </div>
    )}
    <div
      style={{
        height: Math.min(
@@ -60,6 +60,7 @@ export interface TooltipRenderArgs {
}

export interface BaseTooltipProps {
  showTooltipHeader?: boolean;
  timezone: string;
  yAxisUnit?: string;
  decimalPrecision?: PrecisionOption;
@@ -74,7 +75,14 @@ export interface BarTooltipProps extends BaseTooltipProps, TooltipRenderArgs {
  isStackedBarChart?: boolean;
}

export type TooltipProps = TimeSeriesTooltipProps | BarTooltipProps;
export interface HistogramTooltipProps
  extends BaseTooltipProps,
    TooltipRenderArgs {}

export type TooltipProps =
  | TimeSeriesTooltipProps
  | BarTooltipProps
  | HistogramTooltipProps;

export enum LegendPosition {
  BOTTOM = 'bottom',
@@ -387,7 +387,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
    } = this.getVisibilityResolutionState();

    config.series = [
      { value: (): string => '' }, // Base series for timestamp
      { value: (): string => '', label: 'Timestamp' }, // Base series for timestamp
      ...this.series.map((s) => {
        const series = s.getConfig();
        // Stored visibility[0] is x-axis/time; data series start at visibility[1]
@@ -49,7 +49,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
  }: {
    resolvedLineColor: string;
  }): Partial<Series> {
    const { lineWidth, lineStyle, lineCap } = this.props;
    const { lineWidth, lineStyle, lineCap, fillColor } = this.props;
    const lineConfig: Partial<Series> = {
      stroke: resolvedLineColor,
      width: lineWidth ?? 2,
@@ -63,8 +63,12 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
      lineConfig.cap = lineCap;
    }

    if (this.props.panelType === PANEL_TYPES.BAR) {
    if (fillColor) {
      lineConfig.fill = fillColor;
    } else if (this.props.panelType === PANEL_TYPES.BAR) {
      lineConfig.fill = resolvedLineColor;
    } else if (this.props.panelType === PANEL_TYPES.HISTOGRAM) {
      lineConfig.fill = `${resolvedLineColor}40`;
    }

    return lineConfig;
@@ -147,6 +151,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
      pointsConfig.show = false;
    } else if (showPoints === VisibilityMode.Always) {
      pointsConfig.show = true;
    } else {
      pointsConfig.show = false; // default to hidden
    }

    return pointsConfig;
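One non-obvious bit in the fill-resolution change: appending `40` to a six-digit hex color adds an alpha byte, so a histogram series is filled with its line color at roughly 25% opacity (0x40 / 0xFF ≈ 0.25). Illustrative values, not from the diff:

    const resolvedLineColor = '#4E74F8';
    const fill = `${resolvedLineColor}40`; // '#4E74F840' — ~25% alpha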
@@ -175,6 +175,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
  pointsBuilder?: Series.Points.Show;
  show?: boolean;
  spanGaps?: boolean;
  fillColor?: string;
  isDarkMode?: boolean;
  stepInterval?: number;
}
@@ -13,6 +13,7 @@ import {
  updateWindowSize,
} from './tooltipController';
import {
  DashboardCursorSync,
  TooltipControllerContext,
  TooltipControllerState,
  TooltipLayoutInfo,
@@ -34,7 +35,7 @@ export default function TooltipPlugin({
  render,
  maxWidth = 300,
  maxHeight = 400,
  syncMode = 'NONE',
  syncMode = DashboardCursorSync.None,
  syncKey = '_tooltip_sync_global_',
  canPinTooltip = false,
}: TooltipPluginProps): JSX.Element | null {
@@ -77,11 +78,11 @@ export default function TooltipPlugin({
  // render on every mouse move.
  const controller: TooltipControllerState = createInitialControllerState();

  const syncTooltipWithDashboard = syncMode === 'TOOLTIP';
  const syncTooltipWithDashboard = syncMode === DashboardCursorSync.Tooltip;

  // Enable uPlot's built-in cursor sync when requested so that
  // crosshair / tooltip can follow the dashboard-wide cursor.
  if (syncMode !== 'NONE' && config.scales[0]?.props.time) {
  if (syncMode !== DashboardCursorSync.None && config.scales[0]?.props.time) {
    config.setCursor({
      sync: { key: syncKey, scales: ['x', null] },
    });
@@ -4,7 +4,6 @@ import type {
  ReactNode,
  RefObject,
} from 'react';
import { CrossPanelSync } from 'types/api/dashboard/getAll';
import type uPlot from 'uplot';

import type { TooltipRenderArgs } from '../../components/types';
@@ -12,6 +11,12 @@ import type { UPlotConfigBuilder } from '../../config/UPlotConfigBuilder';

export const TOOLTIP_OFFSET = 10;

export enum DashboardCursorSync {
  Crosshair,
  None,
  Tooltip,
}

export interface TooltipViewState {
  plot?: uPlot | null;
  style: Partial<CSSProperties>;
@@ -30,7 +35,7 @@ export interface TooltipLayoutInfo {
export interface TooltipPluginProps {
  config: UPlotConfigBuilder;
  canPinTooltip?: boolean;
  syncMode?: CrossPanelSync;
  syncMode?: DashboardCursorSync;
  syncKey?: string;
  render: (args: TooltipRenderArgs) => ReactNode;
  maxWidth?: number;
@@ -81,7 +86,7 @@ export interface TooltipControllerContext {
  rafId: MutableRefObject<number | null>;
  updateState: (updates: Partial<TooltipViewState>) => void;
  renderRef: MutableRefObject<(args: TooltipRenderArgs) => ReactNode>;
  syncMode: CrossPanelSync;
  syncMode: DashboardCursorSync;
  syncKey: string;
  canPinTooltip: boolean;
  createTooltipContents: () => React.ReactNode;
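Because `DashboardCursorSync` takes implicit numeric values, `Crosshair` is `0` and therefore falsy — so callers must compare explicitly, as the plugin does with `syncMode !== DashboardCursorSync.None`, never via truthiness checks:

    DashboardCursorSync.Crosshair; // 0 — falsy
    DashboardCursorSync.None;      // 1
    DashboardCursorSync.Tooltip;   // 2

The test updates that follow apply the same string-to-enum substitution throughout.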
@@ -7,6 +7,7 @@ import type uPlot from 'uplot';
import { TooltipRenderArgs } from '../../components/types';
import { UPlotConfigBuilder } from '../../config/UPlotConfigBuilder';
import TooltipPlugin from '../TooltipPlugin/TooltipPlugin';
import { DashboardCursorSync } from '../TooltipPlugin/types';

// ---------------------------------------------------------------------------
// Mock helpers
@@ -99,7 +100,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: renderFn,
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
      ...extraProps,
    }),
  );
@@ -126,7 +127,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => React.createElement('div', null, 'tooltip-body'),
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
    }),
  );

@@ -140,7 +141,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => null,
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
    }),
  );

@@ -216,7 +217,7 @@ describe('TooltipPlugin', () => {
        { type: 'button', onClick: args.dismiss },
        'Dismiss',
      ),
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
      canPinTooltip: true,
    }),
  );
@@ -260,7 +261,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config: config,
      render: () => React.createElement('div', null, 'tooltip-body'),
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
      canPinTooltip: true,
    }),
  );
@@ -304,7 +305,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => React.createElement('div', null, 'pinned content'),
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
      canPinTooltip: true,
    }),
  );
@@ -347,7 +348,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => React.createElement('div', null, 'pinned content'),
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
      canPinTooltip: true,
    }),
  );
@@ -397,7 +398,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => null,
      syncMode: 'TOOLTIP',
      syncMode: DashboardCursorSync.Tooltip,
      syncKey: 'dashboard-sync',
    }),
  );
@@ -416,7 +417,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => null,
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
    }),
  );

@@ -432,7 +433,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => null,
      syncMode: 'TOOLTIP',
      syncMode: DashboardCursorSync.Tooltip,
    }),
  );

@@ -452,7 +453,7 @@ describe('TooltipPlugin', () => {
    React.createElement(TooltipPlugin, {
      config,
      render: () => null,
      syncMode: 'NONE',
      syncMode: DashboardCursorSync.None,
    }),
  );
@@ -11,22 +11,6 @@ import { Threshold } from '../hooks/types';
import { findMinMaxThresholdValues } from './threshold';
import { LogScaleLimits, RangeFunctionParams } from './types';

/**
 * Rounds a number down to the nearest multiple of incr.
 * Used for linear scale min so the axis starts on a clean tick.
 */
export function incrRoundDn(num: number, incr: number): number {
  return Math.floor(num / incr) * incr;
}

/**
 * Rounds a number up to the nearest multiple of incr.
 * Used for linear scale max so the axis ends on a clean tick.
 */
export function incrRoundUp(num: number, incr: number): number {
  return Math.ceil(num / incr) * incr;
}

/**
 * Snaps min/max/softMin/softMax to valid log-scale values (powers of logBase).
 * Only applies when distribution is logarithmic; otherwise returns limits unchanged.
@@ -213,25 +197,6 @@ function getLogScaleRange(
  );
}

/**
 * Snaps linear scale min down and max up to whole numbers so axis bounds are clean.
 */
function roundLinearRange(minMax: Range.MinMax): Range.MinMax {
  const [currentMin, currentMax] = minMax;
  let roundedMin = currentMin;
  let roundedMax = currentMax;

  if (roundedMin != null) {
    roundedMin = incrRoundDn(roundedMin, 1);
  }

  if (roundedMax != null) {
    roundedMax = incrRoundUp(roundedMax, 1);
  }

  return [roundedMin, roundedMax];
}

/**
 * Snaps log-scale [min, max] to exact powers of logBase (nearest magnitude below/above).
 * If min and max would be equal after snapping, max is increased by one magnitude so the range is valid.
@@ -330,7 +295,6 @@ export function createRangeFunction(

  if (scale.distr === 1) {
    minMax = getLinearScaleRange(minMax, params, dataMin, dataMax);
    minMax = roundLinearRange(minMax);
  } else if (scale.distr === 3) {
    minMax = getLogScaleRange(minMax, params, dataMin, dataMax, logBase);
    const logFn = scale.log === 2 ? Math.log2 : Math.log10;
@@ -81,13 +81,6 @@ export interface DashboardTemplate {
  previewImage: string;
}

export const CROSS_PANEL_SYNC_OPTIONS = [
  'NONE',
  'CROSSHAIR',
  'TOOLTIP',
] as const;
export type CrossPanelSync = typeof CROSS_PANEL_SYNC_OPTIONS[number];

export interface DashboardData {
  // uuid?: string;
  description?: string;
@@ -100,7 +93,6 @@ export interface DashboardData {
  variables: Record<string, IDashboardVariable>;
  version?: string;
  image?: string;
  crossPanelSync?: CrossPanelSync;
}

export interface WidgetRow {
@@ -3,3 +3,33 @@
 * Example: 1.5 → 2, 1.49 → 1
 */
export const roundHalfUp = (value: number): number => Math.floor(value + 0.5);

/**
 * Rounds a number down to the nearest multiple of incr.
 * Used for linear scale min so the axis starts on a clean tick.
 */
export function incrRoundDn(num: number, incr: number): number {
  return Math.floor(num / incr) * incr;
}

/**
 * Rounds a number up to the nearest multiple of incr.
 * Used for linear scale max so the axis ends on a clean tick.
 */
export function incrRoundUp(num: number, incr: number): number {
  return Math.ceil(num / incr) * incr;
}

/**
 * Rounds a number to `dec` decimal places (the nearest multiple of 10^-dec).
 * Used for decimal precision.
 */
export function roundDecimals(val: number, dec = 0): number {
  if (Number.isInteger(val)) {
    return val;
  }

  const p = 10 ** dec;
  const n = val * p * (1 + Number.EPSILON);
  return Math.round(n) / p;
}
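The `(1 + Number.EPSILON)` nudge in `roundDecimals` compensates for binary floating-point values that sit just below a rounding boundary. A worked example of all three helpers:

    roundDecimals(1.005, 2); // -> 1.01
    // Without the nudge: 1.005 * 100 === 100.49999999999999 in IEEE-754,
    // and Math.round of that divided by 100 would give 1 instead of 1.01.

    incrRoundDn(7.3, 0.5); // -> 7    (floor to a multiple of 0.5)
    incrRoundUp(7.3, 0.5); // -> 7.5  (ceil to a multiple of 0.5)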
@@ -3,9 +3,8 @@ package flagger

import "github.com/SigNoz/signoz/pkg/types/featuretypes"

var (
    FeatureUseSpanMetrics       = featuretypes.MustNewName("use_span_metrics")
    FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
    FeatureKafkaSpanEval        = featuretypes.MustNewName("kafka_span_eval")
    FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
    FeatureKafkaSpanEval  = featuretypes.MustNewName("kafka_span_eval")
)

func MustNewRegistry() featuretypes.Registry {
@@ -18,14 +17,6 @@ func MustNewRegistry() featuretypes.Registry {
            DefaultVariant: featuretypes.MustNewName("disabled"),
            Variants:       featuretypes.NewBooleanVariants(),
        },
        &featuretypes.Feature{
            Name:           FeatureInterpolationEnabled,
            Kind:           featuretypes.KindBoolean,
            Stage:          featuretypes.StageExperimental,
            Description:    "Controls whether to enable interpolation",
            DefaultVariant: featuretypes.MustNewName("disabled"),
            Variants:       featuretypes.NewBooleanVariants(),
        },
        &featuretypes.Feature{
            Name: FeatureKafkaSpanEval,
            Kind: featuretypes.KindBoolean,
pkg/modules/identity/identity.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package identity

import (
    "context"

    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type Module interface {
    // CreateIdentityWithRoles creates an identity and assigns roles to it
    CreateIdentityWithRoles(context.Context, valuer.UUID, valuer.UUID, []string) error

    // GrantRole grants a role to an identity
    GrantRole(context.Context, valuer.UUID, string) error

    // RevokeRole revokes a role from an identity
    RevokeRole(context.Context, valuer.UUID, string) error

    // GetRoles gets all roles for an identity
    GetRoles(context.Context, valuer.UUID, valuer.UUID) ([]*roletypes.Role, error)

    // CountByRoleAndOrgID counts identities with a specific role in an org
    CountByRoleAndOrgID(context.Context, string, valuer.UUID) (int64, error)

    // DeleteIdentity deletes an identity and its associated roles
    DeleteIdentity(context.Context, valuer.UUID, valuer.UUID) error
}
pkg/modules/identity/implidentity/module.go (new file, 81 lines)
@@ -0,0 +1,81 @@
package implidentity

import (
    "context"

    "github.com/SigNoz/signoz/pkg/modules/identity"
    "github.com/SigNoz/signoz/pkg/types/identitytypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type module struct {
    store identitytypes.Store
}

func NewModule(store identitytypes.Store) identity.Module {
    return &module{
        store: store,
    }
}

func (m *module) CreateIdentityWithRoles(ctx context.Context, id valuer.UUID, orgID valuer.UUID, roleNames []string) error {
    return m.store.RunInTx(ctx, func(ctx context.Context) error {
        // Create identity
        ident := identitytypes.NewStorableIdentity(id, orgID)
        if err := m.store.CreateIdentity(ctx, ident); err != nil {
            return err
        }

        // Create identity_role mappings for each role
        for _, roleName := range roleNames {
            identityRole := identitytypes.NewStorableIdentityRole(id, roleName)
            if err := m.store.CreateIdentityRole(ctx, identityRole); err != nil {
                return err
            }
        }

        return nil
    })
}

func (m *module) GrantRole(ctx context.Context, identityID valuer.UUID, roleName string) error {
    storableIdentityRole := identitytypes.NewStorableIdentityRole(identityID, roleName)
    return m.store.CreateIdentityRole(ctx, storableIdentityRole)
}

func (m *module) RevokeRole(ctx context.Context, identityID valuer.UUID, roleName string) error {
    return m.store.DeleteIdentityRole(ctx, identityID, roleName)
}

func (m *module) GetRoles(ctx context.Context, id valuer.UUID, orgID valuer.UUID) ([]*roletypes.Role, error) {
    storableRoles, err := m.store.GetRolesByIdentityID(ctx, id)
    if err != nil {
        return nil, err
    }

    roles := make([]*roletypes.Role, 0, len(storableRoles))
    for _, storableRole := range storableRoles {
        roles = append(roles, roletypes.NewRoleFromStorableRole(storableRole))
    }

    return roles, nil
}

func (m *module) DeleteIdentity(ctx context.Context, identityID valuer.UUID, _ valuer.UUID) error {
    return m.store.RunInTx(ctx, func(ctx context.Context) error {
        if err := m.store.DeleteIdentityRoles(ctx, identityID); err != nil {
            return err
        }

        if err := m.store.DeleteIdentity(ctx, identityID); err != nil {
            return err
        }

        return nil
    })
}

func (m *module) CountByRoleAndOrgID(ctx context.Context, roleName string, orgID valuer.UUID) (int64, error) {
    return m.store.CountByRoleNameAndOrgID(ctx, roleName, orgID)
}
pkg/modules/identity/implidentity/store.go (new file, 126 lines)
@@ -0,0 +1,126 @@
package implidentity

import (
    "context"

    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types/identitytypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type store struct {
    sqlstore sqlstore.SQLStore
}

func NewStore(sqlstore sqlstore.SQLStore) identitytypes.Store {
    return &store{sqlstore: sqlstore}
}

func (s *store) CreateIdentity(ctx context.Context, identity *identitytypes.StorableIdentity) error {
    _, err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewInsert().
        Model(identity).
        Exec(ctx)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create identity")
    }
    return nil
}

func (s *store) CreateIdentityRole(ctx context.Context, identityRole *identitytypes.StorableIdentityRole) error {
    _, err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewInsert().
        Model(identityRole).
        Exec(ctx)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create identity role")
    }
    return nil
}

func (s *store) DeleteIdentityRole(ctx context.Context, identityID valuer.UUID, roleName string) error {
    _, err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewDelete().
        Model(&identitytypes.StorableIdentityRole{}).
        Where("identity_id = ?", identityID).
        Where("role_name = ?", roleName).
        Exec(ctx)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity role")
    }
    return nil
}

func (s *store) DeleteIdentityRoles(ctx context.Context, identityID valuer.UUID) error {
    _, err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewDelete().
        Model(&identitytypes.StorableIdentityRole{}).
        Where("identity_id = ?", identityID).
        Exec(ctx)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity roles")
    }
    return nil
}

func (s *store) DeleteIdentity(ctx context.Context, identityID valuer.UUID) error {
    _, err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewDelete().
        Model(&identitytypes.StorableIdentity{}).
        Where("id = ?", identityID).
        Exec(ctx)
    if err != nil {
        return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity")
    }
    return nil
}

func (s *store) GetRolesByIdentityID(ctx context.Context, identityID valuer.UUID) ([]*roletypes.StorableRole, error) {
    var roles []*roletypes.StorableRole
    err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewSelect().
        Model(&roles).
        Join("JOIN identity_role ON identity_role.role_name = role.name").
        Where("identity_role.identity_id = ?", identityID).
        Scan(ctx)
    if err != nil {
        return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get roles for identity %s", identityID)
    }
    return roles, nil
}

func (s *store) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
    return s.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
        return cb(ctx)
    })
}

func (s *store) CountByRoleNameAndOrgID(ctx context.Context, roleName string, orgID valuer.UUID) (int64, error) {
    count, err := s.
        sqlstore.
        BunDBCtx(ctx).
        NewSelect().
        Model((*identitytypes.StorableIdentityRole)(nil)).
        Join("JOIN identity ON identity.id = identity_role.identity_id").
        Where("identity_role.role_name = ?", roleName).
        Where("identity.org_id = ?", orgID).
        Count(ctx)
    if err != nil {
        return 0, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to count identities by role")
    }
    return int64(count), nil
}
|
||||
|
||||
return module.store.ListByKeyRange(ctx, start, end)
|
||||
}
|
||||
|
||||
func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
|
||||
return module.store.GetByName(ctx, name)
|
||||
}
|
||||
|
||||
@@ -47,6 +47,22 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
|
||||
return organization, nil
|
||||
}
|
||||
|
||||
func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
|
||||
organization := new(types.Organization)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(organization).
|
||||
Where("name = ?", name).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
|
||||
}
|
||||
|
||||
return organization, nil
|
||||
}
|
||||
|
||||
func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
|
||||
organizations := make([]*types.Organization, 0)
|
||||
err := store.
|
||||
|
||||
@@ -14,6 +14,9 @@ type Getter interface {
|
||||
|
||||
// ListByOwnedKeyRange gets all the organizations owned by the instance
|
||||
ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)
|
||||
|
||||
// Gets the organization by name
|
||||
GetByName(context.Context, string) (*types.Organization, error)
|
||||
}
|
||||
|
||||
type Setter interface {
|
||||
|
||||
@@ -11,6 +11,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/authdomain"
|
||||
"github.com/SigNoz/signoz/pkg/modules/identity"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/session"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
@@ -28,9 +29,10 @@ type module struct {
|
||||
authDomain authdomain.Module
|
||||
tokenizer tokenizer.Tokenizer
|
||||
orgGetter organization.Getter
|
||||
identity identity.Module
|
||||
}
|
||||
|
||||
func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter) session.Module {
|
||||
func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter, identity identity.Module) session.Module {
|
||||
return &module{
|
||||
settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/session/implsession"),
|
||||
authNs: authNs,
|
||||
@@ -39,6 +41,7 @@ func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.A
|
||||
authDomain: authDomain,
|
||||
tokenizer: tokenizer,
|
||||
orgGetter: orgGetter,
|
||||
identity: identity,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -151,6 +154,10 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
|
||||
return "", err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
|
||||
}
|
||||
|
||||
token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
|
||||
if err != nil {
|
||||
return "", err
|
||||
|
||||
@@ -5,11 +5,26 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Password PasswordConfig `mapstructure:"password"`
|
||||
Root RootConfig `mapstructure:"root"`
|
||||
}
|
||||
|
||||
type RootConfig struct {
|
||||
Enabled bool `mapstructure:"enabled"`
|
||||
Email valuer.Email `mapstructure:"email"`
|
||||
Password string `mapstructure:"password"`
|
||||
Org OrgConfig `mapstructure:"org"`
|
||||
}
|
||||
|
||||
type OrgConfig struct {
|
||||
Name string `mapstructure:"name"`
|
||||
}
|
||||
|
||||
type PasswordConfig struct {
|
||||
Reset ResetConfig `mapstructure:"reset"`
|
||||
}
|
||||
@@ -31,6 +46,12 @@ func newConfig() factory.Config {
|
||||
MaxTokenLifetime: 6 * time.Hour,
|
||||
},
|
||||
},
|
||||
Root: RootConfig{
|
||||
Enabled: false,
|
||||
Org: OrgConfig{
|
||||
Name: "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,5 +60,17 @@ func (c Config) Validate() error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
|
||||
}
|
||||
|
||||
if c.Root.Enabled {
|
||||
if c.Root.Email.IsZero() {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
|
||||
}
|
||||
if c.Root.Password == "" {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
|
||||
}
|
||||
if !types.IsPasswordValid(c.Root.Password) {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -16,6 +16,10 @@ func NewGetter(store types.UserStore) user.Getter {
|
||||
return &getter{store: store}
|
||||
}
|
||||
|
||||
func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
|
||||
return module.store.GetRootUserByOrgID(ctx, orgID)
|
||||
}
|
||||
|
||||
func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
|
||||
users, err := module.store.ListUsersByOrgID(ctx, orgID)
|
||||
if err != nil {
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/emailing"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/modules/identity"
|
||||
"github.com/SigNoz/signoz/pkg/modules/organization"
|
||||
"github.com/SigNoz/signoz/pkg/modules/user"
|
||||
root "github.com/SigNoz/signoz/pkg/modules/user"
|
||||
@@ -33,10 +34,11 @@ type Module struct {
|
||||
authz authz.AuthZ
|
||||
analytics analytics.Analytics
|
||||
config user.Config
|
||||
identity identity.Module
|
||||
}
|
||||
|
||||
// This module is a WIP, don't take inspiration from this.
|
||||
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
|
||||
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config, identity identity.Module) root.Module {
|
||||
settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
|
||||
return &Module{
|
||||
store: store,
|
||||
@@ -47,6 +49,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
|
||||
analytics: analytics,
|
||||
authz: authz,
|
||||
config: config,
|
||||
identity: identity,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -103,6 +106,12 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if existingUser != nil {
|
||||
if err := existingUser.ErrIfRoot(); err != nil {
|
||||
return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
|
||||
}
|
||||
}
|
||||
|
||||
if existingUser != nil {
|
||||
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
|
||||
}
|
||||
@@ -174,6 +183,12 @@ func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ..
|
||||
}
|
||||
|
||||
if err := module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
// Create identity with roles via identity module
|
||||
if err := module.identity.CreateIdentityWithRoles(ctx, input.ID, input.OrgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role)}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Create user
|
||||
if err := module.store.CreateUser(ctx, input); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -202,42 +217,41 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := existingUser.ErrIfRoot(); err != nil {
|
||||
return nil, errors.WithAdditionalf(err, "cannot update root user")
|
||||
}
|
||||
|
||||
requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// only displayName, role can be updated
|
||||
if user.DisplayName == "" {
|
||||
user.DisplayName = existingUser.DisplayName
|
||||
}
|
||||
|
||||
if user.Role == "" {
|
||||
user.Role = existingUser.Role
|
||||
}
|
||||
|
||||
if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
|
||||
if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
|
||||
}
|
||||
|
||||
// Make sure that th e request is not demoting the last admin user.
|
||||
// also an admin user can only change role of their own or other user
|
||||
if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
|
||||
adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
|
||||
// Make sure that the request is not demoting the last admin user.
|
||||
if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
|
||||
adminCount, err := m.identity.CountByRoleAndOrgID(ctx, roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin), orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(adminUsers) == 1 {
|
||||
if adminCount == 1 {
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot demote the last admin")
|
||||
}
|
||||
}
|
||||
|
||||
if user.Role != existingUser.Role {
|
||||
oldRole := existingUser.Role
|
||||
newRole := user.Role
|
||||
roleChanged := newRole != "" && newRole != oldRole
|
||||
|
||||
if roleChanged {
|
||||
// Update authz (OpenFGA) first - must be outside transaction, is idempotent
|
||||
err = m.authz.ModifyGrant(ctx,
|
||||
orgID,
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(newRole),
|
||||
authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil),
|
||||
)
|
||||
if err != nil {
|
||||
@@ -245,23 +259,59 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
}
|
||||
}
|
||||
|
||||
user.UpdatedAt = time.Now()
|
||||
updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
|
||||
if err != nil {
|
||||
existingUser.Update(user.DisplayName, user.Role)
|
||||
|
||||
if roleChanged {
|
||||
// Wrap identity_role sync and user update in a transaction
|
||||
if err := m.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
// Sync identity_role: revoke old role and grant new role
|
||||
if err := m.identity.RevokeRole(ctx, existingUser.IdentityID, roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole)); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := m.identity.GrantRole(ctx, existingUser.IdentityID, roletypes.MustGetSigNozManagedRoleFromExistingRole(newRole)); err != nil {
|
||||
return err
|
||||
}
|
||||
// Update user
|
||||
if err := m.store.UpdateUser(ctx, orgID, existingUser); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
// No role change, just update user directly
|
||||
if err := m.store.UpdateUser(ctx, orgID, existingUser); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Analytics and tokenizer cleanup
|
||||
traits := types.NewTraitsFromUser(existingUser)
|
||||
m.analytics.IdentifyUser(ctx, existingUser.OrgID.String(), existingUser.ID.String(), traits)
|
||||
m.analytics.TrackUser(ctx, existingUser.OrgID.String(), existingUser.ID.String(), "User Updated", traits)
|
||||
|
||||
if err := m.tokenizer.DeleteIdentity(ctx, existingUser.ID); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
traits := types.NewTraitsFromUser(updatedUser)
|
||||
m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
|
||||
return existingUser, nil
|
||||
}
|
||||
|
||||
traits["updated_by"] = updatedBy
|
||||
m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
|
||||
|
||||
if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
|
||||
return nil, err
|
||||
func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
|
||||
if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return updatedUser, nil
|
||||
traits := types.NewTraitsFromUser(user)
|
||||
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
|
||||
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
|
||||
|
||||
if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
|
||||
@@ -270,23 +320,48 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
|
||||
return err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return errors.WithAdditionalf(err, "cannot delete root user")
|
||||
}
|
||||
|
||||
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
|
||||
}
|
||||
|
||||
// don't allow to delete the last admin user
|
||||
adminUsers, err := module.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
|
||||
// Get user's roles from identity module
|
||||
userRoles, err := module.identity.GetRoles(ctx, user.IdentityID, user.OrgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(adminUsers) == 1 && user.Role == types.RoleAdmin {
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
|
||||
// Check if user has admin role
|
||||
adminRoleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin)
|
||||
hasAdminRole := slices.ContainsFunc(userRoles, func(r *roletypes.Role) bool {
|
||||
return r.Name == adminRoleName
|
||||
})
|
||||
|
||||
// don't allow to delete the last admin user
|
||||
if hasAdminRole {
|
||||
adminCount, err := module.identity.CountByRoleAndOrgID(ctx, adminRoleName, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if adminCount == 1 {
|
||||
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
|
||||
}
|
||||
}
|
||||
|
||||
// since revoke is idempotant multiple calls to revoke won't cause issues in case of retries
|
||||
err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
|
||||
if err != nil {
|
||||
// Revoke all roles for the user
|
||||
for _, userRole := range userRoles {
|
||||
err = module.authz.Revoke(ctx, orgID, userRole.Name, authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Delete identity and identity_role records
|
||||
if err := module.identity.DeleteIdentity(ctx, user.IdentityID, user.OrgID); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
@@ -302,6 +377,15 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
|
||||
}
|
||||
|
||||
func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID valuer.UUID) (*types.ResetPasswordToken, error) {
|
||||
user, err := module.store.GetUser(ctx, userID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return nil, errors.WithAdditionalf(err, "cannot reset password for root user")
|
||||
}
|
||||
|
||||
password, err := module.store.GetPasswordByUserID(ctx, userID)
|
||||
if err != nil {
|
||||
if !errors.Ast(err, errors.TypeNotFound) {
|
||||
@@ -364,6 +448,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
|
||||
return err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return errors.WithAdditionalf(err, "cannot reset password for root user")
|
||||
}
|
||||
|
||||
token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
|
||||
if err != nil {
|
||||
module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
|
||||
@@ -407,6 +495,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
|
||||
return err
|
||||
}
|
||||
|
||||
user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return errors.WithAdditionalf(err, "cannot reset password for root user")
|
||||
}
|
||||
|
||||
if err := password.Update(passwd); err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -415,6 +512,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
|
||||
}
|
||||
|
||||
func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
|
||||
user, err := module.store.GetUser(ctx, userID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return errors.WithAdditionalf(err, "cannot change password for root user")
|
||||
}
|
||||
|
||||
password, err := module.store.GetPasswordByUserID(ctx, userID)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -476,7 +582,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
|
||||
}
|
||||
|
||||
func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
|
||||
user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
|
||||
user, err := types.NewRootUser(name, email, organization.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -505,7 +611,7 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
|
||||
return err
|
||||
}
|
||||
|
||||
err = module.createUserWithoutGrant(ctx, user, root.WithFactorPassword(password))
|
||||
err = module.createUserWithoutGrant(ctx, user, root.WithFactorPassword(password), root.WithRole(types.RoleAdmin))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -536,6 +642,12 @@ func (module *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin
|
||||
func (module *Module) createUserWithoutGrant(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
|
||||
createUserOpts := root.NewCreateUserOptions(opts...)
|
||||
if err := module.store.RunInTx(ctx, func(ctx context.Context) error {
|
||||
// Create identity with roles via identity module
|
||||
if err := module.identity.CreateIdentityWithRoles(ctx, input.ID, input.OrgID, []string{roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role)}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Create user
|
||||
if err := module.store.CreateUser(ctx, input); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
pkg/modules/user/impluser/service.go (new file, 191 lines)
@@ -0,0 +1,191 @@
+package impluser
+
+import (
+	"context"
+	"time"
+
+	"github.com/SigNoz/signoz/pkg/authz"
+	"github.com/SigNoz/signoz/pkg/errors"
+	"github.com/SigNoz/signoz/pkg/factory"
+	"github.com/SigNoz/signoz/pkg/modules/identity"
+	"github.com/SigNoz/signoz/pkg/modules/organization"
+	"github.com/SigNoz/signoz/pkg/modules/user"
+	"github.com/SigNoz/signoz/pkg/types"
+	"github.com/SigNoz/signoz/pkg/types/authtypes"
+	"github.com/SigNoz/signoz/pkg/types/roletypes"
+	"github.com/SigNoz/signoz/pkg/valuer"
+)
+
+type service struct {
+	settings  factory.ScopedProviderSettings
+	store     types.UserStore
+	module    user.Module
+	orgGetter organization.Getter
+	authz     authz.AuthZ
+	identity  identity.Module
+	config    user.RootConfig
+	stopC     chan struct{}
+}
+
+func NewService(
+	providerSettings factory.ProviderSettings,
+	store types.UserStore,
+	module user.Module,
+	orgGetter organization.Getter,
+	authz authz.AuthZ,
+	identity identity.Module,
+	config user.RootConfig,
+) user.Service {
+	return &service{
+		settings:  factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
+		store:     store,
+		module:    module,
+		orgGetter: orgGetter,
+		authz:     authz,
+		identity:  identity,
+		config:    config,
+		stopC:     make(chan struct{}),
+	}
+}
+
+func (s *service) Start(ctx context.Context) error {
+	if !s.config.Enabled {
+		<-s.stopC
+		return nil
+	}
+
+	ticker := time.NewTicker(10 * time.Second)
+	defer ticker.Stop()
+
+	for {
+		err := s.reconcile(ctx)
+		if err == nil {
+			s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
+			<-s.stopC
+			return nil
+		}
+
+		s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)
+
+		select {
+		case <-s.stopC:
+			return nil
+		case <-ticker.C:
+			continue
+		}
+	}
+}
+
+func (s *service) Stop(ctx context.Context) error {
+	close(s.stopC)
+	return nil
+}
+
+func (s *service) reconcile(ctx context.Context) error {
+	org, err := s.orgGetter.GetByName(ctx, s.config.Org.Name)
+	if err != nil {
+		if errors.Ast(err, errors.TypeNotFound) {
+			newOrg := types.NewOrganization(s.config.Org.Name, s.config.Org.Name)
+			_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
+			return err
+		}
+
+		return err
+	}
+
+	return s.reconcileRootUser(ctx, org.ID)
+}
+
+func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
+	existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
+	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
+		return err
+	}
+
+	if existingRoot == nil {
+		return s.createOrPromoteRootUser(ctx, orgID)
+	}
+
+	return s.updateExistingRootUser(ctx, orgID, existingRoot)
+}
+
+func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
+	existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
+	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
+		return err
+	}
+
+	if existingUser != nil {
+		oldRole := existingUser.Role
+
+		existingUser.PromoteToRoot()
+		if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
+			return err
+		}
+
+		if oldRole != types.RoleAdmin {
+			if err := s.authz.ModifyGrant(ctx,
+				orgID,
+				roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
+				roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
+				authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
+			); err != nil {
+				return err
+			}
+		}
+
+		return s.setPassword(ctx, existingUser.ID)
+	}
+
+	// Create new root user
+	newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
+	if err != nil {
+		return err
+	}
+
+	factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
+	if err != nil {
+		return err
+	}
+
+	return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
+}
+
+func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
+	existingRoot.PromoteToRoot()
+
+	if existingRoot.Email != s.config.Email {
+		existingRoot.UpdateEmail(s.config.Email)
+		if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
+			return err
+		}
+	}
+
+	return s.setPassword(ctx, existingRoot.ID)
+}
+
+func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
+	password, err := s.store.GetPasswordByUserID(ctx, userID)
+	if err != nil {
+		if !errors.Ast(err, errors.TypeNotFound) {
+			return err
+		}
+
+		factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
+		if err != nil {
+			return err
+		}
+
+		return s.store.CreatePassword(ctx, factorPassword)
+	}
+
+	if !password.Equals(s.config.Password) {
+		if err := password.Update(s.config.Password); err != nil {
+			return err
+		}
+
+		return s.store.UpdatePassword(ctx, password)
+	}
+
+	return nil
+}
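`Start` above retries reconciliation on a fixed 10-second ticker until it succeeds or `Stop` closes `stopC`. The control flow in isolation, with a stubbed `work` function standing in for the real `reconcile` (unlike the real `Start`, this sketch returns on success rather than parking on the stop channel):

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// run retries work() on a fixed interval until it succeeds or stopC closes,
// mirroring the Start/Stop wiring in service.go above.
func run(ctx context.Context, work func(context.Context) error, stopC <-chan struct{}) {
	ticker := time.NewTicker(10 * time.Millisecond) // 10s in the real service
	defer ticker.Stop()

	for {
		if err := work(ctx); err == nil {
			fmt.Println("reconciliation completed")
			return
		}
		select {
		case <-stopC: // Stop() was called; give up quietly
			return
		case <-ticker.C: // next attempt
		}
	}
}

func main() {
	attempts := 0
	stopC := make(chan struct{})
	run(context.Background(), func(ctx context.Context) error {
		attempts++
		if attempts < 3 {
			return errors.New("not ready") // fails twice, succeeds on the third try
		}
		return nil
	}, stopC)
}
```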
@@ -210,20 +210,24 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
 	return users, nil
 }
 
-func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
-	user.UpdatedAt = time.Now()
-	_, err := store.sqlstore.BunDB().NewUpdate().
+func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
+	_, err := store.
+		sqlstore.
+		BunDBCtx(ctx).
+		NewUpdate().
 		Model(user).
 		Column("display_name").
 		Column("email").
+		Column("role").
+		Column("is_root").
 		Column("updated_at").
-		Where("id = ?", id).
 		Where("org_id = ?", orgID).
+		Where("id = ?", user.ID).
 		Exec(ctx)
 	if err != nil {
-		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
+		return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
 	}
-	return user, nil
+	return nil
 }
 
 func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
@@ -602,6 +606,23 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
 	})
 }
 
+func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
+	user := new(types.User)
+	err := store.
+		sqlstore.
+		BunDBCtx(ctx).
+		NewSelect().
+		Model(user).
+		Where("org_id = ?", orgID).
+		Where("is_root = ?", true).
+		Scan(ctx)
+	if err != nil {
+		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
+	}
+
+	return user, nil
+}
+
 func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
 	users := []*types.User{}
 	err := store.
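`GetRootUserByOrgID` maps an empty result set to a typed not-found error via `WrapNotFoundErrf`, which is what lets `reconcileRootUser` branch on `errors.Ast(err, errors.TypeNotFound)`. A rough standalone equivalent using `database/sql` semantics (the real code goes through bun and the SigNoz errors package; the names here are illustrative):

```go
package main

import (
	"database/sql"
	"errors"
	"fmt"
)

var ErrUserNotFound = errors.New("user not found")

// wrapNotFound converts the driver's sentinel into a domain error callers can
// branch on, analogous to sqlstore.WrapNotFoundErrf in the diff above.
func wrapNotFound(err error, orgID string) error {
	if errors.Is(err, sql.ErrNoRows) {
		return fmt.Errorf("root user for org %s: %w", orgID, ErrUserNotFound)
	}
	return err
}

func main() {
	err := wrapNotFound(sql.ErrNoRows, "org-1")
	// Callers branch on the typed error, just as reconcileRootUser does
	// with errors.Ast(err, errors.TypeNotFound).
	fmt.Println(errors.Is(err, ErrUserNotFound)) // true
}
```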
@@ -7,6 +7,7 @@ import (
 
 type createUserOptions struct {
 	FactorPassword *types.FactorPassword
+	Role           types.Role
 }
 
 type CreateUserOption func(*createUserOptions)
@@ -17,9 +18,16 @@ func WithFactorPassword(factorPassword *types.FactorPassword) CreateUserOption {
 	}
 }
 
+func WithRole(role types.Role) CreateUserOption {
+	return func(o *createUserOptions) {
+		o.Role = role
+	}
+}
+
 func NewCreateUserOptions(opts ...CreateUserOption) *createUserOptions {
 	o := &createUserOptions{
 		FactorPassword: nil,
+		Role:           types.RoleViewer, // default role
 	}
 
 	for _, opt := range opts {
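This is the classic functional-options pattern: `NewCreateUserOptions` sets `RoleViewer` as the default, and callers opt in to a different role with `WithRole` (as `CreateFirstUser` now does with `root.WithRole(types.RoleAdmin)`). A minimal self-contained sketch of the same pattern; the types here mirror the diff's shape but are illustrative, not the package's actual API surface:

```go
package main

import "fmt"

type Role string

const (
	RoleViewer Role = "viewer"
	RoleAdmin  Role = "admin"
)

type createUserOptions struct {
	Role Role
}

type CreateUserOption func(*createUserOptions)

// WithRole overrides the default, exactly like the option added above.
func WithRole(role Role) CreateUserOption {
	return func(o *createUserOptions) {
		o.Role = role
	}
}

func NewCreateUserOptions(opts ...CreateUserOption) *createUserOptions {
	o := &createUserOptions{Role: RoleViewer} // default role
	for _, opt := range opts {
		opt(o)
	}
	return o
}

func main() {
	fmt.Println(NewCreateUserOptions().Role)                    // viewer (default)
	fmt.Println(NewCreateUserOptions(WithRole(RoleAdmin)).Role) // admin
}
```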
pkg/modules/user/service.go (new file, 7 lines)
@@ -0,0 +1,7 @@
+package user
+
+import "github.com/SigNoz/signoz/pkg/factory"
+
+type Service interface {
+	factory.Service
+}
@@ -34,6 +34,9 @@ type Module interface {
 	ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error
 
 	UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)
+
+	// UpdateAnyUser updates a user and persists the changes to the database along with the analytics and identity deletion.
+	UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
 	DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error
 
 	// invite
@@ -54,6 +57,9 @@ type Module interface {
 }
 
 type Getter interface {
+	// Get root user by org id.
+	GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)
+
 	// Get gets the users based on the given id
 	ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
 
@@ -183,7 +183,7 @@ type APIHandlerOpts struct {
 }
 
 // NewAPIHandler returns an APIHandler
-func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
 	querierOpts := querier.QuerierOptions{
 		Reader: opts.Reader,
 		Cache:  opts.Signoz.Cache,
@@ -270,6 +270,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
 		}
 	}
 
+	// If the root user is enabled, the setup is complete
+	if config.User.Root.Enabled {
+		aH.SetupCompleted = true
+	}
+
 	aH.Upgrader = &websocket.Upgrader{
 		CheckOrigin: func(r *http.Request) bool {
 			return true
@@ -2045,7 +2050,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	organization := types.NewOrganization(req.OrgDisplayName)
+	organization := types.NewOrganization(req.OrgDisplayName, req.OrgName)
 	user, errv2 := aH.Signoz.Modules.User.CreateFirstUser(r.Context(), organization, req.Name, req.Email, req.Password)
 	if errv2 != nil {
 		render.Error(w, errv2)
@@ -135,7 +135,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
 		Signoz:         signoz,
 		QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
 		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
-	})
+	}, config)
 	if err != nil {
 		return nil, err
 	}
@@ -43,11 +43,11 @@ var (
 // FromUnit returns a converter for the given unit
 func FromUnit(u Unit) Converter {
 	switch u {
-	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
+	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
 		return DurationConverter
-	case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
+	case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
 		return DataConverter
-	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
+	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
 		return DataRateConverter
 	case "percent", "percentunit", "%":
 		return PercentConverter
@@ -58,36 +58,80 @@ func (*dataConverter) Name() string {
 	return "data"
 }
 
+// Notation followed by UCUM:
+// https://ucum.org/ucum
+// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
+// kilo = k, mega = M, giga = G, tera = T, peta = P
+// exa = E, zetta = Z, yotta = Y
+// byte = By, bit = bit
 func FromDataUnit(u Unit) float64 {
 	switch u {
 	case "bytes", "By": // base 2
 		return Byte
 	case "decbytes": // base 10
 		return Byte
-	case "bits": // base 2
+	case "bits", "bit": // base 2
 		return Bit
 	case "decbits": // base 10
 		return Bit
-	case "kbytes", "kBy": // base 2
+	case "kbytes", "KiBy": // base 2
 		return Kibibyte
-	case "decKbytes", "deckbytes": // base 10
+	case "decKbytes", "deckbytes", "kBy": // base 10
 		return Kilobyte
-	case "mbytes", "MBy": // base 2
+	case "mbytes", "MiBy": // base 2
 		return Mebibyte
-	case "decMbytes", "decmbytes": // base 10
+	case "decMbytes", "decmbytes", "MBy": // base 10
 		return Megabyte
-	case "gbytes", "GBy": // base 2
+	case "gbytes", "GiBy": // base 2
 		return Gibibyte
-	case "decGbytes", "decgbytes": // base 10
+	case "decGbytes", "decgbytes", "GBy": // base 10
 		return Gigabyte
-	case "tbytes", "TBy": // base 2
+	case "tbytes", "TiBy": // base 2
 		return Tebibyte
-	case "decTbytes", "dectbytes": // base 10
+	case "decTbytes", "dectbytes", "TBy": // base 10
 		return Terabyte
-	case "pbytes", "PBy": // base 2
+	case "pbytes", "PiBy": // base 2
 		return Pebibyte
-	case "decPbytes", "decpbytes": // base 10
+	case "decPbytes", "decpbytes", "PBy": // base 10
 		return Petabyte
+	case "EBy": // base 10
+		return Exabyte
+	case "ZBy": // base 10
+		return Zettabyte
+	case "YBy": // base 10
+		return Yottabyte
+	case "Kibit": // base 2
+		return Kibibit
+	case "Mibit": // base 2
+		return Mebibit
+	case "Gibit": // base 2
+		return Gibibit
+	case "Tibit": // base 2
+		return Tebibit
+	case "Pibit": // base 2
+		return Pebibit
+	case "EiBy": // base 2
+		return Exbibyte
+	case "ZiBy": // base 2
+		return Zebibyte
+	case "YiBy": // base 2
+		return Yobibyte
+	case "kbit": // base 10
+		return Kilobit
+	case "Mbit": // base 10
+		return Megabit
+	case "Gbit": // base 10
+		return Gigabit
+	case "Tbit": // base 10
+		return Terabit
+	case "Pbit": // base 10
+		return Petabit
+	case "Ebit": // base 10
+		return Exabit
+	case "Zbit": // base 10
+		return Zettabit
+	case "Ybit": // base 10
+		return Yottabit
 	default:
 		return 1
 	}
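`FromDataUnit` is a table of multipliers: every unit maps to a factor in a common base, so a conversion reduces to `value * factor(from) / factor(to)`. A standalone sketch of that arithmetic with a few of the UCUM factors above, assuming (as the `Bit`/`Byte` constants suggest) that the base unit is a bit with `Bit = 1`, `Byte = 8`:

```go
package main

import "fmt"

// A few of the factors from FromDataUnit, expressed in bits (assumed base).
var factor = map[string]float64{
	"bit":  1,
	"By":   8,
	"KiBy": 8 * 1024,
	"kBy":  8 * 1000,
	"MiBy": 8 * 1024 * 1024,
	"EBy":  8 * 1e18,
}

// convert mirrors what Converter.Convert does with these tables:
// normalize to the base unit, then divide by the target factor.
func convert(value float64, from, to string) float64 {
	return value * factor[from] / factor[to]
}

func main() {
	fmt.Println(convert(1024, "By", "KiBy")) // 1
	fmt.Println(convert(1, "KiBy", "kBy"))   // 1.024
	fmt.Println(convert(1, "EBy", "By"))     // 1e+18
}
```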
@@ -54,6 +54,12 @@ func (*dataRateConverter) Name() string {
 	return "data_rate"
 }
 
+// Notation followed by UCUM:
+// https://ucum.org/ucum
+// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
+// kilo = k, mega = M, giga = G, tera = T, peta = P
+// exa = E, zetta = Z, yotta = Y
+// byte = By, bit = bit
 func FromDataRateUnit(u Unit) float64 {
 	// See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
 	switch u {
@@ -65,46 +71,70 @@ func FromDataRateUnit(u Unit) float64 {
 		return BitPerSecond
 	case "bps", "bit/s": // bits/sec(SI)
 		return BitPerSecond
-	case "KiBs": // kibibytes/sec
+	case "KiBs", "KiBy/s": // kibibytes/sec
 		return KibibytePerSecond
-	case "Kibits": // kibibits/sec
+	case "Kibits", "Kibit/s": // kibibits/sec
 		return KibibitPerSecond
 	case "KBs", "kBy/s": // kilobytes/sec
 		return KilobytePerSecond
 	case "Kbits", "kbit/s": // kilobits/sec
 		return KilobitPerSecond
-	case "MiBs": // mebibytes/sec
+	case "MiBs", "MiBy/s": // mebibytes/sec
 		return MebibytePerSecond
-	case "Mibits": // mebibits/sec
+	case "Mibits", "Mibit/s": // mebibits/sec
 		return MebibitPerSecond
 	case "MBs", "MBy/s": // megabytes/sec
 		return MegabytePerSecond
 	case "Mbits", "Mbit/s": // megabits/sec
 		return MegabitPerSecond
-	case "GiBs": // gibibytes/sec
+	case "GiBs", "GiBy/s": // gibibytes/sec
 		return GibibytePerSecond
-	case "Gibits": // gibibits/sec
+	case "Gibits", "Gibit/s": // gibibits/sec
 		return GibibitPerSecond
 	case "GBs", "GBy/s": // gigabytes/sec
 		return GigabytePerSecond
 	case "Gbits", "Gbit/s": // gigabits/sec
 		return GigabitPerSecond
-	case "TiBs": // tebibytes/sec
+	case "TiBs", "TiBy/s": // tebibytes/sec
 		return TebibytePerSecond
-	case "Tibits": // tebibits/sec
+	case "Tibits", "Tibit/s": // tebibits/sec
 		return TebibitPerSecond
 	case "TBs", "TBy/s": // terabytes/sec
 		return TerabytePerSecond
 	case "Tbits", "Tbit/s": // terabits/sec
 		return TerabitPerSecond
-	case "PiBs": // pebibytes/sec
+	case "PiBs", "PiBy/s": // pebibytes/sec
 		return PebibytePerSecond
-	case "Pibits": // pebibits/sec
+	case "Pibits", "Pibit/s": // pebibits/sec
 		return PebibitPerSecond
 	case "PBs", "PBy/s": // petabytes/sec
 		return PetabytePerSecond
 	case "Pbits", "Pbit/s": // petabits/sec
 		return PetabitPerSecond
+	case "EBy/s": // exabytes/sec
+		return ExabytePerSecond
+	case "Ebit/s": // exabits/sec
+		return ExabitPerSecond
+	case "EiBy/s": // exbibytes/sec
+		return ExbibytePerSecond
+	case "Eibit/s": // exbibits/sec
+		return ExbibitPerSecond
+	case "ZBy/s": // zettabytes/sec
+		return ZettabytePerSecond
+	case "Zbit/s": // zettabits/sec
+		return ZettabitPerSecond
+	case "ZiBy/s": // zebibytes/sec
+		return ZebibytePerSecond
+	case "Zibit/s": // zebibits/sec
+		return ZebibitPerSecond
+	case "YBy/s": // yottabytes/sec
+		return YottabytePerSecond
+	case "Ybit/s": // yottabits/sec
+		return YottabitPerSecond
+	case "YiBy/s": // yobibytes/sec
+		return YobibytePerSecond
+	case "Yibit/s": // yobibits/sec
+		return YobibitPerSecond
 	default:
 		return 1
 	}
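The rate table follows the same factor scheme as the data table, with byte and bit rates related by a factor of 8; the new tests below exercise exactly this (e.g. 1 KiBy/s = 8 Kibit/s). A tiny worked check of that arithmetic:

```go
package main

import "fmt"

func main() {
	// Byte and bit rates in the table above differ only by a factor of 8.
	const kiByPerSecInBits = 8 * 1024.0 // 1 KiBy/s expressed in bit/s
	const kibitPerSecInBits = 1024.0    // 1 Kibit/s expressed in bit/s
	fmt.Println(kiByPerSecInBits / kibitPerSecInBits) // 8: 1 KiBy/s = 8 Kibit/s

	// SI prefixes step by 1000: 1 Ebit/s = 1000 Pbit/s.
	fmt.Println(1e18 / 1e15) // 1000
}
```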
@@ -75,3 +75,83 @@ func TestDataRate(t *testing.T) {
 	// 1024 * 1024 * 1024 bytes = 1 gbytes
 	assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
 }
+
+func TestDataRateConversionUCUMUnit(t *testing.T) {
+	dataRateConverter := NewDataRateConverter()
+
+	tests := []struct {
+		name     string
+		input    Value
+		toUnit   Unit
+		expected Value
+	}{
+		// Binary byte scaling
+		{name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
+		{name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
+		{name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
+		{name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
+		{name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
+		{name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
+		{name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
+		{name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
+		{name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
+		{name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
+		// Binary bit scaling
+		{name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
+		{name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
+		{name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
+		{name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
+		{name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
+		{name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
+		{name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
+		{name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
+		{name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
+		{name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
+		// Bytes to bits
+		{name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
+		{name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
+		{name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
+		// Unit alias
+		{name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
+		{name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
+		// SI byte scaling (Exa, Zetta, Yotta)
+		{name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
+		{name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
+		{name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
+		{name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
+		{name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
+		{name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
+		// Binary byte scaling (Exbi, Zebi, Yobi)
+		{name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
+		{name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
+		{name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
+		{name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
+		{name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
+		{name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
+		// SI bit scaling (Exa, Zetta, Yotta)
+		{name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
+		{name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
+		{name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
+		{name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
+		{name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
+		{name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
+		// Binary bit scaling (Exbi, Zebi, Yobi)
+		{name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
+		{name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
+		{name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
+		{name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
+		{name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
+		{name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
+		// Bytes to bits (Exbi, Zebi, Yobi)
+		{name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
+		{name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
+		{name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := dataRateConverter.Convert(tt.input, tt.toUnit)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
 	assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
 	// 1024 bytes = 1 kbytes
 	assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
-	assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
+	assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
 	// 1 byte = 8 bits
 	assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
 	// 1 mbytes = 1024 kbytes
@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
 	assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
 	// 1024 kbytes = 1 mbytes
 	assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
-	assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
+	assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
 	// 1 mbytes = 1024 * 1024 bytes
 	assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
 	// 1024 mbytes = 1 gbytes
@@ -45,10 +45,90 @@ func TestData(t *testing.T) {
 	assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
 	// 1024 tbytes = 1 pbytes
 	assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
-	// 1024 tbytes = 1 pbytes
-	assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
+	// 1024 tbytes = 1 PiBy
+	assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
 	// 1 pbytes = 1024 tbytes
 	assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
-	// 1024 pbytes = 1 tbytes
-	assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
+	// 1024 TiBy = 1 pbytes
+	assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
 }
+
+func TestDataConversionUCUMUnit(t *testing.T) {
+	dataConverter := NewDataConverter()
+
+	tests := []struct {
+		name     string
+		input    Value
+		toUnit   Unit
+		expected Value
+	}{
+		// Bits to bytes
+		{name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
+		{name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
+		// Binary byte scaling
+		{name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
+		{name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
+		{name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
+		{name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
+		{name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
+		{name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
+		{name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
+		{name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
+		{name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
+		{name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
+		// SI bit scaling
+		{name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
+		{name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
+		{name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
+		{name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
+		{name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
+		{name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
+		{name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
+		{name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
+		{name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
+		{name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
+		// Binary bit scaling
+		{name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
+		{name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
+		{name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
+		{name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
+		{name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
+		{name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
+		{name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
+		{name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
+		{name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
+		{name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
+		// Bytes to bits
+		{name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
+		{name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
+		{name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
+		// SI byte scaling (Exa, Zetta, Yotta)
+		{name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
+		{name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
+		{name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
+		{name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
+		{name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
+		{name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
+		// Binary byte scaling (Exbi, Zebi, Yobi)
+		{name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
+		{name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
+		{name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
+		{name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
+		{name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
+		{name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
+		// SI bit scaling (Exa, Zetta, Yotta)
+		{name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
+		{name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
+		{name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
+		{name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
+		{name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
+		{name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := dataConverter.Convert(tt.input, tt.toUnit)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
 		return Hour
 	case "d":
 		return Day
-	case "w":
+	case "w", "wk":
 		return Week
 	default:
 		return Second
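With both "w" and UCUM's "wk" mapped to `Week`, week conversions reduce to plain duration arithmetic: 1 wk = 7 d = 168 h = 604800 s, which the new assertions below verify. A quick check with the standard library:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	week := 7 * 24 * time.Hour // time.Duration has no Week constant
	fmt.Println(week.Hours())   // 168
	fmt.Println(week.Seconds()) // 604800
}
```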
@@ -54,4 +54,13 @@ func TestDurationConvert(t *testing.T) {
 	assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
 	// 1000000000 ns = 1 s
 	assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))
+
+	// 7 d = 1 wk
+	assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
+	// 1 wk = 7 d
+	assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
+	// 1 wk = 168 h
+	assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
+	// 604800 s = 1 wk
+	assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
 }
@@ -24,30 +24,62 @@ func (f *dataFormatter) Format(value float64, unit string) string {
 		return humanize.IBytes(uint64(value))
 	case "decbytes":
 		return humanize.Bytes(uint64(value))
-	case "bits":
-		return humanize.IBytes(uint64(value * converter.Bit))
+	case "bits", "bit":
+		// humanize.IBytes/Bytes doesn't support bits
+		// and returns 0 B for values less than a byte
+		if value < 8 {
+			return fmt.Sprintf("%v b", value)
+		}
+		return humanize.IBytes(uint64(value / 8))
 	case "decbits":
-		return humanize.Bytes(uint64(value * converter.Bit))
-	case "kbytes", "kBy":
+		if value < 8 {
+			return fmt.Sprintf("%v b", value)
+		}
+		return humanize.Bytes(uint64(value / 8))
+	case "kbytes", "KiBy":
 		return humanize.IBytes(uint64(value * converter.Kibibit))
-	case "decKbytes", "deckbytes":
-		return humanize.IBytes(uint64(value * converter.Kilobit))
-	case "mbytes", "MBy":
+	case "Kibit":
+		return humanize.IBytes(uint64(value * converter.Kibibit / 8))
+	case "decKbytes", "deckbytes", "kBy":
+		return humanize.Bytes(uint64(value * converter.Kilobit))
+	case "kbit":
+		return humanize.Bytes(uint64(value * converter.Kilobit / 8))
+	case "mbytes", "MiBy":
 		return humanize.IBytes(uint64(value * converter.Mebibit))
-	case "decMbytes", "decmbytes":
+	case "Mibit":
+		return humanize.IBytes(uint64(value * converter.Mebibit / 8))
+	case "decMbytes", "decmbytes", "MBy":
 		return humanize.Bytes(uint64(value * converter.Megabit))
-	case "gbytes", "GBy":
+	case "Mbit":
+		return humanize.Bytes(uint64(value * converter.Megabit / 8))
+	case "gbytes", "GiBy":
 		return humanize.IBytes(uint64(value * converter.Gibibit))
-	case "decGbytes", "decgbytes":
+	case "Gibit":
+		return humanize.IBytes(uint64(value * converter.Gibibit / 8))
+	case "decGbytes", "decgbytes", "GBy":
 		return humanize.Bytes(uint64(value * converter.Gigabit))
-	case "tbytes", "TBy":
+	case "Gbit":
+		return humanize.Bytes(uint64(value * converter.Gigabit / 8))
+	case "tbytes", "TiBy":
 		return humanize.IBytes(uint64(value * converter.Tebibit))
-	case "decTbytes", "dectbytes":
+	case "Tibit":
+		return humanize.IBytes(uint64(value * converter.Tebibit / 8))
+	case "decTbytes", "dectbytes", "TBy":
 		return humanize.Bytes(uint64(value * converter.Terabit))
-	case "pbytes", "PBy":
+	case "Tbit":
+		return humanize.Bytes(uint64(value * converter.Terabit / 8))
+	case "pbytes", "PiBy":
 		return humanize.IBytes(uint64(value * converter.Pebibit))
-	case "decPbytes", "decpbytes":
+	case "Pbit":
+		return humanize.Bytes(uint64(value * converter.Petabit / 8))
+	case "decPbytes", "decpbytes", "PBy":
 		return humanize.Bytes(uint64(value * converter.Petabit))
+	case "EiBy":
+		return humanize.IBytes(uint64(value * converter.Exbibit))
+	case "Ebit":
+		return humanize.Bytes(uint64(value * converter.Exabit / 8))
+	case "EBy":
+		return humanize.Bytes(uint64(value * converter.Exabit))
 	}
 	// When unit is not matched, return the value as it is.
 	return fmt.Sprintf("%v", value)
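The formatter leans on github.com/dustin/go-humanize, which only understands whole bytes; hence the new `value < 8` guard, since a sub-byte bit count would truncate to "0 B". A small runnable sketch of that behavior, with `formatBits` as an illustrative stand-in for the new "bits" branch:

```go
package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

// formatBits mirrors the new "bits" branch above: bit counts below one byte
// are printed raw, everything else is delegated to humanize as bytes.
func formatBits(value float64) string {
	if value < 8 {
		return fmt.Sprintf("%v b", value)
	}
	return humanize.IBytes(uint64(value / 8))
}

func main() {
	fmt.Println(formatBits(7))               // 7 b
	fmt.Println(formatBits(8 * 1024))        // 1.0 KiB
	fmt.Println(formatBits(8 * 1024 * 1024)) // 1.0 MiB
}
```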
@@ -25,49 +25,66 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
 	case "Bps", "By/s":
 		return humanize.Bytes(uint64(value)) + "/s"
 	case "binbps":
-		return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
+		// humanize.IBytes/Bytes doesn't support bits
+		// and returns 0 B for values less than a byte
+		if value < 8 {
+			return fmt.Sprintf("%v b/s", value)
+		}
+		return humanize.IBytes(uint64(value/8)) + "/s"
 	case "bps", "bit/s":
-		return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
-	case "KiBs":
+		if value < 8 {
+			return fmt.Sprintf("%v b/s", value)
+		}
+		return humanize.Bytes(uint64(value/8)) + "/s"
+	case "KiBs", "KiBy/s":
 		return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
-	case "Kibits":
-		return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
+	case "Kibits", "Kibit/s":
+		return humanize.IBytes(uint64(value*converter.KibibitPerSecond/8)) + "/s"
 	case "KBs", "kBy/s":
 		return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
 	case "Kbits", "kbit/s":
-		return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
-	case "MiBs":
+		return humanize.Bytes(uint64(value*converter.KilobitPerSecond/8)) + "/s"
+	case "MiBs", "MiBy/s":
 		return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
-	case "Mibits":
-		return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
+	case "Mibits", "Mibit/s":
+		return humanize.IBytes(uint64(value*converter.MebibitPerSecond/8)) + "/s"
 	case "MBs", "MBy/s":
 		return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
 	case "Mbits", "Mbit/s":
-		return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
-	case "GiBs":
+		return humanize.Bytes(uint64(value*converter.MegabitPerSecond/8)) + "/s"
+	case "GiBs", "GiBy/s":
 		return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
-	case "Gibits":
-		return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
+	case "Gibits", "Gibit/s":
+		return humanize.IBytes(uint64(value*converter.GibibitPerSecond/8)) + "/s"
 	case "GBs", "GBy/s":
 		return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
 	case "Gbits", "Gbit/s":
-		return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
-	case "TiBs":
+		return humanize.Bytes(uint64(value*converter.GigabitPerSecond/8)) + "/s"
+	case "TiBs", "TiBy/s":
 		return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
-	case "Tibits":
-		return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
+	case "Tibits", "Tibit/s":
+		return humanize.IBytes(uint64(value*converter.TebibitPerSecond/8)) + "/s"
 	case "TBs", "TBy/s":
 		return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
 	case "Tbits", "Tbit/s":
-		return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
-	case "PiBs":
+		return humanize.Bytes(uint64(value*converter.TerabitPerSecond/8)) + "/s"
+	case "PiBs", "PiBy/s":
 		return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
-	case "Pibits":
-		return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
+	case "Pibits", "Pibit/s":
+		return humanize.IBytes(uint64(value*converter.PebibitPerSecond/8)) + "/s"
 	case "PBs", "PBy/s":
 		return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
 	case "Pbits", "Pbit/s":
-		return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
+		return humanize.Bytes(uint64(value*converter.PetabitPerSecond/8)) + "/s"
+	// Exa units
+	case "EBy/s":
+		return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
+	case "Ebit/s":
+		return humanize.Bytes(uint64(value*converter.ExabitPerSecond/8)) + "/s"
+	case "EiBy/s":
+		return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
+	case "Eibit/s":
+		return humanize.IBytes(uint64(value*converter.ExbibitPerSecond/8)) + "/s"
 	}
 	// When unit is not matched, return the value as it is.
 	return fmt.Sprintf("%v", value)
pkg/query-service/formatter/data_rate_test.go (new file, 150 lines)
@@ -0,0 +1,150 @@
+package formatter
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestDataRateFormatterComprehensive(t *testing.T) {
+	dataRateFormatter := NewDataRateFormatter()
+
+	tests := []struct {
+		name     string
+		value    float64
+		unit     string
+		expected string
+	}{
+		// IEC Bits/sec - binbps, bps
+		{name: "binbps as Bps", value: 7, unit: "binbps", expected: "7 b/s"},
+		{name: "100 binbps as 12 Bps", value: 100, unit: "binbps", expected: "12 B/s"},
+		{name: "binbps as 23 GiBs", value: 8 * 1024 * 1024 * 1024 * 23, unit: "binbps", expected: "23 GiB/s"},
+
+		// SI Bits/sec - bps, bit/s
+		{name: "bps as Bps", value: 5, unit: "bps", expected: "5 b/s"},
+		{name: "200 bitps as 25 Bps", value: 200, unit: "bit/s", expected: "25 B/s"},
+		{name: "bitps as MBs", value: 8 * 1000 * 1000 * 7, unit: "bit/s", expected: "7.0 MB/s"},
+
+		// IEC Base bytes/sec - binBps
+		{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
+		{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
+		{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
+		{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
+		{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},
+
+		// SI Base bytes/sec - Bps, By/s
+		{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
+		{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
+		{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
+		{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},
+
+		// Kibibytes/sec - KiBs, KiBy/s
+		{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
+		{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
+		{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
+		{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
+		{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
+		{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},
+
+		// Kibibits/sec - Kibits, Kibit/s
+		{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "128 B/s"},
+		{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "5.3 MiB/s"},
+		{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"},
+
+		// Kilobytes/sec (SI) - KBs, kBy/s
+		{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
+		{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
+		{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},
+
+		// Kilobits/sec (SI) - Kbits, kbit/s
+		{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "125 B/s"},
+		{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "125 B/s"},
+
+		// Mebibytes/sec - MiBs, MiBy/s
+		{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
+		{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
+		{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
+		{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},
+
+		// Mebibits/sec - Mibits, Mibit/s
+		{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "5.0 MiB/s"},
+		{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "1.3 MiB/s"},
+
+		// Megabytes/sec (SI) - MBs, MBy/s
+		{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
+		{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},
+
+		// Megabits/sec (SI) - Mbits, Mbit/s
+		{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "125 kB/s"},
+		{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "125 kB/s"},
+
+		// Gibibytes/sec - GiBs, GiBy/s
+		{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
+		{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
+		{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},
+
+		// Gibibits/sec - Gibits, Gibit/s
+		{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "5.3 TiB/s"},
+		{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "5.3 TiB/s"},
+
+		// Gigabytes/sec (SI) - GBs, GBy/s
+		{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
+		{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},
+
+		// Gigabits/sec (SI) - Gbits, Gbit/s
+		{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "5.3 TB/s"},
+		{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "5.3 TB/s"},
+
+		// Tebibytes/sec - TiBs, TiBy/s
+		{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
+		{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
+		{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},
+
+		// Tebibits/sec - Tibits, Tibit/s
+		{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "5.3 PiB/s"},
+		{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "5.3 PiB/s"},
+
+		// Terabytes/sec (SI) - TBs, TBy/s
+		{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
+		{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},
+
+		// Terabits/sec (SI) - Tbits, Tbit/s
+		{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "5.3 PB/s"},
+		{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "5.3 PB/s"},
+
+		// Pebibytes/sec - PiBs, PiBy/s
+		{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
+		{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},
+
+		// Pebibits/sec - Pibits, Pibit/s
+		{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "1.3 EiB/s"},
+		{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "1.3 EiB/s"},
+
+		// Petabytes/sec (SI) - PBs, PBy/s
+		{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
+		{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},
+
+		// Petabits/sec (SI) - Pbits, Pbit/s
+		{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "5.3 PB/s"},
+		{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "5.3 PB/s"},
+
+		// Exabytes/sec (SI) - EBy/s
+		{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},
+
+		// Exabits/sec (SI) - Ebit/s
+		{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "1.3 EB/s"},
+
+		// Exbibytes/sec (IEC) - EiBy/s
+		{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},
+
+		// Exbibits/sec (IEC) - Eibit/s
+		{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "1.3 EiB/s"},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got := dataRateFormatter.Format(tt.value, tt.unit)
+			assert.Equal(t, tt.expected, got)
+		})
+	}
+}
@@ -14,21 +14,174 @@ func TestData(t *testing.T) {
    assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
    assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
    assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
    assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
    assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
    assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
    assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
    assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
    assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
    assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
    assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
    assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
    assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
    assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
    assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
    assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
    assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
    assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
    assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
    assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
    assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
    assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
    assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
    assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
    assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
    assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
    assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}

func TestDataFormatterComprehensive(t *testing.T) {
    dataFormatter := NewDataFormatter()

    tests := []struct {
        name     string
        value    float64
        unit     string
        expected string
    }{
        // IEC Bits - bits, bit
        {name: "bits: 1", value: 1, unit: "bits", expected: "1 b"},
        {name: "bits: 7", value: 7, unit: "bit", expected: "7 b"},
        {name: "bits: to MiB = 8 * 1024 * 1024 * 9", value: 8 * 1024 * 1024 * 9, unit: "bits", expected: "9.0 MiB"},

        // SI Bits - decbits
        {name: "decbits: 1", value: 1, unit: "decbits", expected: "1 b"},
        {name: "decbits: 7", value: 7, unit: "decbits", expected: "7 b"},
        {name: "decbits: to MB = 8 * 1000 * 1000 * 4", value: 8 * 1000 * 1000 * 4, unit: "decbits", expected: "4.0 MB"},

        // IEC Base bytes - bytes, By
        {name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
        {name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
        {name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
        {name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
        {name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
        {name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
        {name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
        {name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
        {name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},

        // SI Base bytes - decbytes
        {name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
        {name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
        {name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
        {name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},

        // Kibibytes - kbytes, KiBy (IEC)
        {name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
        {name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
        {name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
        {name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
        {name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
        {name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
        {name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
        {name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
        {name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},

        // SI Kilobytes - decKbytes, deckbytes, kBy
        {name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
        {name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
        {name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
        {name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
        {name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},

        // Mebibytes - mbytes, MiBy (IEC)
        {name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
        {name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
        {name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
        {name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
        {name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
        {name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
        {name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
        {name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},

        // SI Megabytes - decMbytes, decmbytes, MBy
        {name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
        {name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
        {name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
        {name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},

        // Gibibytes - gbytes, GiBy (IEC)
        {name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
        {name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
        {name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},

        // SI Gigabytes - decGbytes, decgbytes, GBy
        {name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
        {name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},

        // Tebibytes - tbytes, TiBy (IEC)
        {name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
        {name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
        {name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},

        // SI Terabytes - decTbytes, dectbytes, TBy
        {name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
        {name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
        {name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},

        // Pebibytes - pbytes, PiBy (IEC)
        {name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
        {name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},

        // SI Petabytes - decPbytes, decpbytes, PBy
        {name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
        {name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
        {name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},

        // Exbibytes - EiBy (IEC)
        {name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},

        // Exabytes - EBy (SI)
        {name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},

        // Kibibits - Kibit (IEC): 1 Kibit = 1024 bits = 128 bytes
        {name: "Kibit: 1 = 128 B", value: 1, unit: "Kibit", expected: "128 B"},
        {name: "Kibit: 1024 = 128 KiB", value: 1024, unit: "Kibit", expected: "128 KiB"},

        // Mebibits - Mibit (IEC): 1 Mibit = 1024 Kibit = 128 KiB
        {name: "Mibit: 1 = 128 KiB", value: 1, unit: "Mibit", expected: "128 KiB"},
        {name: "Mibit: 1024 = 128 MiB", value: 1024, unit: "Mibit", expected: "128 MiB"},

        // Gibibits - Gibit (IEC): 1 Gibit = 1024 Mibit = 128 MiB
        {name: "Gibit: 1 = 128 MiB", value: 1, unit: "Gibit", expected: "128 MiB"},
        {name: "Gibit: 42*1024 = 5.3 TiB", value: 42 * 1024, unit: "Gibit", expected: "5.3 TiB"},

        // Tebibits - Tibit (IEC): 1 Tibit = 1024 Gibit = 128 GiB
        {name: "Tibit: 1 = 128 GiB", value: 1, unit: "Tibit", expected: "128 GiB"},
        {name: "Tibit: 42*1024 = 5.3 PiB", value: 42 * 1024, unit: "Tibit", expected: "5.3 PiB"},

        // Kilobits - kbit (SI): 1 kbit = 1000 bits = 125 bytes
        {name: "kbit: 1 = 125 B", value: 1, unit: "kbit", expected: "125 B"},
        {name: "kbit: 1000 = 125 kB", value: 1000, unit: "kbit", expected: "125 kB"},

        // Megabits - Mbit (SI): 1 Mbit = 1000 kbit = 125 kB
        {name: "Mbit: 1 = 125 kB", value: 1, unit: "Mbit", expected: "125 kB"},
        {name: "Mbit: 1000 = 125 MB", value: 1000, unit: "Mbit", expected: "125 MB"},

        // Gigabits - Gbit (SI): 1 Gbit = 1000 Mbit = 125 MB
        {name: "Gbit: 1 = 125 MB", value: 1, unit: "Gbit", expected: "125 MB"},
        {name: "Gbit: 42*1000 = 5.3 TB", value: 42 * 1000, unit: "Gbit", expected: "5.3 TB"},

        // Terabits - Tbit (SI): 1 Tbit = 1000 Gbit = 125 GB
        {name: "Tbit: 1 = 125 GB", value: 1, unit: "Tbit", expected: "125 GB"},
        {name: "Tbit: 42*1000 = 5.3 PB", value: 42 * 1000, unit: "Tbit", expected: "5.3 PB"},

        // Petabits - Pbit (SI): 1 Pbit = 1000 Tbit = 125 TB
        {name: "Pbit: 1 = 125 TB", value: 1, unit: "Pbit", expected: "125 TB"},
        {name: "Pbit: 42 = 5.3 PB", value: 42, unit: "Pbit", expected: "5.3 PB"},

        // Exabits - Ebit (SI): 1 Ebit = 1000 Pbit = 125 PB
        {name: "Ebit: 1 = 125 PB", value: 1, unit: "Ebit", expected: "125 PB"},
        {name: "Ebit: 10 = 1.3 EB", value: 10, unit: "Ebit", expected: "1.3 EB"},
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            got := dataFormatter.Format(tt.value, tt.unit)
            assert.Equal(t, tt.expected, got)
        })
    }
}
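The bit-unit expectations above (`1 Kibit -> "128 B"`, `1 Mbit -> "125 kB"`) all follow the same rule: divide by 8 into bytes, then scale within the unit family's own base. A hypothetical helper illustrating the binary (IEC) case, not the package's actual code (assumes `fmt` is imported):

```go
// kibitToBytes renders a Kibit value the way the table expects:
// 1 Kibit = 1024 bits = 128 bytes; 1024 Kibit = 128 KiB.
func kibitToBytes(v float64) string {
	bytes := v * 1024 / 8
	suffixes := []string{"B", "KiB", "MiB", "GiB", "TiB"}
	i := 0
	for bytes >= 1024 && i < len(suffixes)-1 {
		bytes /= 1024
		i++
	}
	return fmt.Sprintf("%g %s", bytes, suffixes[i])
}
```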
@@ -18,11 +18,11 @@ var (

func FromUnit(u string) Formatter {
    switch u {
-   case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
+   case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
        return DurationFormatter
-   case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
+   case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
        return DataFormatter
-   case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
+   case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
        return DataRateFormatter
    case "percent", "percentunit", "%":
        return PercentFormatter
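Dispatch is purely by unit string, so after this change the OTel-style aliases resolve to the same formatters as the legacy Grafana-style names. A hedged usage sketch (assuming the `Formatter` interface exposes the `Format(float64, string) string` method used in the tests above):

```go
f := FromUnit("KiBy/s")      // now resolves to DataRateFormatter
_ = f.Format(1024, "KiBy/s") // expected to render "1.0 MiB/s", by analogy with the rate tests
```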
@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
        return toHours(value)
    case "d":
        return toDays(value)
-   case "w":
+   case "w", "wk":
        return toWeeks(value)
    }
    // When unit is not matched, return the value as it is.
@@ -205,7 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
            key.Indexes = matchingKey.Indexes
            key.Materialized = matchingKey.Materialized
            key.JSONPlan = matchingKey.JSONPlan

            return actions
        } else {
            // multiple matching keys, set materialized only if all the keys are materialized
@@ -483,6 +483,22 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
        value1 := v.Visit(values[0])
        value2 := v.Visit(values[1])

+       switch value1.(type) {
+       case float64:
+           if _, ok := value2.(float64); !ok {
+               v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
+               return ""
+           }
+       case string:
+           if _, ok := value2.(string); !ok {
+               v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
+               return ""
+           }
+       default:
+           v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
+           return ""
+       }
+
        var conds []string
        for _, key := range keys {
            condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
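In effect, a two-operand comparison now requires both operands to share a type; hypothetical filter expressions illustrating the guard (key and literal values are made up):

```go
// duration_nano BETWEEN 100 AND 200   -> both numbers, accepted
// duration_nano BETWEEN 100 AND '200' -> mixed types, visitor records:
//   "value type mismatch for key duration_nano: expected number for both operands"
```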
@@ -855,7 +871,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
        // 1. either user meant key ( this is already handled above in fieldKeysForName )
        // 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

        // Note:
        // If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
        // If user only wants to search `key`, then they have to use `key`
        // If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -375,13 +375,6 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
        config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
    }

-   if os.Getenv("INTERPOLATION_ENABLED") != "" {
-       logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
-       if config.Flagger.Config.Boolean == nil {
-           config.Flagger.Config.Boolean = make(map[string]bool)
-       }
-       config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
-   }
}

func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {
@@ -13,6 +13,8 @@ import (
    "github.com/SigNoz/signoz/pkg/modules/authdomain"
    "github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
    "github.com/SigNoz/signoz/pkg/modules/dashboard"
+   "github.com/SigNoz/signoz/pkg/modules/identity"
+   "github.com/SigNoz/signoz/pkg/modules/identity/implidentity"
    "github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
    "github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
    "github.com/SigNoz/signoz/pkg/modules/organization"
@@ -54,6 +56,7 @@ type Modules struct {
    Preference      preference.Module
    User            user.Module
    UserGetter      user.Getter
+   Identity        identity.Module
    SavedView       savedview.Module
    Apdex           apdex.Module
    Dashboard       dashboard.Module
@@ -88,7 +91,8 @@ func NewModules(
) Modules {
    quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
    orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
-   user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
+   identityModule := implidentity.NewModule(implidentity.NewStore(sqlstore))
+   user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User, identityModule)
    userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
    ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -101,11 +105,12 @@ func NewModules(
        Dashboard:       dashboard,
        User:            user,
        UserGetter:      userGetter,
+       Identity:        identityModule,
        QuickFilter:     quickfilter,
        TraceFunnel:     impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
        RawDataExport:   implrawdataexport.NewModule(querier),
        AuthDomain:      implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
-       Session:         implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
+       Session:         implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter, identityModule),
        SpanPercentile:  implspanpercentile.NewModule(querier, providerSettings),
        Services:        implservices.NewModule(querier, telemetryStore),
        MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
@@ -167,6 +167,11 @@ func NewSQLMigrationProviderFactories(
        sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
        sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
        sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
+       sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
+       sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
+       sqlmigration.NewAddIdentityFactory(sqlstore, sqlschema),
+       sqlmigration.NewUpdateUserIdentity(sqlstore, sqlschema),
+       sqlmigration.NewMigrateUserRolesFactory(sqlstore, sqlschema),
    )
}
@@ -75,8 +75,9 @@ func TestNewProviderFactories(t *testing.T) {
    })

    assert.NotPanics(t, func() {
-       userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings()))
-       orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil)
+       sqlStore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)
+       userGetter := impluser.NewGetter(impluser.NewStore(sqlStore, instrumentationtest.New().ToProviderSettings()))
+       orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), nil)
        telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual)
        NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, tokenizertest.NewMockTokenizer(t), version.Build{}, analytics.Config{Enabled: true})
    })
@@ -389,6 +389,8 @@ func New(
    // Initialize all modules
    modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)

+   userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, modules.Identity, config.User.Root)
+
    // Initialize all handlers for the modules
    handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)

@@ -438,6 +440,7 @@ func New(
        factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
        factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
        factory.NewNamedService(factory.MustNewName("authz"), authz),
+       factory.NewNamedService(factory.MustNewName("user"), userService),
    )
    if err != nil {
        return nil, err
pkg/sqlmigration/064_add_root_user.go (new file, 80 lines)
@@ -0,0 +1,80 @@
package sqlmigration

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type addRootUser struct {
    sqlstore  sqlstore.SQLStore
    sqlschema sqlschema.SQLSchema
}

func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
        return &addRootUser{
            sqlstore:  sqlstore,
            sqlschema: sqlschema,
        }, nil
    })
}

func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
    if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
        return err
    }

    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
    if err != nil {
        return err
    }

    column := &sqlschema.Column{
        Name:     sqlschema.ColumnName("is_root"),
        DataType: sqlschema.DataTypeBoolean,
        Nullable: false,
    }

    sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
    for _, sql := range sqls {
        if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
            return err
        }
    }

    if err := tx.Commit(); err != nil {
        return err
    }

    if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
        return err
    }

    return nil
}

func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
    return nil
}
pkg/sqlmigration/065_add_user_email_org_id_index.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package sqlmigration

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type addUserEmailOrgIDIndex struct {
    sqlstore  sqlstore.SQLStore
    sqlschema sqlschema.SQLSchema
}

func NewAddUserEmailOrgIDIndexFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(factory.MustNewName("add_user_email_org_id_index"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
        return &addUserEmailOrgIDIndex{
            sqlstore:  sqlstore,
            sqlschema: sqlschema,
        }, nil
    })
}

func (migration *addUserEmailOrgIDIndex) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *addUserEmailOrgIDIndex) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    sqls := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"email", "org_id"}})
    for _, sql := range sqls {
        if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
            return err
        }
    }

    if err := tx.Commit(); err != nil {
        return err
    }

    return nil
}

func (migration *addUserEmailOrgIDIndex) Down(ctx context.Context, db *bun.DB) error {
    return nil
}
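For context, on SQLite the `CreateIndex` call above would be expected to emit DDL along these lines (illustrative only; the index name is an assumption, not taken from the migration):

```go
// Assumed generated SQL (index name hypothetical):
//   CREATE UNIQUE INDEX IF NOT EXISTS idx_users_email_org_id ON users (email, org_id);
```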
pkg/sqlmigration/066_add_identity.go (new file, 111 lines)
@@ -0,0 +1,111 @@
package sqlmigration

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type addIdentity struct {
    sqlstore  sqlstore.SQLStore
    sqlschema sqlschema.SQLSchema
}

func NewAddIdentityFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(factory.MustNewName("add_identity"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
        return &addIdentity{
            sqlstore:  sqlstore,
            sqlschema: sqlschema,
        }, nil
    })
}

func (migration *addIdentity) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *addIdentity) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    sqls := [][]byte{}
    tableSQLs := migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
        Name: "identity",
        Columns: []*sqlschema.Column{
            {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false},
            {Name: "status", DataType: sqlschema.DataTypeText, Nullable: false},
            {Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
            {Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
            {Name: "org_id", DataType: sqlschema.DataTypeText, Nullable: false},
        },
        PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{
            ColumnNames: []sqlschema.ColumnName{"id"},
        },
        ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
            {
                ReferencingColumnName: sqlschema.ColumnName("org_id"),
                ReferencedTableName:   sqlschema.TableName("organizations"),
                ReferencedColumnName:  sqlschema.ColumnName("id"),
            },
        },
    })
    sqls = append(sqls, tableSQLs...)

    tableSQLs = migration.sqlschema.Operator().CreateTable(&sqlschema.Table{
        Name: "identity_role",
        Columns: []*sqlschema.Column{
            {Name: "id", DataType: sqlschema.DataTypeText, Nullable: false},
            {Name: "identity_id", DataType: sqlschema.DataTypeText, Nullable: false},
            {Name: "role_name", DataType: sqlschema.DataTypeText, Nullable: false},
            {Name: "created_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
            {Name: "updated_at", DataType: sqlschema.DataTypeTimestamp, Nullable: false},
        },
        PrimaryKeyConstraint: &sqlschema.PrimaryKeyConstraint{
            ColumnNames: []sqlschema.ColumnName{"id"},
        },
        ForeignKeyConstraints: []*sqlschema.ForeignKeyConstraint{
            {
                ReferencingColumnName: sqlschema.ColumnName("identity_id"),
                ReferencedTableName:   sqlschema.TableName("identity"),
                ReferencedColumnName:  sqlschema.ColumnName("id"),
            },
        },
    })
    sqls = append(sqls, tableSQLs...)

    indexSQLs := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{
        TableName:   "identity_role",
        ColumnNames: []sqlschema.ColumnName{"identity_id", "role_name"},
    })
    sqls = append(sqls, indexSQLs...)

    for _, sql := range sqls {
        if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
            return err
        }
    }

    if err := tx.Commit(); err != nil {
        return err
    }

    return nil
}

func (migration *addIdentity) Down(context.Context, *bun.DB) error {
    return nil
}
pkg/sqlmigration/067_update_user_identity.go (new file, 138 lines)
@@ -0,0 +1,138 @@
package sqlmigration

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types/identitytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

type updateUserIdentity struct {
    sqlstore  sqlstore.SQLStore
    sqlschema sqlschema.SQLSchema
}

func NewUpdateUserIdentity(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(factory.MustNewName("update_user_identity"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
        return &updateUserIdentity{
            sqlstore:  sqlstore,
            sqlschema: sqlschema,
        }, nil
    })
}

func (migration *updateUserIdentity) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *updateUserIdentity) Up(ctx context.Context, db *bun.DB) error {
    // 1. Disable FK enforcement
    if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
        return err
    }

    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    // 2. Fetch existing users
    type existingUser struct {
        bun.BaseModel `bun:"table:users"`

        ID    string `bun:"id"`
        OrgID string `bun:"org_id"`
    }
    var users []*existingUser
    if err := tx.NewSelect().Model(&users).Scan(ctx); err != nil {
        return err
    }

    // 3. Create identity records for each user
    identities := make([]*identitytypes.StorableIdentity, 0, len(users))
    for _, user := range users {
        identityID, err := valuer.NewUUID(user.ID)
        if err != nil {
            return err
        }
        orgID, err := valuer.NewUUID(user.OrgID)
        if err != nil {
            return err
        }
        identities = append(identities, identitytypes.NewStorableIdentity(identityID, orgID))
    }

    if len(identities) > 0 {
        if _, err := tx.NewInsert().Model(&identities).Exec(ctx); err != nil {
            return err
        }
    }

    // 4. Get current table structure
    table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
    if err != nil {
        return err
    }

    // 5. Get existing indices to preserve them after recreation
    indices, err := migration.sqlschema.GetIndices(ctx, sqlschema.TableName("users"))
    if err != nil {
        return err
    }

    // 6. Build all SQL statements
    sqls := [][]byte{}

    // Add identity_id column using AddColumn with ColumnName("id") as value
    identityIdColumn := &sqlschema.Column{
        Name:     "identity_id",
        DataType: sqlschema.DataTypeText,
        Nullable: false,
    }
    sqls = append(sqls, migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, identityIdColumn, sqlschema.ColumnName("id"))...)

    // Add FK constraint to table definition
    table.ForeignKeyConstraints = append(table.ForeignKeyConstraints, &sqlschema.ForeignKeyConstraint{
        ReferencingColumnName: sqlschema.ColumnName("identity_id"),
        ReferencedTableName:   sqlschema.TableName("identity"),
        ReferencedColumnName:  sqlschema.ColumnName("id"),
    })

    // Recreate table to apply FK constraint
    sqls = append(sqls, migration.sqlschema.Operator().RecreateTable(table, uniqueConstraints)...)

    // Recreate indices that were lost during table recreation
    for _, index := range indices {
        sqls = append(sqls, migration.sqlschema.Operator().CreateIndex(index)...)
    }

    // 7. Execute all SQL statements
    for _, sql := range sqls {
        if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
            return err
        }
    }

    if err := tx.Commit(); err != nil {
        return err
    }

    // 8. Re-enable FK enforcement
    return migration.sqlschema.ToggleFKEnforcement(ctx, db, true)
}

func (migration *updateUserIdentity) Down(context.Context, *bun.DB) error {
    return nil
}
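A note on the FK dance in steps 1 and 8: SQLite cannot add a foreign key to an existing table via `ALTER TABLE`, so `RecreateTable` presumably rebuilds `users` from scratch, and toggling enforcement keeps the intermediate states valid. A rough sketch of the SQL this is expected to expand to (illustrative only; statement shapes and the temp-table name are assumptions, not taken from the operator):

```go
// Assumed expansion on SQLite (names hypothetical):
//   PRAGMA foreign_keys = OFF;
//   ALTER TABLE users ADD COLUMN identity_id TEXT NOT NULL DEFAULT '';
//   UPDATE users SET identity_id = id;
//   CREATE TABLE users__temp (..., FOREIGN KEY (identity_id) REFERENCES identity (id));
//   INSERT INTO users__temp SELECT ... FROM users;
//   DROP TABLE users;
//   ALTER TABLE users__temp RENAME TO users;
//   -- recreate preserved indices, then: PRAGMA foreign_keys = ON;
```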
pkg/sqlmigration/068_migrate_user_roles.go (new file, 92 lines)
@@ -0,0 +1,92 @@
package sqlmigration

import (
    "context"

    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/sqlschema"
    "github.com/SigNoz/signoz/pkg/sqlstore"
    "github.com/SigNoz/signoz/pkg/types/identitytypes"
    "github.com/SigNoz/signoz/pkg/valuer"
    "github.com/uptrace/bun"
    "github.com/uptrace/bun/migrate"
)

// Role name mapping from existing roles to managed role names
var existingRoleToManagedRole = map[string]string{
    "ADMIN":  "signoz-admin",
    "EDITOR": "signoz-editor",
    "VIEWER": "signoz-viewer",
}

type migrateUserRoles struct {
    sqlstore  sqlstore.SQLStore
    sqlschema sqlschema.SQLSchema
}

func NewMigrateUserRolesFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
    return factory.NewProviderFactory(factory.MustNewName("migrate_user_roles"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
        return &migrateUserRoles{
            sqlstore:  sqlstore,
            sqlschema: sqlschema,
        }, nil
    })
}

func (migration *migrateUserRoles) Register(migrations *migrate.Migrations) error {
    if err := migrations.Register(migration.Up, migration.Down); err != nil {
        return err
    }

    return nil
}

func (migration *migrateUserRoles) Up(ctx context.Context, db *bun.DB) error {
    tx, err := db.BeginTx(ctx, nil)
    if err != nil {
        return err
    }

    defer func() {
        _ = tx.Rollback()
    }()

    type existingUser struct {
        bun.BaseModel `bun:"table:users"`

        ID   string `bun:"id"`
        Role string `bun:"role"`
    }
    var users []*existingUser
    if err := tx.NewSelect().Model(&users).Scan(ctx); err != nil {
        return err
    }

    identityRoles := make([]*identitytypes.StorableIdentityRole, 0, len(users))
    for _, user := range users {
        roleName := existingRoleToManagedRole[user.Role]
        identityID, err := valuer.NewUUID(user.ID)
        if err != nil {
            return err
        }
        identityRoles = append(identityRoles, identitytypes.NewStorableIdentityRole(
            identityID,
            roleName,
        ))
    }

    if len(identityRoles) > 0 {
        if _, err := tx.NewInsert().Model(&identityRoles).Exec(ctx); err != nil {
            return err
        }
    }

    if err := tx.Commit(); err != nil {
        return err
    }

    return nil
}

func (migration *migrateUserRoles) Down(context.Context, *bun.DB) error {
    return nil
}
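One caveat worth flagging: the lookup `existingRoleToManagedRole[user.Role]` yields an empty string for any role outside ADMIN/EDITOR/VIEWER, and that empty string would then be inserted as `role_name`. A more defensive variant (a sketch, not the shipped code; the fallback role is an assumption) might be:

```go
roleName, ok := existingRoleToManagedRole[user.Role]
if !ok {
	// Assumption: fall back to the least-privileged managed role
	// rather than inserting an empty role_name.
	roleName = "signoz-viewer"
}
```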
@@ -5,6 +5,7 @@ import (
    "fmt"
    "log/slog"

+   "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/querybuilder"
    "github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -73,7 +74,7 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
        cteArgs      [][]any
    )

-   if b.metricsStatementBuilder.CanShortCircuitDelta(query) {
+   if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
        // spatial_aggregation_cte directly for certain delta queries
        if frag, args, err := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables); err != nil {
            return nil, err
@@ -91,8 +92,9 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
    }

    // spatial_aggregation_cte
-   frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
-   if frag != "" {
+   if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
+       return nil, err
+   } else if frag != "" {
        cteFragments = append(cteFragments, frag)
        cteArgs = append(cteArgs, args)
    }
@@ -122,13 +124,16 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
    for _, g := range query.GroupBy {
        col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
        if err != nil {
-           return "", []any{}, err
+           return "", nil, err
        }
        sb.SelectMore(col)
    }

    tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-   aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+   aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+   if err != nil {
+       return "", nil, err
+   }
    if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
        aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
    }
@@ -150,7 +155,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
            Variables: variables,
        }, start, end)
        if err != nil {
-           return "", []any{}, err
+           return "", nil, err
        }
    }
    if filterWhere != nil {
@@ -208,8 +213,11 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
    }

    tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-   aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
+   aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
        query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+   if err != nil {
+       return "", nil, err
+   }
    if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
        aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
    }
@@ -278,7 +286,10 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
    }

    tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-   aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+   aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+   if err != nil {
+       return "", nil, err
+   }
    baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))

    baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -315,25 +326,23 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

    switch query.Aggregations[0].TimeAggregation {
    case metrictypes.TimeAggregationRate:
-       rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
        wrapped := sqlbuilder.NewSelectBuilder()
        wrapped.Select("ts")
        for _, g := range query.GroupBy {
            wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
        }
-       wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
+       wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
        wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
        q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
        return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

    case metrictypes.TimeAggregationIncrease:
-       incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
        wrapped := sqlbuilder.NewSelectBuilder()
        wrapped.Select("ts")
        for _, g := range query.GroupBy {
            wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
        }
-       wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
+       wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
        wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
        q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
        return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

@@ -348,7 +357,15 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
    _ uint64,
    query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
    _ map[string][]*telemetrytypes.TelemetryFieldKey,
-) (string, []any) {
+) (string, []any, error) {
+   if query.Aggregations[0].SpaceAggregation.IsZero() {
+       return "", nil, errors.Newf(
+           errors.TypeInvalidInput,
+           errors.CodeInvalidInput,
+           "invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
+       )
+   }
    sb := sqlbuilder.NewSelectBuilder()

    sb.Select("ts")

@@ -365,5 +382,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
    sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

    q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
-   return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
+   return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}
@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
                },
            },
            expected: qbtypes.Statement{
-               Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
+               Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
                Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
            },
            expectedErr: nil,

@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
                },
            },
            expected: qbtypes.Statement{
-               Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
+               Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
                Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
            },
            expectedErr: nil,

@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
                },
            },
            expected: qbtypes.Statement{
-               Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
+               Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
                Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
            },
            expectedErr: nil,

@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
                },
            },
            expected: qbtypes.Statement{
-               Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
+               Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
                Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
            },
            expectedErr: nil,
@@ -3,6 +3,7 @@ package telemetrymeter
import (
    "time"

+   "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/types/metrictypes"
)
|
||||
@@ -63,7 +64,7 @@ func AggregationColumnForSamplesTable(
|
||||
temporality metrictypes.Temporality,
|
||||
timeAggregation metrictypes.TimeAggregation,
|
||||
tableHints *metrictypes.MetricTableHints,
|
||||
) string {
|
||||
) (string, error) {
|
||||
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
|
||||
var aggregationColumn string
|
||||
switch temporality {
|
||||
@@ -190,5 +191,13 @@ func AggregationColumnForSamplesTable(
|
||||
}
|
||||
|
||||
}
|
||||
return aggregationColumn
|
||||
|
||||
if aggregationColumn == "" {
|
||||
return "", errors.Newf(
|
||||
errors.TypeInvalidInput,
|
||||
errors.CodeInvalidInput,
|
||||
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
|
||||
)
|
||||
}
|
||||
return aggregationColumn, nil
|
||||
}
|
||||
|
||||
@@ -29,13 +29,7 @@ func (c *conditionBuilder) conditionFor(
    sb *sqlbuilder.SelectBuilder,
) (string, error) {

-   switch operator {
-   case qbtypes.FilterOperatorContains,
-       qbtypes.FilterOperatorNotContains,
-       qbtypes.FilterOperatorILike,
-       qbtypes.FilterOperatorNotILike,
-       qbtypes.FilterOperatorLike,
-       qbtypes.FilterOperatorNotLike:
+   if operator.IsStringSearchOperator() {
        value = querybuilder.FormatValueForContains(value)
    }

@@ -44,6 +38,18 @@ func (c *conditionBuilder) conditionFor(
        return "", err
    }

+   // TODO(srikanthccv): use the same data type collision handling when metrics schemas are updated
+   switch v := value.(type) {
+   case float64:
+       tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
+   case []any:
+       if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
+           if _, ok := v[0].(float64); ok {
+               tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
+           }
+       }
+   }
+
    switch operator {
    case qbtypes.FilterOperatorEqual:
        return sb.E(tblFieldName, value), nil
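With this wrapping in place, a numeric filter value against a meter label field compares as a float on the column side as well. An illustrative expansion (assuming labels live in a JSON column as in the tests above; the field name is hypothetical):

```go
// Assumed generated condition for a float64 value of 200:
//   toFloat64OrNull(JSONExtractString(labels, 'http.status_code')) = ?
// For BETWEEN [100, 500], the same wrapping applies before the range check.
```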
@@ -5,67 +5,27 @@ import (
|
||||
"fmt"
|
||||
"log/slog"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/flagger"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||
"github.com/SigNoz/signoz/pkg/types/metrictypes"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/huandu/go-sqlbuilder"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
const (
|
||||
RateWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
|
||||
IncreaseWithoutNegative = `If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value, ((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))`
|
||||
RateTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))`
|
||||
|
||||
RateWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window), (%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
|
||||
IncreaseWithoutNegativeMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, IF((%s - lagInFrame(%s, 1, 0) OVER rate_window) < 0, %s, ((%s - lagInFrame(%s, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window)) * (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(%d))) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
IncreaseTmpl = `multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value, per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)`
RateWithInterpolation = `
CASE
    WHEN row_number() OVER rate_window = 1 THEN
        -- First row: try to interpolate using next value
        CASE
            WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
                -- Assume linear growth to next point
                (leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
                (leadInFrame(ts, 1) OVER rate_window - ts)
            ELSE
                0 -- No next value either, can't interpolate
        END
    WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
        -- Counter reset detected
        per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window)
    ELSE
        -- Normal case: calculate rate
        (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) /
        (ts - lagInFrame(ts, 1) OVER rate_window)
END`
RateMultiTemporalityTmpl = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s / (ts - lagInFrame(ts, 1) OVER rate_window), (%s - lagInFrame(%s, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window))) AS per_series_value`
IncreaseWithInterpolation = `
CASE
    WHEN row_number() OVER rate_window = 1 THEN
        -- First row: try to interpolate using next value
        CASE
            WHEN leadInFrame(per_series_value, 1) OVER rate_window IS NOT NULL THEN
                -- Calculate the interpolated increase for this interval
                ((leadInFrame(per_series_value, 1) OVER rate_window - per_series_value) /
                (leadInFrame(ts, 1) OVER rate_window - ts)) *
                (leadInFrame(ts, 1) OVER rate_window - ts)
            ELSE
                0 -- No next value either, can't interpolate
        END
    WHEN (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0 THEN
        -- Counter reset detected: the increase is the current value
        per_series_value
    ELSE
        -- Normal case: calculate increase
        (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window)
END`
IncreaseMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, multiIf(row_number() OVER rate_window = 1, nan, (%s - lagInFrame(%s, 1) OVER rate_window) < 0, %s, (%s - lagInFrame(%s, 1) OVER rate_window))) AS per_series_value`
OthersMultiTemporality = `IF(LOWER(temporality) LIKE LOWER('delta'), %s, %s) AS per_series_value`
)
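For intuition about the window templates above, here is a small self-contained sketch (not SigNoz code) of the per-series logic that RateTmpl encodes: the first row of a series yields NaN, a counter reset (value drop) falls back to value/Δt, and the normal case is Δvalue/Δt.

```go
package main

import (
	"fmt"
	"math"
)

// rate mimics RateTmpl for one fingerprint's ordered samples:
// NaN for the first row, value/dt after a counter reset, dValue/dt otherwise.
func rate(ts []int64, vals []float64) []float64 {
	out := make([]float64, len(vals))
	for i := range vals {
		if i == 0 {
			out[i] = math.NaN() // row_number() OVER rate_window = 1
			continue
		}
		dt := float64(ts[i] - ts[i-1])
		if vals[i]-vals[i-1] < 0 {
			out[i] = vals[i] / dt // counter reset detected
		} else {
			out[i] = (vals[i] - vals[i-1]) / dt
		}
	}
	return out
}

func main() {
	// hypothetical samples: the counter resets between t=90 and t=120
	ts := []int64{30, 60, 90, 120}
	vals := []float64{100, 160, 220, 40}
	fmt.Println(rate(ts, vals)) // [NaN 2 2 1.3333...]
}
```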
type MetricQueryStatementBuilder struct {
@@ -147,54 +107,6 @@ func (b *MetricQueryStatementBuilder) Build(
return b.buildPipelineStatement(ctx, start, end, query, keys, variables)
}

// Fast-path (no fingerprint grouping)
// CanShortCircuitDelta returns true if we can use the optimized query
// for the given query
// This avoids the GROUP BY on fingerprint, improving performance
// for certain queries
// cases where we can short circuit:
// 1. time aggregation = (rate|increase) and space aggregation = sum
// - rate = sum(value)/step, increase = sum(value) - sum of sums is the same as sum of all values
//
// 2. time aggregation = sum and space aggregation = sum
// - sum of sums is the same as sum of all values
//
// 3. time aggregation = min and space aggregation = min
// - min of mins is the same as min of all values
//
// 4. time aggregation = max and space aggregation = max
// - max of maxes is the same as max of all values
//
// 5. special case exp histogram: there is no need for per-series/fingerprint aggregation;
// we can directly use the quantilesDDMerge function
//
// all of this is true only for delta metrics
func (b *MetricQueryStatementBuilder) CanShortCircuitDelta(q qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]) bool {
if q.Aggregations[0].Temporality != metrictypes.Delta {
return false
}

ta := q.Aggregations[0].TimeAggregation
sa := q.Aggregations[0].SpaceAggregation

if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
return true
}
if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
return true
}
if q.Aggregations[0].Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
return true
}
return false
}

func (b *MetricQueryStatementBuilder) buildPipelineStatement(
ctx context.Context,
start, end uint64,
@@ -256,10 +168,11 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
return nil, err
}

if b.CanShortCircuitDelta(query) {
if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
// spatial_aggregation_cte directly for certain delta queries
frag, args := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs)
if frag != "" {
if frag, args, err := b.buildTemporalAggDeltaFastPath(start, end, query, timeSeriesCTE, timeSeriesCTEArgs); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
@@ -273,8 +186,9 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
}

// spatial_aggregation_cte
frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
if frag != "" {
if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
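Both call sites now use the same Go idiom: the fragment, args, and error are declared in the if statement's init clause, the error is checked first, and the non-empty fragment is appended in the else-if branch, keeping all three names out of the enclosing scope. A minimal sketch of the pattern with a hypothetical buildCTE:

```go
package main

import (
	"errors"
	"fmt"
)

// buildCTE stands in for buildTemporalAggDeltaFastPath /
// buildSpatialAggregationCTE: it may fail, or return an empty fragment.
func buildCTE(ok bool) (string, []any, error) {
	if !ok {
		return "", nil, errors.New("invalid aggregation")
	}
	return "__cte AS (SELECT 1)", []any{1}, nil
}

func main() {
	var cteFragments []string
	var cteArgs [][]any

	// frag, args, and err are scoped to this if/else chain only
	if frag, args, err := buildCTE(true); err != nil {
		fmt.Println("error:", err)
		return
	} else if frag != "" {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}
	fmt.Println(cteFragments, cteArgs)
}
```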
@@ -294,7 +208,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
timeSeriesCTEArgs []any,
) (string, []any) {
) (string, []any, error) {
stepSec := int64(query.StepInterval.Seconds())

sb := sqlbuilder.NewSelectBuilder()
@@ -307,11 +221,15 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}

aggCol := AggregationColumnForSamplesTable(
aggCol, err := AggregationColumnForSamplesTable(
start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints,
)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}

@@ -334,7 +252,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDeltaFastPath(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}

func (b *MetricQueryStatementBuilder) buildTimeSeriesCTE(
@@ -437,8 +355,12 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}

aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
// TODO(srikanthccv): should it be step interval or use [start_time_unix_nano](https://github.com/open-telemetry/opentelemetry-proto/blob/d3fb76d70deb0874692bd0ebe03148580d85f3bb/opentelemetry/proto/metrics/v1/metrics.proto#L400C11-L400C31)?
aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
}

@@ -461,7 +383,7 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggDelta(
}

func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
ctx context.Context,
_ context.Context,
start, end uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
timeSeriesCTE string,
@@ -479,7 +401,10 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
baseSb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}

aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))

tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
@@ -496,36 +421,25 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

innerQuery, innerArgs := baseSb.BuildWithFlavor(sqlbuilder.ClickHouse, timeSeriesCTEArgs...)

// ! TODO (balanikaran) Get OrgID via function parameter instead of valuer.GenerateUUID()
interpolationEnabled := b.flagger.BooleanOrEmpty(ctx, flagger.FeatureInterpolationEnabled, featuretypes.NewFlaggerEvaluationContext(valuer.GenerateUUID()))

switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegative, start, start)
if interpolationEnabled {
rateExpr = RateWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", RateTmpl))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

case metrictypes.TimeAggregationIncrease:
incExpr := fmt.Sprintf(IncreaseWithoutNegative, start, start)
if interpolationEnabled {
incExpr = IncreaseWithInterpolation
}
wrapped := sqlbuilder.NewSelectBuilder()
wrapped.Select("ts")
for _, g := range query.GroupBy {
wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", IncreaseTmpl))
wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -534,7 +448,6 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
}
}

// Because rate interpolation is not enabled anywhere, due to gaps in the logic w.r.t. cache handling, it hasn't been considered for the multiple-temporalities path
func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
_ context.Context,
start, end uint64,
@@ -553,18 +466,32 @@ func (b *MetricQueryStatementBuilder) buildTemporalAggForMultipleTemporalities(
sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}

aggForDeltaTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggForCumulativeTemporality := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
aggForDeltaTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Delta, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
aggForCumulativeTemporality, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, metrictypes.Cumulative, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
if err != nil {
return "", nil, err
}
if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
aggForDeltaTemporality = fmt.Sprintf("%s/%d", aggForDeltaTemporality, stepSec)
}

switch query.Aggregations[0].TimeAggregation {
case metrictypes.TimeAggregationRate:
rateExpr := fmt.Sprintf(RateWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, aggForCumulativeTemporality, aggForCumulativeTemporality, start)
rateExpr := fmt.Sprintf(RateMultiTemporalityTmpl,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
sb.SelectMore(rateExpr)
case metrictypes.TimeAggregationIncrease:
increaseExpr := fmt.Sprintf(IncreaseWithoutNegativeMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality, start, start)
increaseExpr := fmt.Sprintf(IncreaseMultiTemporality,
aggForDeltaTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality, aggForCumulativeTemporality,
aggForCumulativeTemporality, aggForCumulativeTemporality,
)
sb.SelectMore(increaseExpr)
default:
expr := fmt.Sprintf(OthersMultiTemporality, aggForDeltaTemporality, aggForCumulativeTemporality)
@@ -592,7 +519,14 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
_ uint64,
query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
_ map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, []any) {
) (string, []any, error) {
if query.Aggregations[0].SpaceAggregation.IsZero() {
return "", nil, errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
)
}
sb := sqlbuilder.NewSelectBuilder()

sb.Select("ts")
@@ -609,7 +543,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
}

func (b *MetricQueryStatementBuilder) BuildFinalSelect(
@@ -641,9 +575,7 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
quantile,
))
sb.From("__spatial_aggregation_cte")
for _, g := range query.GroupBy {
sb.GroupBy(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
}
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.GroupBy("ts")
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
@@ -659,6 +591,8 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
sb.Where(rewrittenExpr)
}
}
sb.OrderBy(querybuilder.GroupByKeys(query.GroupBy)...)
sb.OrderBy("ts")

q, a := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
return &qbtypes.Statement{Query: combined + q, Args: append(args, a...)}, nil

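The two OrderBy calls are what make the final statement deterministic: rows come back sorted by the group-by keys, then by the time bucket, which is exactly the trailing ORDER BY `service.name`, ts the updated test expectations below assert. A small sketch of the same go-sqlbuilder usage, with a hypothetical column:

```go
package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	sb := sqlbuilder.NewSelectBuilder()
	sb.Select("*")
	sb.From("__spatial_aggregation_cte")
	// order by the group-by keys first, then the time bucket
	sb.OrderBy("`service.name`")
	sb.OrderBy("ts")

	q, _ := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	fmt.Println(q)
	// expected: SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts
}
```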
@@ -50,7 +50,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -83,7 +83,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947360000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND (match(JSONExtractString(labels, 'materialized.key.name'), ?) OR JSONExtractString(labels, 'service.name') = ?) GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "cartservice", "cartservice", "signoz_calls_total", uint64(1747947360000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -116,7 +116,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
Args: []any{"signoz_calls_total", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_calls_total", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
@@ -148,7 +148,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, sum(value)/30 AS value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'service.name') = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Args: []any{"signoz_latency", uint64(1747936800000), uint64(1747983420000), "delta", false, "cartservice", "signoz_latency", uint64(1747947390000), uint64(1747983420000)},
},
expectedErr: nil,
@@ -181,7 +181,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `host.name`, avg(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'host.name') AS `host.name` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? AND JSONExtractString(labels, 'host.name') = ? GROUP BY fingerprint, `host.name`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
Args: []any{"system.memory.usage", uint64(1747936800000), uint64(1747983420000), "unspecified", false, "big-data-node-1", "system.memory.usage", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,
@@ -210,7 +210,7 @@ func TestStatementBuilder(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747947390000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts",
Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, `le`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(30)) AS ts, `service.name`, `le`, max(value) AS per_series_value FROM signoz_metrics.distributed_samples_v4 AS points INNER JOIN (SELECT fingerprint, JSONExtractString(labels, 'service.name') AS `service.name`, JSONExtractString(labels, 'le') AS `le` FROM signoz_metrics.time_series_v4_6hrs WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli <= ? AND LOWER(temporality) LIKE LOWER(?) AND __normalized = ? GROUP BY fingerprint, `service.name`, `le`) AS filtered_time_series ON points.fingerprint = filtered_time_series.fingerprint WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? GROUP BY fingerprint, ts, `service.name`, `le` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, `le`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`, `le`) SELECT ts, `service.name`, histogramQuantile(arrayMap(x -> toFloat64(x), groupArray(le)), groupArray(value), 0.950) AS value FROM __spatial_aggregation_cte GROUP BY `service.name`, ts ORDER BY `service.name`, ts",
Args: []any{"http_server_duration_bucket", uint64(1747936800000), uint64(1747983420000), "cumulative", false, "http_server_duration_bucket", uint64(1747947390000), uint64(1747983420000), 0},
},
expectedErr: nil,

@@ -3,6 +3,7 @@ package telemetrymetrics
import (
"time"

"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/metrictypes"
)

@@ -168,7 +169,7 @@ func AggregationColumnForSamplesTable(
temporality metrictypes.Temporality,
timeAggregation metrictypes.TimeAggregation,
tableHints *metrictypes.MetricTableHints,
) string {
) (string, error) {
tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
var aggregationColumn string
switch temporality {
@@ -298,5 +299,12 @@ func AggregationColumnForSamplesTable(
}
}
}
return aggregationColumn
if aggregationColumn == "" {
return "", errors.Newf(
errors.TypeInvalidInput,
errors.CodeInvalidInput,
"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
)
}
return aggregationColumn, nil
}

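Callers of AggregationColumnForSamplesTable must now handle an error instead of silently receiving an empty column for an unknown time aggregation. A stub-based sketch (not the real function) of the new contract:

```go
package main

import (
	"errors"
	"fmt"
)

// aggregationColumn is a stub mirroring the new contract of
// AggregationColumnForSamplesTable: an unknown aggregation returns an
// error instead of an empty column string.
func aggregationColumn(timeAggregation string) (string, error) {
	switch timeAggregation {
	case "sum":
		return "sum(value)", nil
	case "max":
		return "max(value)", nil
	default:
		return "", errors.New("invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]")
	}
}

func main() {
	if col, err := aggregationColumn("sum"); err == nil {
		fmt.Println(col) // sum(value)
	}
	if _, err := aggregationColumn("median"); err != nil {
		fmt.Println("error:", err) // callers can no longer ignore this case
	}
}
```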
@@ -35,13 +35,7 @@ func (c *conditionBuilder) conditionFor(
sb *sqlbuilder.SelectBuilder,
) (string, error) {

switch operator {
case qbtypes.FilterOperatorContains,
qbtypes.FilterOperatorNotContains,
qbtypes.FilterOperatorILike,
qbtypes.FilterOperatorNotILike,
qbtypes.FilterOperatorLike,
qbtypes.FilterOperatorNotLike:
if operator.IsStringSearchOperator() {
value = querybuilder.FormatValueForContains(value)
}

pkg/types/identitytypes/identity.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package identitytypes

import (
"time"

"github.com/SigNoz/signoz/pkg/valuer"
"github.com/uptrace/bun"
)

var (
IdentityStatusActive = valuer.NewString("active")
IdentityStatusInactive = valuer.NewString("inactive")
)

type StorableIdentity struct {
bun.BaseModel `bun:"table:identity"`

ID valuer.UUID `bun:"id,pk,type:text" json:"id"`
Status string `bun:"status" json:"status"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
CreatedAt time.Time `bun:"created_at" json:"createdAt"`
UpdatedAt time.Time `bun:"updated_at" json:"updatedAt"`
}

type StorableIdentityRole struct {
bun.BaseModel `bun:"table:identity_role"`

ID valuer.UUID `bun:"id,pk,type:text" json:"id"`
IdentityID valuer.UUID `bun:"identity_id" json:"identityId"`
RoleName string `bun:"role_name" json:"roleName"`
CreatedAt time.Time `bun:"created_at" json:"createdAt"`
UpdatedAt time.Time `bun:"updated_at" json:"updatedAt"`
}

func NewStorableIdentity(id valuer.UUID, orgID valuer.UUID) *StorableIdentity {
now := time.Now()
return &StorableIdentity{
ID: id,
Status: IdentityStatusActive.StringValue(),
OrgID: orgID,
CreatedAt: now,
UpdatedAt: now,
}
}

func NewStorableIdentityRole(identityID valuer.UUID, roleName string) *StorableIdentityRole {
now := time.Now()
return &StorableIdentityRole{
ID: valuer.GenerateUUID(),
IdentityID: identityID,
RoleName: roleName,
CreatedAt: now,
UpdatedAt: now,
}
}
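As a usage sketch for the constructors above (the role name is hypothetical, and the import paths assume the SigNoz module):

```go
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/identitytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func main() {
	orgID := valuer.GenerateUUID()
	identityID := valuer.GenerateUUID()

	// new identities start out active and carry created/updated timestamps
	identity := identitytypes.NewStorableIdentity(identityID, orgID)

	// roles reference the identity by ID; the role row gets its own UUID
	role := identitytypes.NewStorableIdentityRole(identity.ID, "ADMIN")

	fmt.Println(identity.Status, role.RoleName)
}
```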
pkg/types/identitytypes/store.go (new file, 19 lines)
@@ -0,0 +1,19 @@
package identitytypes

import (
"context"

"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)

type Store interface {
CreateIdentity(context.Context, *StorableIdentity) error
CreateIdentityRole(context.Context, *StorableIdentityRole) error
DeleteIdentity(context.Context, valuer.UUID) error
DeleteIdentityRole(ctx context.Context, identityID valuer.UUID, roleName string) error
DeleteIdentityRoles(context.Context, valuer.UUID) error
GetRolesByIdentityID(context.Context, valuer.UUID) ([]*roletypes.StorableRole, error)
CountByRoleNameAndOrgID(ctx context.Context, roleName string, orgID valuer.UUID) (int64, error)
RunInTx(context.Context, func(ctx context.Context) error) error
}
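RunInTx points at the intended write pattern: create the identity and its role rows in one transaction so a partial failure rolls both back. A hedged sketch against the interface above; the package name, store value, and role name are hypothetical:

```go
package identityexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types/identitytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// createIdentityWithRole writes an identity and one role atomically;
// store is any implementation of identitytypes.Store.
func createIdentityWithRole(ctx context.Context, store identitytypes.Store, orgID valuer.UUID) error {
	identity := identitytypes.NewStorableIdentity(valuer.GenerateUUID(), orgID)
	role := identitytypes.NewStorableIdentityRole(identity.ID, "VIEWER")

	// both writes commit together, or neither does
	return store.RunInTx(ctx, func(ctx context.Context) error {
		if err := store.CreateIdentity(ctx, identity); err != nil {
			return err
		}
		return store.CreateIdentityRole(ctx, role)
	})
}
```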
@@ -25,7 +25,7 @@ type Organization struct {
DisplayName string `bun:"display_name,type:text,notnull" json:"displayName"`
}

func NewOrganization(displayName string) *Organization {
func NewOrganization(displayName string, name string) *Organization {
id := valuer.GenerateUUID()
return &Organization{
Identifiable: Identifiable{
@@ -35,7 +35,7 @@ func NewOrganization(displayName string) *Organization {
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
// Name: "default/main", TODO: take the call and uncomment this later
Name: name,
DisplayName: displayName,
Key: NewOrganizationKey(id),
}
@@ -74,6 +74,7 @@ type TTLSetting struct {
type OrganizationStore interface {
Create(context.Context, *Organization) error
Get(context.Context, valuer.UUID) (*Organization, error)
GetByName(context.Context, string) (*Organization, error)
GetAll(context.Context) ([]*Organization, error)
ListByKeyRange(context.Context, uint32, uint32) ([]*Organization, error)
Update(context.Context, *Organization) error

@@ -152,7 +152,9 @@ func (f FilterOperator) IsStringSearchOperator() bool {
FilterOperatorILike,
FilterOperatorNotILike,
FilterOperatorLike,
FilterOperatorNotLike:
FilterOperatorNotLike,
FilterOperatorRegexp,
FilterOperatorNotRegexp:
return true
default:
return false

@@ -3,6 +3,7 @@ package querybuildertypesv5
import (
"fmt"

"github.com/SigNoz/signoz/pkg/types/metrictypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)

@@ -174,3 +175,54 @@ func (q *QueryBuilderQuery[T]) Normalize() {
}

}

// Fast-path (no fingerprint grouping)
// CanShortCircuitDelta returns true if we can use the optimized query
// for the given aggregation
// This avoids the GROUP BY on fingerprint, improving performance
// for certain queries
// cases where we can short circuit:
// 1. time aggregation = (rate|increase) and space aggregation = sum
// - rate = sum(value)/step, increase = sum(value) - sum of sums is the same as sum of all values
//
// 2. time aggregation = sum and space aggregation = sum
// - sum of sums is the same as sum of all values
//
// 3. time aggregation = min and space aggregation = min
// - min of mins is the same as min of all values
//
// 4. time aggregation = max and space aggregation = max
// - max of maxes is the same as max of all values
//
// 5. special case exp histogram: there is no need for per-series/fingerprint aggregation;
// we can directly use the quantilesDDMerge function
//
// all of this is true only for delta metrics
func CanShortCircuitDelta(metricAgg MetricAggregation) bool {
if metricAgg.Temporality != metrictypes.Delta {
return false
}

ta := metricAgg.TimeAggregation
sa := metricAgg.SpaceAggregation

if (ta == metrictypes.TimeAggregationRate || ta == metrictypes.TimeAggregationIncrease) &&
sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationSum && sa == metrictypes.SpaceAggregationSum {
return true
}
if ta == metrictypes.TimeAggregationMin && sa == metrictypes.SpaceAggregationMin {
return true
}
if ta == metrictypes.TimeAggregationMax && sa == metrictypes.SpaceAggregationMax {
return true
}
if metricAgg.Type == metrictypes.ExpHistogramType && sa.IsPercentile() {
return true
}

return false
}

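As a quick illustration of the now-exported helper: a delta counter aggregated with rate + sum short-circuits, while the same aggregation over a cumulative metric does not. The struct-literal construction below is a hypothetical sketch:

```go
package main

import (
	"fmt"

	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
)

func main() {
	// delta + rate + sum: eligible for the fast path
	deltaRateSum := qbtypes.MetricAggregation{
		Temporality:      metrictypes.Delta,
		TimeAggregation:  metrictypes.TimeAggregationRate,
		SpaceAggregation: metrictypes.SpaceAggregationSum,
	}
	fmt.Println(qbtypes.CanShortCircuitDelta(deltaRateSum)) // true

	// cumulative metrics always need the per-fingerprint pass
	cumulative := deltaRateSum
	cumulative.Temporality = metrictypes.Cumulative
	fmt.Println(qbtypes.CanShortCircuitDelta(cumulative)) // false
}
```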
@@ -12,6 +12,7 @@ import (
var (
ErrColumnNotFound = errors.Newf(errors.TypeNotFound, errors.CodeNotFound, "field not found")
ErrBetweenValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values")
ErrBetweenValuesType = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) between operator requires two values of the number type")
ErrInValues = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "(not) in operator requires a list of values")
ErrUnsupportedOperator = errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "unsupported operator")
)

@@ -75,7 +75,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

case QueryTypeFormula:
var spec QueryBuilderFormula
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderFormula
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "formula spec"); err != nil {
return wrapUnmarshalError(err, "invalid formula spec: %v", err)
}
@@ -83,7 +83,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

case QueryTypeJoin:
var spec QueryBuilderJoin
// TODO: use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for QueryBuilderJoin
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "join spec"); err != nil {
return wrapUnmarshalError(err, "invalid join spec: %v", err)
}
@@ -98,7 +98,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

case QueryTypePromQL:
var spec PromQuery
// TODO: use json.Unmarshal here after implementing custom unmarshaler for PromQuery
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for PromQuery
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "PromQL spec"); err != nil {
return wrapUnmarshalError(err, "invalid PromQL spec: %v", err)
}
@@ -106,7 +106,7 @@ func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {

case QueryTypeClickHouseSQL:
var spec ClickHouseQuery
// TODO: use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
// TODO(srikanthccv): use json.Unmarshal here after implementing custom unmarshaler for ClickHouseQuery
if err := UnmarshalJSONWithContext(shadow.Spec, &spec, "ClickHouse SQL spec"); err != nil {
return wrapUnmarshalError(err, "invalid ClickHouse SQL spec: %v", err)
}
@@ -439,7 +439,7 @@ func (r *QueryRangeRequest) GetQueriesSupportingZeroDefault() map[string]bool {
expr = strings.ToLower(expr)
// only pure additive/counting operations should default to zero,
// while statistical/analytical operations should show gaps when there's no data to analyze.
// TODO: use newExprVisitor for getting the function used in the expression
// TODO(srikanthccv): use newExprVisitor for getting the function used in the expression
if strings.HasPrefix(expr, "count(") ||
strings.HasPrefix(expr, "count_distinct(") ||
strings.HasPrefix(expr, "sum(") ||
Some files were not shown because too many files have changed in this diff.