Compare commits

..

7 Commits

Author SHA1 Message Date
grandwizard28 3de8b3b5b2 feat: toggle foreign key constraint 2026-02-16 16:54:12 +05:30
grandwizard28 a3d74b19fc feat: handle setup completed 2026-02-16 15:59:10 +05:30
grandwizard28 7edd97b3d2 feat: add org_name to root user reconciliation 2026-02-16 15:59:09 +05:30
grandwizard28 17958d8aa0 fix: add openapi spec 2026-02-16 15:59:09 +05:30
grandwizard28 f8341790e9 fix: clean the update functions and the reconcilliation logic 2026-02-16 15:59:09 +05:30
grandwizard28 e84c6749b2 feat: fix promote to root and authz grant changes where needed 2026-02-16 15:59:09 +05:30
grandwizard28 65fd5f274c feat: add root user support with protection guards and env-based provisioning 2026-02-16 15:59:09 +05:30
46 changed files with 712 additions and 1093 deletions

View File

@@ -309,3 +309,14 @@ user:
allow_self: true
# The duration within which a user can reset their password.
max_token_lifetime: 6h
root:
# Whether to enable the root user. When enabled, a root user is provisioned
# on startup using the email and password below. The root user cannot be
# deleted, updated, or have their password changed through the UI.
enabled: false
# The email address of the root user.
email: ""
# The password of the root user. Must meet password requirements.
password: ""
# The name of the organization to create or look up for the root user.
org_name: default
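
Note (not part of the diff): the "cannot be deleted, updated, or have their password changed" guarantee described above is enforced in the Go hunks below through a types.User.ErrIfRoot helper. Its definition is outside this compare view; the following is a hypothetical sketch of its shape, inferred from the call sites and from the new is_root column / isRoot DTO field added elsewhere in this diff.

// Hypothetical sketch only: ErrIfRoot is called before every mutating operation
// on a user in the hunks below; the field name IsRoot is inferred from the
// is_root column and isRoot DTO field, not copied from the real type.
func (u *User) ErrIfRoot() error {
	if u.IsRoot {
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, "operation is not allowed on the root user")
	}
	return nil
}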

View File

@@ -4678,6 +4678,8 @@ components:
type: string
id:
type: string
isRoot:
type: boolean
orgId:
type: string
role:

View File

@@ -45,7 +45,7 @@ type APIHandler struct {
}
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
@@ -58,7 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})
}, config)
if err != nil {
return nil, err

View File

@@ -175,7 +175,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
GlobalConfig: config.Global,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
if err != nil {
return nil, err
}

View File

@@ -1542,6 +1542,10 @@ export interface TypesUserDTO {
* @type string
*/
id?: string;
/**
* @type boolean
*/
isRoot?: boolean;
/**
* @type string
*/

View File

@@ -14,6 +14,11 @@ export interface GraphVisibilityState {
dataIndex: SeriesVisibilityItem[];
}
export interface SeriesVisibilityState {
labels: string[];
visibility: boolean[];
}
/**
* Context in which a panel is rendered. Used to vary behavior (e.g. persistence,
* interactions) per context.

View File

@@ -62,10 +62,10 @@ describe('legendVisibilityUtils', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual([
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
expect(result).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('returns visibility by index including duplicate labels', () => {
@@ -85,11 +85,10 @@ describe('legendVisibilityUtils', () => {
const result = getStoredSeriesVisibility('widget-1');
expect(result).not.toBeNull();
expect(result).toEqual([
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
{ label: 'Memory', show: false },
]);
expect(result).toEqual({
labels: ['CPU', 'CPU', 'Memory'],
visibility: [true, false, false],
});
});
it('returns null on malformed JSON in localStorage', () => {
@@ -128,10 +127,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'CPU', show: true },
{ label: 'Memory', show: false },
]);
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [true, false],
});
});
it('adds a new widget entry when other widgets already exist', () => {
@@ -150,7 +149,7 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-new');
expect(stored).not.toBeNull();
expect(stored).toEqual([{ label: 'CPU', show: false }]);
expect(stored).toEqual({ labels: ['CPU'], visibility: [false] });
});
it('updates existing widget visibility when entry already exists', () => {
@@ -176,10 +175,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'CPU', show: false },
{ label: 'Memory', show: true },
]);
expect(stored).toEqual({
labels: ['CPU', 'Memory'],
visibility: [false, true],
});
});
it('silently handles malformed existing JSON without throwing', () => {
@@ -202,10 +201,10 @@ describe('legendVisibilityUtils', () => {
const stored = getStoredSeriesVisibility('widget-1');
expect(stored).not.toBeNull();
expect(stored).toEqual([
{ label: 'x-axis', show: true },
{ label: 'CPU', show: false },
]);
expect(stored).toEqual({
labels: ['x-axis', 'CPU'],
visibility: [true, false],
});
const expected = [
{
name: 'widget-1',
@@ -232,12 +231,14 @@ describe('legendVisibilityUtils', () => {
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual([
{ label: 'A', show: true },
]);
expect(getStoredSeriesVisibility('widget-b')).toEqual([
{ label: 'B', show: true },
]);
expect(getStoredSeriesVisibility('widget-a')).toEqual({
labels: ['A'],
visibility: [true],
});
expect(getStoredSeriesVisibility('widget-b')).toEqual({
labels: ['B'],
visibility: [true],
});
});
it('calls setItem with storage key and stringified visibility states', () => {

View File

@@ -1,6 +1,10 @@
import { LOCALSTORAGE } from 'constants/localStorage';
import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
import {
GraphVisibilityState,
SeriesVisibilityItem,
SeriesVisibilityState,
} from '../types';
/**
* Retrieves the stored series visibility for a specific widget from localStorage by index.
@@ -10,7 +14,7 @@ import { GraphVisibilityState, SeriesVisibilityItem } from '../types';
*/
export function getStoredSeriesVisibility(
widgetId: string,
): SeriesVisibilityItem[] | null {
): SeriesVisibilityState | null {
try {
const storedData = localStorage.getItem(LOCALSTORAGE.GRAPH_VISIBILITY_STATES);
@@ -25,7 +29,10 @@ export function getStoredSeriesVisibility(
return null;
}
return widgetState.dataIndex;
return {
labels: widgetState.dataIndex.map((item) => item.label),
visibility: widgetState.dataIndex.map((item) => item.show),
};
} catch (error) {
if (error instanceof SyntaxError) {
// If the stored data is malformed, remove it

View File

@@ -1,11 +1,11 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import BarPanel from 'container/DashboardContainer/visualization/panels/BarPanel/BarPanel';
import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
import HistogramPanelWrapper from './HistogramPanelWrapper';
import ListPanelWrapper from './ListPanelWrapper';
import PiePanelWrapper from './PiePanelWrapper';
import TablePanelWrapper from './TablePanelWrapper';
import UplotPanelWrapper from './UplotPanelWrapper';
import ValuePanelWrapper from './ValuePanelWrapper';
export const PanelTypeVsPanelWrapper = {
@@ -16,7 +16,7 @@ export const PanelTypeVsPanelWrapper = {
[PANEL_TYPES.TRACE]: null,
[PANEL_TYPES.EMPTY_WIDGET]: null,
[PANEL_TYPES.PIE]: PiePanelWrapper,
[PANEL_TYPES.BAR]: BarPanel,
[PANEL_TYPES.BAR]: UplotPanelWrapper,
[PANEL_TYPES.HISTOGRAM]: HistogramPanelWrapper,
};

View File

@@ -1,4 +1,4 @@
import { SeriesVisibilityItem } from 'container/DashboardContainer/visualization/panels/types';
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
import { getStoredSeriesVisibility } from 'container/DashboardContainer/visualization/panels/utils/legendVisibilityUtils';
import { ThresholdsDrawHookOptions } from 'lib/uPlotV2/hooks/types';
import { thresholdsDrawHook } from 'lib/uPlotV2/hooks/useThresholdsDrawHook';
@@ -238,7 +238,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
/**
* Returns stored series visibility by index from localStorage when preferences source is LOCAL_STORAGE, otherwise null.
*/
private getStoredVisibility(): SeriesVisibilityItem[] | null {
private getStoredVisibility(): SeriesVisibilityState | null {
if (
this.widgetId &&
this.selectionPreferencesSource === SelectionPreferencesSource.LOCAL_STORAGE
@@ -248,98 +248,14 @@ export class UPlotConfigBuilder extends ConfigBuilder<
return null;
}
/**
* Derive visibility resolution state from stored preferences and current series:
* - visibleStoredLabels: labels that should always be visible
* - hiddenStoredLabels: labels that should always be hidden
* - hasActivePreference: whether a "mix" preference applies to new labels
*/
// eslint-disable-next-line sonarjs/cognitive-complexity
private getVisibilityResolutionState(): {
visibleStoredLabels: Set<string>;
hiddenStoredLabels: Set<string>;
hasActivePreference: boolean;
} {
const seriesVisibilityState = this.getStoredVisibility();
if (!seriesVisibilityState || seriesVisibilityState.length === 0) {
return {
visibleStoredLabels: new Set<string>(),
hiddenStoredLabels: new Set<string>(),
hasActivePreference: false,
};
}
// Single pass over stored items to derive:
// - visibleStoredLabels: any label that is ever stored as visible
// - hiddenStoredLabels: labels that are only ever stored as hidden
// - hasMixPreference: there is at least one visible and one hidden entry
const visibleStoredLabels = new Set<string>();
const hiddenStoredLabels = new Set<string>();
let hasAnyVisible = false;
let hasAnyHidden = false;
for (const { label, show } of seriesVisibilityState) {
if (show) {
hasAnyVisible = true;
visibleStoredLabels.add(label);
// If a label is ever visible, it should not be treated as "only hidden"
if (hiddenStoredLabels.has(label)) {
hiddenStoredLabels.delete(label);
}
} else {
hasAnyHidden = true;
// Only track as hidden if we have not already seen it as visible
if (!visibleStoredLabels.has(label)) {
hiddenStoredLabels.add(label);
}
}
}
const hasMixPreference = hasAnyVisible && hasAnyHidden;
// Current series labels in this chart.
const currentSeriesLabels = this.series.map(
(s: UPlotSeriesBuilder) => s.getConfig().label ?? '',
);
// Check if any stored "visible" label exists in the current series list.
const hasVisibleIntersection =
visibleStoredLabels.size > 0 &&
currentSeriesLabels.some((label) => visibleStoredLabels.has(label));
// Active preference only when there is a mix AND at least one visible
// stored label is present in the current series list.
const hasActivePreference = hasMixPreference && hasVisibleIntersection;
// We apply stored visibility in two cases:
// - There is an active preference (mix + intersection), OR
// - There is no mix (all true or all false) preserve legacy behavior.
const shouldApplyStoredVisibility = !hasMixPreference || hasActivePreference;
if (!shouldApplyStoredVisibility) {
return {
visibleStoredLabels: new Set<string>(),
hiddenStoredLabels: new Set<string>(),
hasActivePreference,
};
}
return {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
};
}
/**
* Get legend items with visibility state restored from localStorage if available
*/
getLegendItems(): Record<number, LegendItem> {
const {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
} = this.getVisibilityResolutionState();
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
);
return this.series.reduce((acc, s: UPlotSeriesBuilder, index: number) => {
const seriesConfig = s.getConfig();
@@ -347,11 +263,11 @@ export class UPlotConfigBuilder extends ConfigBuilder<
// +1 because uPlot series 0 is x-axis/time; data series are at 1, 2, ... (also matches stored visibility[0]=time, visibility[1]=first data, ...)
const seriesIndex = index + 1;
const show = resolveSeriesVisibility({
seriesIndex,
seriesShow: seriesConfig.show,
seriesLabel: label,
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
});
acc[seriesIndex] = {
@@ -380,23 +296,22 @@ export class UPlotConfigBuilder extends ConfigBuilder<
...DEFAULT_PLOT_CONFIG,
};
const {
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
} = this.getVisibilityResolutionState();
const seriesVisibilityState = this.getStoredVisibility();
const isAnySeriesHidden = !!seriesVisibilityState?.visibility?.some(
(show) => !show,
);
config.series = [
{ value: (): string => '' }, // Base series for timestamp
...this.series.map((s) => {
...this.series.map((s, index) => {
const series = s.getConfig();
// Stored visibility[0] is x-axis/time; data series start at visibility[1]
const visible = resolveSeriesVisibility({
seriesIndex: index + 1,
seriesShow: series.show,
seriesLabel: series.label ?? '',
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
});
return {
...series,

View File

@@ -186,10 +186,11 @@ describe('UPlotConfigBuilder', () => {
});
it('restores visibility state from localStorage when selectionPreferencesSource is LOCAL_STORAGE', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'Requests', show: true },
{ label: 'Errors', show: false },
]);
// Index 0 = x-axis/time; indices 1,2 = data series (Requests, Errors). resolveSeriesVisibility matches by seriesIndex + seriesLabel.
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue({
labels: ['x-axis', 'Requests', 'Errors'],
visibility: [true, true, false],
});
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
@@ -201,7 +202,7 @@ describe('UPlotConfigBuilder', () => {
const legendItems = builder.getLegendItems();
// When any series is hidden, visibility is driven by stored label-based preferences
// When any series is hidden, legend visibility is driven by the stored map
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].show).toBe(false);
@@ -212,109 +213,6 @@ describe('UPlotConfigBuilder', () => {
expect(secondSeries?.show).toBe(false);
});
it('hides new series by default when there is a mixed preference and a visible label matches current series', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'Requests', show: true },
{ label: 'Errors', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
builder.addSeries(createSeriesProps({ label: 'Requests' }));
builder.addSeries(createSeriesProps({ label: 'Errors' }));
builder.addSeries(createSeriesProps({ label: 'Latency' }));
const legendItems = builder.getLegendItems();
// Stored labels: Requests (visible), Errors (hidden).
// New label "Latency" should be hidden because there is a mixed preference
// and "Requests" (a visible stored label) is present in the current series.
expect(legendItems[1].label).toBe('Requests');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('Errors');
expect(legendItems[2].show).toBe(false);
expect(legendItems[3].label).toBe('Latency');
expect(legendItems[3].show).toBe(false);
const config = builder.getConfig();
const [, firstSeries, secondSeries, thirdSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('Requests');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('Errors');
expect(secondSeries?.show).toBe(false);
expect(thirdSeries?.label).toBe('Latency');
expect(thirdSeries?.show).toBe(false);
});
it('shows all series when there is a mixed preference but no visible stored labels match current series', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'StoredVisible', show: true },
{ label: 'StoredHidden', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
// None of these labels intersect with the stored visible label "StoredVisible"
builder.addSeries(createSeriesProps({ label: 'CPU' }));
builder.addSeries(createSeriesProps({ label: 'Memory' }));
const legendItems = builder.getLegendItems();
// Mixed preference exists in storage, but since no visible labels intersect
// with current series, stored preferences are ignored and all are visible.
expect(legendItems[1].label).toBe('CPU');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('Memory');
expect(legendItems[2].show).toBe(true);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('CPU');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('Memory');
expect(secondSeries?.show).toBe(true);
});
it('treats duplicate labels as visible when any stored entry for that label is visible', () => {
getStoredSeriesVisibilityMock.getStoredSeriesVisibility.mockReturnValue([
{ label: 'CPU', show: true },
{ label: 'CPU', show: false },
]);
const builder = new UPlotConfigBuilder({
widgetId: 'widget-dup',
selectionPreferencesSource: SelectionPreferencesSource.LOCAL_STORAGE,
});
// Two series with the same label; both should be visible because at least
// one stored entry for "CPU" is visible.
builder.addSeries(createSeriesProps({ label: 'CPU' }));
builder.addSeries(createSeriesProps({ label: 'CPU' }));
const legendItems = builder.getLegendItems();
expect(legendItems[1].label).toBe('CPU');
expect(legendItems[1].show).toBe(true);
expect(legendItems[2].label).toBe('CPU');
expect(legendItems[2].show).toBe(true);
const config = builder.getConfig();
const [, firstSeries, secondSeries] = config.series ?? [];
expect(firstSeries?.label).toBe('CPU');
expect(firstSeries?.show).toBe(true);
expect(secondSeries?.label).toBe('CPU');
expect(secondSeries?.show).toBe(true);
});
it('does not attempt to read stored visibility when using in-memory preferences', () => {
const builder = new UPlotConfigBuilder({
widgetId: 'widget-1',

View File

@@ -1,44 +1,25 @@
/**
* Resolve the visibility of a single series based on:
* - Stored per-series visibility (when applicable)
* - Whether there is an "active preference" (mix of visible/hidden that matches current series)
* - The series' own default show flag
*/
import { SeriesVisibilityState } from 'container/DashboardContainer/visualization/panels/types';
export function resolveSeriesVisibility({
seriesIndex,
seriesShow,
seriesLabel,
visibleStoredLabels,
hiddenStoredLabels,
hasActivePreference,
seriesVisibilityState,
isAnySeriesHidden,
}: {
seriesIndex: number;
seriesShow: boolean | undefined | null;
seriesLabel: string;
visibleStoredLabels: Set<string> | null;
hiddenStoredLabels: Set<string> | null;
hasActivePreference: boolean;
seriesVisibilityState: SeriesVisibilityState | null;
isAnySeriesHidden: boolean;
}): boolean {
const isStoredVisible = !!visibleStoredLabels?.has(seriesLabel);
const isStoredHidden = !!hiddenStoredLabels?.has(seriesLabel);
// If the label is explicitly stored as visible, always show it.
if (isStoredVisible) {
return true;
if (
isAnySeriesHidden &&
seriesVisibilityState?.visibility &&
seriesVisibilityState.labels.length > seriesIndex &&
seriesVisibilityState.labels[seriesIndex] === seriesLabel
) {
return seriesVisibilityState.visibility[seriesIndex] ?? false;
}
// If the label is explicitly stored as hidden (and never stored as visible),
// always hide it.
if (isStoredHidden) {
return false;
}
// "Active preference" means:
// - There is a mix of visible/hidden in storage, AND
// - At least one stored *visible* label exists in the current series list.
// For such a preference, any new/unknown series should be hidden by default.
if (hasActivePreference) {
return false;
}
// Otherwise fall back to the series' own config or show by default.
return seriesShow ?? true;
}

View File

@@ -204,78 +204,6 @@ describe('dashboardVariablesStore', () => {
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=undefined as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: undefined,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty string selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should treat DYNAMIC variable with allSelected=true and empty array selectedValue as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: [] as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',

View File

@@ -76,7 +76,7 @@ export function getVariableDependencyContext(): VariableFetchContext {
(variable) => {
if (
variable.type === 'DYNAMIC' &&
(variable.selectedValue === null || isEmpty(variable.selectedValue)) &&
variable.selectedValue === null &&
variable.allSelected === true
) {
return true;

View File

@@ -30,3 +30,7 @@ func (module *getter) ListByOwnedKeyRange(ctx context.Context) ([]*types.Organiz
return module.store.ListByKeyRange(ctx, start, end)
}
func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
return module.store.GetByName(ctx, name)
}

View File

@@ -47,6 +47,22 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
return organization, nil
}
func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
organization := new(types.Organization)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(organization).
Where("name = ?", name).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
}
return organization, nil
}
func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
organizations := make([]*types.Organization, 0)
err := store.
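
Note (not part of the diff): callers are expected to treat a missing organization as a recoverable state rather than a hard failure. A usage sketch follows; the helper name is illustrative, and the errors.Ast branch mirrors how the root-user reconciliation service further down consumes GetByName.

// Sketch: distinguishing "organization not created yet" from a genuine error.
func orgExists(ctx context.Context, getter organization.Getter, name string) (bool, error) {
	if _, err := getter.GetByName(ctx, name); err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return false, nil // organization does not exist yet
		}
		return false, err // any other failure is a real error
	}
	return true, nil
}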

View File

@@ -14,6 +14,9 @@ type Getter interface {
// ListByOwnedKeyRange gets all the organizations owned by the instance
ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)
// GetByName gets the organization by name
GetByName(context.Context, string) (*types.Organization, error)
}
type Setter interface {

View File

@@ -151,6 +151,10 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
return "", err
}
if err := user.ErrIfRoot(); err != nil {
return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
}
token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
if err != nil {
return "", err

View File

@@ -5,11 +5,22 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Config struct {
Password PasswordConfig `mapstructure:"password"`
Root RootConfig `mapstructure:"root"`
}
type RootConfig struct {
Enabled bool `mapstructure:"enabled"`
Email valuer.Email `mapstructure:"email"`
Password string `mapstructure:"password"`
OrgName string `mapstructure:"org_name"`
}
type PasswordConfig struct {
Reset ResetConfig `mapstructure:"reset"`
}
@@ -31,6 +42,10 @@ func newConfig() factory.Config {
MaxTokenLifetime: 6 * time.Hour,
},
},
Root: RootConfig{
Enabled: false,
OrgName: "default",
},
}
}
@@ -39,5 +54,17 @@ func (c Config) Validate() error {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
}
if c.Root.Enabled {
if c.Root.Email.IsZero() {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
}
if c.Root.Password == "" {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
}
if !types.IsPasswordValid(c.Root.Password) {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
}
}
return nil
}
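
Note (not part of the diff): a quick usage sketch of the new root validation. The struct fields are the ones declared above; the literal values and the panic wrapper are illustrative only, and the snippet assumes the package's existing time import.

// Sketch: enabling root without an email fails validation.
cfg := Config{
	Password: PasswordConfig{Reset: ResetConfig{MaxTokenLifetime: 6 * time.Hour}},
	Root:     RootConfig{Enabled: true, Password: "SomeValidPassw0rd!"},
	// Root.Email is zero-valued, so Validate returns
	// "user::root::email is required when root user is enabled".
}
if err := cfg.Validate(); err == nil {
	panic("expected a validation error for the missing root email")
}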

View File

@@ -16,6 +16,10 @@ func NewGetter(store types.UserStore) user.Getter {
return &getter{store: store}
}
func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
return module.store.GetRootUserByOrgID(ctx, orgID)
}
func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
users, err := module.store.ListUsersByOrgID(ctx, orgID)
if err != nil {

View File

@@ -103,6 +103,12 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
return nil, err
}
if existingUser != nil {
if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
}
}
if existingUser != nil {
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
}
@@ -202,27 +208,21 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
return nil, err
}
if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot update root user")
}
requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
if err != nil {
return nil, err
}
// only displayName, role can be updated
if user.DisplayName == "" {
user.DisplayName = existingUser.DisplayName
}
if user.Role == "" {
user.Role = existingUser.Role
}
if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
}
// Make sure that th e request is not demoting the last admin user.
// also an admin user can only change role of their own or other user
if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
// Make sure that the request is not demoting the last admin user.
if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
if err != nil {
return nil, err
@@ -233,7 +233,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
}
if user.Role != existingUser.Role {
if user.Role != "" && user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
@@ -245,23 +245,28 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
}
user.UpdatedAt = time.Now()
updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
if err != nil {
existingUser.Update(user.DisplayName, user.Role)
if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return nil, err
}
traits := types.NewTraitsFromUser(updatedUser)
m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
return existingUser, nil
}
traits["updated_by"] = updatedBy
m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
return nil, err
func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
return err
}
return updatedUser, nil
traits := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
return err
}
return nil
}
func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
@@ -270,6 +275,10 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot delete root user")
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}
@@ -364,6 +373,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot reset password for root user")
}
token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
if err != nil {
module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
@@ -407,6 +420,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
return err
}
user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
if err != nil {
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot reset password for root user")
}
if err := password.Update(passwd); err != nil {
return err
}
@@ -415,6 +437,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
}
func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
user, err := module.store.GetUser(ctx, userID)
if err != nil {
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot change password for root user")
}
password, err := module.store.GetPasswordByUserID(ctx, userID)
if err != nil {
return err
@@ -476,7 +507,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
}
func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
user, err := types.NewRootUser(name, email, organization.ID)
if err != nil {
return nil, err
}

View File

@@ -0,0 +1,187 @@
package impluser
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type service struct {
settings factory.ScopedProviderSettings
store types.UserStore
module user.Module
orgGetter organization.Getter
authz authz.AuthZ
config user.RootConfig
stopC chan struct{}
}
func NewService(
providerSettings factory.ProviderSettings,
store types.UserStore,
module user.Module,
orgGetter organization.Getter,
authz authz.AuthZ,
config user.RootConfig,
) user.Service {
return &service{
settings: factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
store: store,
module: module,
orgGetter: orgGetter,
authz: authz,
config: config,
stopC: make(chan struct{}),
}
}
func (s *service) Start(ctx context.Context) error {
if !s.config.Enabled {
<-s.stopC
return nil
}
ticker := time.NewTicker(10 * time.Second)
defer ticker.Stop()
for {
err := s.reconcile(ctx)
if err == nil {
s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
<-s.stopC
return nil
}
s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)
select {
case <-s.stopC:
return nil
case <-ticker.C:
continue
}
}
}
func (s *service) Stop(ctx context.Context) error {
close(s.stopC)
return nil
}
func (s *service) reconcile(ctx context.Context) error {
org, err := s.orgGetter.GetByName(ctx, s.config.OrgName)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
newOrg := types.NewOrganizationWithName(s.config.OrgName)
_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
return err
}
return err
}
return s.reconcileRootUser(ctx, org.ID)
}
func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}
if existingRoot == nil {
return s.createOrPromoteRootUser(ctx, orgID)
}
return s.updateExistingRootUser(ctx, orgID, existingRoot)
}
func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}
if existingUser != nil {
oldRole := existingUser.Role
existingUser.PromoteToRoot()
if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return err
}
if oldRole != types.RoleAdmin {
if err := s.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
); err != nil {
return err
}
}
return s.setPassword(ctx, existingUser.ID)
}
// Create new root user
newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
if err != nil {
return err
}
factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
if err != nil {
return err
}
return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
}
func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
existingRoot.PromoteToRoot()
if existingRoot.Email != s.config.Email {
existingRoot.UpdateEmail(s.config.Email)
if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
return err
}
}
return s.setPassword(ctx, existingRoot.ID)
}
func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
password, err := s.store.GetPasswordByUserID(ctx, userID)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return err
}
factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
if err != nil {
return err
}
return s.store.CreatePassword(ctx, factorPassword)
}
if !password.Equals(s.config.Password) {
if err := password.Update(s.config.Password); err != nil {
return err
}
return s.store.UpdatePassword(ctx, password)
}
return nil
}
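
Note (not part of the diff): Start blocks until reconciliation succeeds or Stop is called (and simply parks on stopC when root is disabled), so it is meant to run on its own goroutine. The wiring below is a hedged sketch; the actual registration inside SigNoz startup is not part of this compare view.

// Sketch: running the reconciliation service (imports and variable names illustrative).
svc := NewService(providerSettings, store, module, orgGetter, authzProvider, cfg.User.Root)
go func() {
	// Returns after a successful reconcile or after Stop closes the stop channel.
	if err := svc.Start(context.Background()); err != nil {
		log.Println("root user service exited:", err)
	}
}()
// ...later, on shutdown:
_ = svc.Stop(context.Background())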

View File

@@ -210,20 +210,24 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
return users, nil
}
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
user.UpdatedAt = time.Now()
_, err := store.sqlstore.BunDB().NewUpdate().
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
_, err := store.
sqlstore.
BunDBCtx(ctx).
NewUpdate().
Model(user).
Column("display_name").
Column("email").
Column("role").
Column("is_root").
Column("updated_at").
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("id = ?", user.ID).
Exec(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
}
return user, nil
return nil
}
func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
@@ -602,6 +606,22 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
})
}
func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
user := new(types.User)
err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(user).
Where("org_id = ?", orgID).
Where("is_root = ?", true).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
}
return user, nil
}
func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
users := []*types.User{}
err := store.

View File

@@ -0,0 +1,7 @@
package user
import "github.com/SigNoz/signoz/pkg/factory"
type Service interface {
factory.Service
}

View File

@@ -34,6 +34,9 @@ type Module interface {
ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error
UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)
// UpdateAnyUser updates a user, persists the changes to the database, emits the analytics events, and deletes the cached identity.
UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error
// invite
@@ -54,6 +57,9 @@ type Module interface {
}
type Getter interface {
// GetRootUserByOrgID gets the root user by org id.
GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)
// Get gets the users based on the given id
ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)

View File

@@ -183,7 +183,7 @@ type APIHandlerOpts struct {
}
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
querierOpts := querier.QuerierOptions{
Reader: opts.Reader,
Cache: opts.Signoz.Cache,
@@ -270,6 +270,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
}
}
// If the root user is enabled, the setup is complete
if config.User.Root.Enabled {
aH.SetupCompleted = true
}
aH.Upgrader = &websocket.Upgrader{
CheckOrigin: func(r *http.Request) bool {
return true

View File

@@ -135,7 +135,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})
}, config)
if err != nil {
return nil, err
}

View File

@@ -43,11 +43,11 @@ var (
// FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationConverter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataConverter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateConverter
case "percent", "percentunit", "%":
return PercentConverter

View File

@@ -58,80 +58,36 @@ func (*dataConverter) Name() string {
return "data"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataUnit(u Unit) float64 {
switch u {
case "bytes", "By": // base 2
return Byte
case "decbytes": // base 10
return Byte
case "bits", "bit": // base 2
case "bits": // base 2
return Bit
case "decbits": // base 10
return Bit
case "kbytes", "KiBy": // base 2
case "kbytes", "kBy": // base 2
return Kibibyte
case "decKbytes", "deckbytes", "kBy": // base 10
case "decKbytes", "deckbytes": // base 10
return Kilobyte
case "mbytes", "MiBy": // base 2
case "mbytes", "MBy": // base 2
return Mebibyte
case "decMbytes", "decmbytes", "MBy": // base 10
case "decMbytes", "decmbytes": // base 10
return Megabyte
case "gbytes", "GiBy": // base 2
case "gbytes", "GBy": // base 2
return Gibibyte
case "decGbytes", "decgbytes", "GBy": // base 10
case "decGbytes", "decgbytes": // base 10
return Gigabyte
case "tbytes", "TiBy": // base 2
case "tbytes", "TBy": // base 2
return Tebibyte
case "decTbytes", "dectbytes", "TBy": // base 10
case "decTbytes", "dectbytes": // base 10
return Terabyte
case "pbytes", "PiBy": // base 2
case "pbytes", "PBy": // base 2
return Pebibyte
case "decPbytes", "decpbytes", "PBy": // base 10
case "decPbytes", "decpbytes": // base 10
return Petabyte
case "EBy": // base 10
return Exabyte
case "ZBy": // base 10
return Zettabyte
case "YBy": // base 10
return Yottabyte
case "Kibit": // base 2
return Kibibit
case "Mibit": // base 2
return Mebibit
case "Gibit": // base 2
return Gibibit
case "Tibit": // base 2
return Tebibit
case "Pibit": // base 2
return Pebibit
case "EiBy": // base 2
return Exbibyte
case "ZiBy": // base 2
return Zebibyte
case "YiBy": // base 2
return Yobibyte
case "kbit": // base 10
return Kilobit
case "Mbit": // base 10
return Megabit
case "Gbit": // base 10
return Gigabit
case "Tbit": // base 10
return Terabit
case "Pbit": // base 10
return Petabit
case "Ebit": // base 10
return Exabit
case "Zbit": // base 10
return Zettabit
case "Ybit": // base 10
return Yottabit
default:
return 1
}
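
Note (not part of the diff): the practical effect of the trimmed mapping is that "kBy", "MBy", "GBy", "TBy" and "PBy" now resolve to binary (base-2) factors, the same as their "kbytes"/"mbytes"/... aliases. A test-style sketch in the spirit of the assertions further below; the helper names come from the existing tests.

// Sketch: with the base-2 mapping, 3 GBy = 3 * 2^30 By = 3072 * 2^20 By.
func TestDataBase2MappingSketch(t *testing.T) {
	dataConverter := NewDataConverter()
	assert.Equal(t, Value{F: 3072, U: "MBy"}, dataConverter.Convert(Value{F: 3, U: "GBy"}, "MBy"))
}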

View File

@@ -54,12 +54,6 @@ func (*dataRateConverter) Name() string {
return "data_rate"
}
// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataRateUnit(u Unit) float64 {
// See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
switch u {
@@ -71,70 +65,46 @@ func FromDataRateUnit(u Unit) float64 {
return BitPerSecond
case "bps", "bit/s": // bits/sec(SI)
return BitPerSecond
case "KiBs", "KiBy/s": // kibibytes/sec
case "KiBs": // kibibytes/sec
return KibibytePerSecond
case "Kibits", "Kibit/s": // kibibits/sec
case "Kibits": // kibibits/sec
return KibibitPerSecond
case "KBs", "kBy/s": // kilobytes/sec
return KilobytePerSecond
case "Kbits", "kbit/s": // kilobits/sec
return KilobitPerSecond
case "MiBs", "MiBy/s": // mebibytes/sec
case "MiBs": // mebibytes/sec
return MebibytePerSecond
case "Mibits", "Mibit/s": // mebibits/sec
case "Mibits": // mebibits/sec
return MebibitPerSecond
case "MBs", "MBy/s": // megabytes/sec
return MegabytePerSecond
case "Mbits", "Mbit/s": // megabits/sec
return MegabitPerSecond
case "GiBs", "GiBy/s": // gibibytes/sec
case "GiBs": // gibibytes/sec
return GibibytePerSecond
case "Gibits", "Gibit/s": // gibibits/sec
case "Gibits": // gibibits/sec
return GibibitPerSecond
case "GBs", "GBy/s": // gigabytes/sec
return GigabytePerSecond
case "Gbits", "Gbit/s": // gigabits/sec
return GigabitPerSecond
case "TiBs", "TiBy/s": // tebibytes/sec
case "TiBs": // tebibytes/sec
return TebibytePerSecond
case "Tibits", "Tibit/s": // tebibits/sec
case "Tibits": // tebibits/sec
return TebibitPerSecond
case "TBs", "TBy/s": // terabytes/sec
return TerabytePerSecond
case "Tbits", "Tbit/s": // terabits/sec
return TerabitPerSecond
case "PiBs", "PiBy/s": // pebibytes/sec
case "PiBs": // pebibytes/sec
return PebibytePerSecond
case "Pibits", "Pibit/s": // pebibits/sec
case "Pibits": // pebibits/sec
return PebibitPerSecond
case "PBs", "PBy/s": // petabytes/sec
return PetabytePerSecond
case "Pbits", "Pbit/s": // petabits/sec
return PetabitPerSecond
case "EBy/s": // exabytes/sec
return ExabytePerSecond
case "Ebit/s": // exabits/sec
return ExabitPerSecond
case "EiBy/s": // exbibytes/sec
return ExbibytePerSecond
case "Eibit/s": // exbibits/sec
return ExbibitPerSecond
case "ZBy/s": // zettabytes/sec
return ZettabytePerSecond
case "Zbit/s": // zettabits/sec
return ZettabitPerSecond
case "ZiBy/s": // zebibytes/sec
return ZebibytePerSecond
case "Zibit/s": // zebibits/sec
return ZebibitPerSecond
case "YBy/s": // yottabytes/sec
return YottabytePerSecond
case "Ybit/s": // yottabits/sec
return YottabitPerSecond
case "YiBy/s": // yobibytes/sec
return YobibytePerSecond
case "Yibit/s": // yobibits/sec
return YobibitPerSecond
default:
return 1
}

View File

@@ -75,83 +75,3 @@ func TestDataRate(t *testing.T) {
// 1024 * 1024 * 1024 bytes = 1 gbytes
assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
}
func TestDataRateConversionUCUMUnit(t *testing.T) {
dataRateConverter := NewDataRateConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Binary byte scaling
{name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
{name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
{name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
{name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
{name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
{name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
{name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
{name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
{name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
{name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
{name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
{name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
{name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
{name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
{name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
{name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
{name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
// Unit alias
{name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
{name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
{name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
{name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
{name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
{name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
{name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
{name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
{name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
{name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
{name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
{name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
{name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
{name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
{name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
{name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
{name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
// Binary bit scaling (Exbi, Zebi, Yobi)
{name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
{name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
{name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
{name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
{name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
{name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
// Bytes to bits (Exbi, Zebi, Yobi)
{name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
{name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
{name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
// 1024 bytes = 1 kbytes
assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
// 1 byte = 8 bits
assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
// 1 mbytes = 1024 kbytes
@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
// 1024 kbytes = 1 mbytes
assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
// 1 mbytes = 1024 * 1024 bytes
assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
// 1024 mbytes = 1 gbytes
@@ -45,90 +45,10 @@ func TestData(t *testing.T) {
assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
// 1024 tbytes = 1 PiBy
assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
// 1024 tbytes = 1 pbytes
assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
// 1 pbytes = 1024 tbytes
assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
// 1024 TiBy = 1 pbytes
assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
}
func TestDataConversionUCUMUnit(t *testing.T) {
dataConverter := NewDataConverter()
tests := []struct {
name string
input Value
toUnit Unit
expected Value
}{
// Bits to bytes
{name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
{name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
// Binary byte scaling
{name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
{name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
{name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
{name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
{name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
{name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
{name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
{name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
{name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
{name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
// SI bit scaling
{name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
{name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
{name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
{name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
{name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
{name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
{name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
{name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
{name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
{name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
// Binary bit scaling
{name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
{name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
{name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
{name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
{name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
{name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
{name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
{name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
{name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
{name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
// Bytes to bits
{name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
{name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
{name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
// SI byte scaling (Exa, Zetta, Yotta)
{name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
{name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
{name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
{name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
{name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
{name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
// Binary byte scaling (Exbi, Zebi, Yobi)
{name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
{name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
{name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
{name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
{name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
{name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
// SI bit scaling (Exa, Zetta, Yotta)
{name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
{name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
{name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
{name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
{name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
{name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataConverter.Convert(tt.input, tt.toUnit)
assert.Equal(t, tt.expected, got)
})
}
// 1 pbytes = 1024 TBy
assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
}

View File

@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
return Hour
case "d":
return Day
case "w", "wk":
case "w":
return Week
default:
return Second

View File

@@ -54,13 +54,4 @@ func TestDurationConvert(t *testing.T) {
assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
// 1000000000 ns = 1 s
assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))
// 7 d = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
// 1 wk = 7 d
assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
// 1 wk = 168 h
assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
// 604800 s = 1 wk
assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
}

View File

@@ -24,62 +24,30 @@ func (f *dataFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value))
case "decbytes":
return humanize.Bytes(uint64(value))
case "bits", "bit":
// humanize.IBytes doesn't support bits
// and returns 0 B for values less than a byte
if value < 8 {
return fmt.Sprintf("%v b", value)
}
return humanize.IBytes(uint64(value / 8))
case "bits":
return humanize.IBytes(uint64(value * converter.Bit))
case "decbits":
if value < 8 {
return fmt.Sprintf("%v b", value)
}
return humanize.Bytes(uint64(value / 8))
case "kbytes", "KiBy":
return humanize.Bytes(uint64(value * converter.Bit))
case "kbytes", "kBy":
return humanize.IBytes(uint64(value * converter.Kibibit))
case "Kibit":
return humanize.IBytes(uint64(value * converter.Kibibit / 8))
case "decKbytes", "deckbytes", "kBy":
return humanize.Bytes(uint64(value * converter.Kilobit))
case "kbit":
return humanize.Bytes(uint64(value * converter.Kilobit / 8))
case "mbytes", "MiBy":
case "decKbytes", "deckbytes":
return humanize.IBytes(uint64(value * converter.Kilobit))
case "mbytes", "MBy":
return humanize.IBytes(uint64(value * converter.Mebibit))
case "Mibit":
return humanize.IBytes(uint64(value * converter.Mebibit / 8))
case "decMbytes", "decmbytes", "MBy":
case "decMbytes", "decmbytes":
return humanize.Bytes(uint64(value * converter.Megabit))
case "Mbit":
return humanize.Bytes(uint64(value * converter.Megabit / 8))
case "gbytes", "GiBy":
case "gbytes", "GBy":
return humanize.IBytes(uint64(value * converter.Gibibit))
case "Gibit":
return humanize.IBytes(uint64(value * converter.Gibibit / 8))
case "decGbytes", "decgbytes", "GBy":
case "decGbytes", "decgbytes":
return humanize.Bytes(uint64(value * converter.Gigabit))
case "Gbit":
return humanize.Bytes(uint64(value * converter.Gigabit / 8))
case "tbytes", "TiBy":
case "tbytes", "TBy":
return humanize.IBytes(uint64(value * converter.Tebibit))
case "Tibit":
return humanize.IBytes(uint64(value * converter.Tebibit / 8))
case "decTbytes", "dectbytes", "TBy":
case "decTbytes", "dectbytes":
return humanize.Bytes(uint64(value * converter.Terabit))
case "Tbit":
return humanize.Bytes(uint64(value * converter.Terabit / 8))
case "pbytes", "PiBy":
case "pbytes", "PBy":
return humanize.IBytes(uint64(value * converter.Pebibit))
case "Pbit":
return humanize.Bytes(uint64(value * converter.Petabit / 8))
case "decPbytes", "decpbytes", "PBy":
case "decPbytes", "decpbytes":
return humanize.Bytes(uint64(value * converter.Petabit))
case "EiBy":
return humanize.IBytes(uint64(value * converter.Exbibit))
case "Ebit":
return humanize.Bytes(uint64(value * converter.Exabit / 8))
case "EBy":
return humanize.Bytes(uint64(value * converter.Exabit))
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)
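
The byte branches delegate the actual rendering to go-humanize: humanize.Bytes scales by powers of 1000 and prints SI suffixes (kB, MB, ...), while humanize.IBytes scales by powers of 1024 and prints IEC suffixes (KiB, MiB, ...). Neither understands bits, which is why the bit cases divide by 8 first and guard values smaller than a byte. A small standalone illustration:

package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	fmt.Println(humanize.Bytes(1000))  // "1.0 kB"  (SI, powers of 1000)
	fmt.Println(humanize.IBytes(1024)) // "1.0 KiB" (IEC, powers of 1024)

	// 7 bits converted to bytes truncates to zero, so IBytes prints "0 B",
	// the edge case the bit branches special-case above.
	bits := 7.0
	fmt.Println(humanize.IBytes(uint64(bits / 8))) // "0 B"
}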

View File

@@ -24,55 +24,50 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
return humanize.IBytes(uint64(value)) + "/s"
case "Bps", "By/s":
return humanize.Bytes(uint64(value)) + "/s"
case "KiBs", "KiBy/s":
case "binbps":
return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
case "bps", "bit/s":
return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
case "KiBs":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
case "Kibits", "Kibit/s":
return humanize.IBytes(uint64(value*converter.KibibitPerSecond/8)) + "/s"
case "Kibits":
return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
case "KBs", "kBy/s":
return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
case "Kbits", "kbit/s":
return humanize.Bytes(uint64(value*converter.KilobitPerSecond/8)) + "/s"
case "MiBs", "MiBy/s":
return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
case "MiBs":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
case "Mibits", "Mibit/s":
return humanize.IBytes(uint64(value*converter.MebibitPerSecond/8)) + "/s"
case "Mibits":
return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
case "MBs", "MBy/s":
return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
case "Mbits", "Mbit/s":
return humanize.Bytes(uint64(value*converter.MegabitPerSecond/8)) + "/s"
case "GiBs", "GiBy/s":
return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
case "GiBs":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
case "Gibits", "Gibit/s":
return humanize.IBytes(uint64(value*converter.GibibitPerSecond/8)) + "/s"
case "Gibits":
return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
case "GBs", "GBy/s":
return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
case "Gbits", "Gbit/s":
return humanize.Bytes(uint64(value*converter.GigabitPerSecond/8)) + "/s"
case "TiBs", "TiBy/s":
return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
case "TiBs":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
case "Tibits", "Tibit/s":
return humanize.IBytes(uint64(value*converter.TebibitPerSecond/8)) + "/s"
case "Tibits":
return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
case "TBs", "TBy/s":
return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
case "Tbits", "Tbit/s":
return humanize.Bytes(uint64(value*converter.TerabitPerSecond/8)) + "/s"
case "PiBs", "PiBy/s":
return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
case "PiBs":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
case "Pibits", "Pibit/s":
return humanize.IBytes(uint64(value*converter.PebibitPerSecond/8)) + "/s"
case "Pibits":
return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
case "PBs", "PBy/s":
return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
case "Pbits", "Pbit/s":
return humanize.Bytes(uint64(value*converter.PetabitPerSecond/8)) + "/s"
// Exa units
case "EBy/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
case "Ebit/s":
return humanize.Bytes(uint64(value*converter.ExabitPerSecond/8)) + "/s"
case "EiBy/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
case "Eibit/s":
return humanize.IBytes(uint64(value*converter.ExbibitPerSecond/8)) + "/s"
return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
}
// When unit is not matched, return the value as it is.
return fmt.Sprintf("%v", value)
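
The rate branches follow the same recipe for bit-based units: scale to bits per second with the matching converter constant, divide by 8 to get bytes, humanize, and append "/s". A rough worked example, assuming converter.MebibitPerSecond carries its usual value of 1024*1024:

package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

func main() {
	// Assumed value of converter.MebibitPerSecond (bits per second in one Mibit/s).
	const mebibitPerSecond = 1024 * 1024

	// 10 Mibit/s = 10 * 1024 * 1024 bit/s = 1,310,720 byte/s, rendered as "1.3 MiB/s".
	value := 10.0
	fmt.Println(humanize.IBytes(uint64(value*mebibitPerSecond/8)) + "/s")
}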

View File

@@ -1,140 +0,0 @@
package formatter
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestDataRateFormatterComprehensive(t *testing.T) {
dataRateFormatter := NewDataRateFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Base bytes/sec - binBps
{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},
// SI Base bytes/sec - Bps, By/s
{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},
// Kibibytes/sec - KiBs, KiBy/s
{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},
// Kibibits/sec - Kibits, Kibit/s
{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "128 B/s"},
{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "5.3 MiB/s"},
{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"},
// Kilobytes/sec (SI) - KBs, kBy/s
{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},
// Kilobits/sec (SI) - Kbits, kbit/s
{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "125 B/s"},
{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "125 B/s"},
// Mebibytes/sec - MiBs, MiBy/s
{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},
// Mebibits/sec - Mibits, Mibit/s
{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "5.0 MiB/s"},
{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "1.3 MiB/s"},
// Megabytes/sec (SI) - MBs, MBy/s
{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},
// Megabits/sec (SI) - Mbits, Mbit/s
{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "125 kB/s"},
{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "125 kB/s"},
// Gibibytes/sec - GiBs, GiBy/s
{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},
// Gibibits/sec - Gibits, Gibit/s
{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "5.3 TiB/s"},
{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "5.3 TiB/s"},
// Gigabytes/sec (SI) - GBs, GBy/s
{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},
// Gigabits/sec (SI) - Gbits, Gbit/s
{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "5.3 TB/s"},
{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "5.3 TB/s"},
// Tebibytes/sec - TiBs, TiBy/s
{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},
// Tebibits/sec - Tibits, Tibit/s
{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "5.3 PiB/s"},
{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "5.3 PiB/s"},
// Terabytes/sec (SI) - TBs, TBy/s
{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},
// Terabits/sec (SI) - Tbits, Tbit/s
{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "5.3 PB/s"},
{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "5.3 PB/s"},
// Pebibytes/sec - PiBs, PiBy/s
{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},
// Pebibits/sec - Pibits, Pibit/s
{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "1.3 EiB/s"},
{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "1.3 EiB/s"},
// Petabytes/sec (SI) - PBs, PBy/s
{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},
// Petabits/sec (SI) - Pbits, Pbit/s
{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "5.3 PB/s"},
{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "5.3 PB/s"},
// Exabytes/sec (SI) - EBy/s
{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},
// Exabits/sec (SI) - Ebit/s
{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "1.3 EB/s"},
// Exbibytes/sec (IEC) - EiBy/s
{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},
// Exbibits/sec (IEC) - Eibit/s
{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "1.3 EiB/s"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataRateFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
}

View File

@@ -14,174 +14,21 @@ func TestData(t *testing.T) {
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}
func TestDataFormatterComprehensive(t *testing.T) {
dataFormatter := NewDataFormatter()
tests := []struct {
name string
value float64
unit string
expected string
}{
// IEC Bits - bits, bit
{name: "bits: 1", value: 1, unit: "bits", expected: "1 b"},
{name: "bits: 7", value: 7, unit: "bit", expected: "7 b"},
{name: "bits: to MiB = 8 * 1024 * 1024 * 9", value: 8 * 1024 * 1024 * 9, unit: "bits", expected: "9.0 MiB"},
// SI Bits - decbits, bit
{name: "decbits: 1", value: 1, unit: "decbits", expected: "1 b"},
{name: "decbits: 7", value: 7, unit: "decbits", expected: "7 b"},
{name: "decbits: to MB = 8 * 1000 * 1000 * 4", value: 8 * 1000 * 1000 * 4, unit: "decbits", expected: "4.0 MB"},
// IEC Base bytes - bytes, By
{name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
{name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
{name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
{name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
{name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
{name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
{name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
{name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
{name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},
// SI Base bytes - decbytes
{name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
{name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
{name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
{name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},
// Kibibytes - kbytes, KiBy (IEC)
{name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
{name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
{name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
{name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
{name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
{name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
{name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
{name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
{name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},
// SI Kilobytes - decKbytes, deckbytes, kBy
{name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
{name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
{name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
{name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
{name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},
// Mebibytes - mbytes, MiBy (IEC)
{name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
{name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
{name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
{name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
{name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
{name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
{name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
{name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},
// SI Megabytes - decMbytes, decmbytes, MBy
{name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
{name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
{name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
{name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},
// Gibibytes - gbytes, GiBy (IEC)
{name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
{name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
{name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},
// SI Gigabytes - decGbytes, decgbytes, GBy
{name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
{name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},
// Tebibytes - tbytes, TiBy (IEC)
{name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
{name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
{name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},
// SI Terabytes - decTbytes, dectbytes, TBy
{name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
{name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
{name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},
// Pebibytes - pbytes, PiBy (IEC)
{name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
{name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},
// SI Petabytes - decPbytes, decpbytes, PBy
{name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
{name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
{name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},
// Exbibytes - EiBy (IEC)
{name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},
// Exabytes - EBy (SI)
{name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},
// Kibibits - Kibit (IEC): 1 Kibit = 1024 bits = 128 bytes
{name: "Kibit: 1 = 128 B", value: 1, unit: "Kibit", expected: "128 B"},
{name: "Kibit: 1024 = 128 KiB", value: 1024, unit: "Kibit", expected: "128 KiB"},
// Mebibits - Mibit (IEC): 1 Mibit = 1024 Kibit = 128 KiB
{name: "Mibit: 1 = 128 KiB", value: 1, unit: "Mibit", expected: "128 KiB"},
{name: "Mibit: 1024 = 128 MiB", value: 1024, unit: "Mibit", expected: "128 MiB"},
// Gibibits - Gibit (IEC): 1 Gibit = 1024 Mibit = 128 MiB
{name: "Gibit: 1 = 128 MiB", value: 1, unit: "Gibit", expected: "128 MiB"},
{name: "Gibit: 42*1024 = 5.3 TiB", value: 42 * 1024, unit: "Gibit", expected: "5.3 TiB"},
// Tebibits - Tibit (IEC): 1 Tibit = 1024 Gibit = 128 GiB
{name: "Tibit: 1 = 128 GiB", value: 1, unit: "Tibit", expected: "128 GiB"},
{name: "Tibit: 42*1024 = 5.3 PiB", value: 42 * 1024, unit: "Tibit", expected: "5.3 PiB"},
// Kilobits - kbit (SI): 1 kbit = 1000 bits = 125 bytes
{name: "kbit: 1 = 125 B", value: 1, unit: "kbit", expected: "125 B"},
{name: "kbit: 1000 = 125 kB", value: 1000, unit: "kbit", expected: "125 kB"},
// Megabits - Mbit (SI): 1 Mbit = 1000 kbit = 125 kB
{name: "Mbit: 1 = 125 kB", value: 1, unit: "Mbit", expected: "125 kB"},
{name: "Mbit: 1000 = 125 MB", value: 1000, unit: "Mbit", expected: "125 MB"},
// Gigabits - Gbit (SI): 1 Gbit = 1000 Mbit = 125 MB
{name: "Gbit: 1 = 125 MB", value: 1, unit: "Gbit", expected: "125 MB"},
{name: "Gbit: 42*1000 = 5.3 TB", value: 42 * 1000, unit: "Gbit", expected: "5.3 TB"},
// Terabits - Tbit (SI): 1 Tbit = 1000 Gbit = 125 GB
{name: "Tbit: 1 = 125 GB", value: 1, unit: "Tbit", expected: "125 GB"},
{name: "Tbit: 42*1000 = 5.3 PB", value: 42 * 1000, unit: "Tbit", expected: "5.3 PB"},
// Petabits - Pbit (SI): 1 Pbit = 1000 Tbit = 125 TB
{name: "Pbit: 1 = 125 TB", value: 1, unit: "Pbit", expected: "125 TB"},
{name: "Pbit: 42 = 5.3 PB", value: 42, unit: "Pbit", expected: "5.3 PB"},
// Exabits - Ebit (SI): 1 Ebit = 1000 Pbit = 125 PB
{name: "Ebit: 1 = 125 PB", value: 1, unit: "Ebit", expected: "125 PB"},
{name: "Ebit: 10 = 1.3 EB", value: 10, unit: "Ebit", expected: "1.3 EB"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := dataFormatter.Format(tt.value, tt.unit)
assert.Equal(t, tt.expected, got)
})
}
assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
}

View File

@@ -18,11 +18,11 @@ var (
func FromUnit(u string) Formatter {
switch u {
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
return DurationFormatter
case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
return DataFormatter
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
return DataRateFormatter
case "percent", "percentunit", "%":
return PercentFormatter
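
FromUnit is the lookup callers use when all they have is the unit string; a unit removed from these lists now falls through to whatever the function's default case returns. A hedged usage sketch (the import path is assumed; FromUnit and Format follow the signatures shown above):

package main

import (
	"fmt"

	// Assumed import path for the formatter package; it is not visible in this diff.
	"github.com/SigNoz/signoz/pkg/formatter"
)

func main() {
	f := formatter.FromUnit("bytes") // resolves to the data formatter
	fmt.Println(f.Format(1536, "bytes")) // "1.5 KiB"
}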

View File

@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
return toHours(value)
case "d":
return toDays(value)
case "w", "wk":
case "w":
return toWeeks(value)
}
// When unit is not matched, return the value as it is.

View File

@@ -167,6 +167,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
)
}

View File

@@ -389,6 +389,8 @@ func New(
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
@@ -438,6 +440,7 @@ func New(
factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
factory.NewNamedService(factory.MustNewName("authz"), authz),
factory.NewNamedService(factory.MustNewName("user"), userService),
)
if err != nil {
return nil, err

View File

@@ -0,0 +1,80 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type addRootUser struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return &addRootUser{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
})
}
func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
if err != nil {
return err
}
column := &sqlschema.Column{
Name: sqlschema.ColumnName("is_root"),
DataType: sqlschema.DataTypeBoolean,
Nullable: false,
}
sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
return err
}
return nil
}
func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
return nil
}

View File

@@ -41,6 +41,22 @@ func NewOrganization(displayName string) *Organization {
}
}
func NewOrganizationWithName(name string) *Organization {
id := valuer.GenerateUUID()
return &Organization{
Identifiable: Identifiable{
ID: id,
},
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Name: name,
DisplayName: name,
Key: NewOrganizationKey(id),
}
}
func NewOrganizationKey(orgID valuer.UUID) uint32 {
hasher := fnv.New32a()
@@ -74,6 +90,7 @@ type TTLSetting struct {
type OrganizationStore interface {
Create(context.Context, *Organization) error
Get(context.Context, valuer.UUID) (*Organization, error)
GetByName(context.Context, string) (*Organization, error)
GetAll(context.Context) ([]*Organization, error)
ListByKeyRange(context.Context, uint32, uint32) ([]*Organization, error)
Update(context.Context, *Organization) error
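
Paired with the root.org_name setting, GetByName lets the root-user reconciliation either reuse an existing organization or create one with the configured name. A rough sketch of that get-or-create flow (helper name, import path, and error handling are illustrative, not the actual reconciliation code):

package example

import (
	"context"

	// Assumed import path for the package declaring Organization and OrganizationStore.
	"github.com/SigNoz/signoz/pkg/types"
)

// getOrCreateOrg is a hypothetical helper combining GetByName and
// NewOrganizationWithName; real code would inspect the lookup error
// instead of treating every failure as "not found".
func getOrCreateOrg(ctx context.Context, store types.OrganizationStore, name string) (*types.Organization, error) {
	if org, err := store.GetByName(ctx, name); err == nil {
		return org, nil
	}
	org := types.NewOrganizationWithName(name)
	if err := store.Create(ctx, org); err != nil {
		return nil, err
	}
	return org, nil
}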

View File

@@ -265,46 +265,6 @@ func TestBasicRuleThresholdEval_UnitConversion(t *testing.T) {
ruleUnit: "",
shouldAlert: true,
},
// bytes and gibibytes:
// the rule fires only if the target is converted to bytes, so the sample value falls below the 100 GiBy target
{
name: "bytes to Gibibytes - should alert",
threshold: BasicRuleThreshold{
Name: CriticalThresholdName,
TargetValue: &target, // 100 Gibibytes
TargetUnit: "GiBy",
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 70 * 1024 * 1024 * 1024, Timestamp: 1000}, // 70 Gibibytes
},
},
ruleUnit: "bytes",
shouldAlert: true,
},
// data rate conversion - bytes per second to MiB per second
// the rule fires only if the target is converted to By/s, so the sample value falls below the 100 MiB/s target
{
name: "bytes per second to MiB per second - should alert",
threshold: BasicRuleThreshold{
Name: CriticalThresholdName,
TargetValue: &target, // 100 MiB/s
TargetUnit: "MiBy/s",
MatchType: AtleastOnce,
CompareOp: ValueIsBelow,
},
series: v3.Series{
Labels: map[string]string{"service": "test"},
Points: []v3.Point{
{Value: 30 * 1024 * 1024, Timestamp: 1000}, // 30 MiB/s
},
},
ruleUnit: "By/s",
shouldAlert: true,
},
}
for _, tt := range tests {

View File

@@ -11,15 +11,16 @@ import (
)
var (
ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
ErrPasswordNotFound = errors.MustNewCode("password_not_found")
ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
ErrPasswordNotFound = errors.MustNewCode("password_not_found")
ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
ErrCodeRootUserOperationUnsupported = errors.MustNewCode("root_user_operation_unsupported")
)
type GettableUser = User
@@ -29,9 +30,10 @@ type User struct {
Identifiable
DisplayName string `bun:"display_name" json:"displayName"`
Email valuer.Email `bun:"email,type:text" json:"email"`
Role Role `bun:"role,type:text" json:"role"`
OrgID valuer.UUID `bun:"org_id,type:text" json:"orgId"`
Email valuer.Email `bun:"email" json:"email"`
Role Role `bun:"role" json:"role"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
IsRoot bool `bun:"is_root" json:"isRoot"`
TimeAuditable
}
@@ -64,6 +66,7 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
Email: email,
Role: role,
OrgID: orgID,
IsRoot: false,
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
@@ -71,6 +74,65 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
}, nil
}
func NewRootUser(displayName string, email valuer.Email, orgID valuer.UUID) (*User, error) {
if email.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
}
if orgID.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required")
}
return &User{
Identifiable: Identifiable{
ID: valuer.GenerateUUID(),
},
DisplayName: displayName,
Email: email,
Role: RoleAdmin,
OrgID: orgID,
IsRoot: true,
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}, nil
}
// Update applies mutable fields from the input to the user. Immutable fields
// (email, is_root, org_id, id) are preserved. Only non-zero input fields are applied.
func (u *User) Update(displayName string, role Role) {
if displayName != "" {
u.DisplayName = displayName
}
if role != "" {
u.Role = role
}
u.UpdatedAt = time.Now()
}
// PromoteToRoot promotes the user to a root user with admin role.
func (u *User) PromoteToRoot() {
u.IsRoot = true
u.Role = RoleAdmin
u.UpdatedAt = time.Now()
}
// UpdateEmail updates the email of the user.
func (u *User) UpdateEmail(email valuer.Email) {
u.Email = email
u.UpdatedAt = time.Now()
}
// ErrIfRoot returns an error if the user is a root user. The caller should
// enrich the error with the specific operation using errors.WithAdditionalf.
func (u *User) ErrIfRoot() error {
if u.IsRoot {
return errors.New(errors.TypeUnsupported, ErrCodeRootUserOperationUnsupported, "this operation is not supported for the root user")
}
return nil
}
func NewTraitsFromUser(user *User) map[string]any {
return map[string]any{
"name": user.DisplayName,
@@ -133,7 +195,7 @@ type UserStore interface {
// List users by email and org ids.
ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*User, error)
UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *User) (*User, error)
UpdateUser(ctx context.Context, orgID valuer.UUID, user *User) error
DeleteUser(ctx context.Context, orgID string, id string) error
// Creates a password.
@@ -156,6 +218,9 @@ type UserStore interface {
CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)
// Get root user by org.
GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*User, error)
// Transaction
RunInTx(ctx context.Context, cb func(ctx context.Context) error) error
}
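
Per the ErrIfRoot comment, the intended pattern is to run the guard before any mutating operation and enrich the error with what was attempted. A hedged sketch of that call site (the wrapper and the errors.WithAdditionalf usage are assumptions drawn from that comment, not the module code):

package example

import (
	// Assumed import path for the errors package referenced by ErrIfRoot's comment.
	"github.com/SigNoz/signoz/pkg/errors"
	// Assumed import path for the package declaring User.
	"github.com/SigNoz/signoz/pkg/types"
)

// guardRootUser is a hypothetical helper showing where the root guard sits
// before a mutating user operation such as delete or password change.
func guardRootUser(user *types.User, operation string) error {
	if err := user.ErrIfRoot(); err != nil {
		// errors.WithAdditionalf is named in the ErrIfRoot comment; its exact
		// signature is assumed here.
		return errors.WithAdditionalf(err, "%s is not allowed for the root user", operation)
	}
	return nil
}

A delete or password-change handler would call guardRootUser(user, "delete") before touching the store.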