Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-18 15:02:35 +00:00)

Compare commits: 9 commits (fix/extern ... platform-p)
| Author | SHA1 | Date |
|---|---|---|
| | ba4e93050e | |
| | 7f1d350ffe | |
| | 1d3134959d | |
| | b86bd24dd9 | |
| | 4c49d45cbf | |
| | 9b3d3453b1 | |
| | 9d981d8a13 | |
| | 6de4520a95 | |
| | f566909320 | |
```diff
@@ -176,25 +176,6 @@ We have published benchmarks comparing Loki with SigNoz. Check …
 We ❤️ contributions to the project, big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) before you start contributing to SigNoz.
 
 Not sure how to get started? Just ping us on the #contributing channel in our [slack community](https://signoz.io/slack)
 
-### Our project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-
-#### Frontend
-
-- [Palash Gupta](https://github.com/palashgdev)
-- [Yunus M](https://github.com/YounixM)
-- [Rajat Dabade](https://github.com/Rajat-Dabade)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-
 <br /><br />
 
 ## Documentation
```
README.md (28 changed lines)
```diff
@@ -221,34 +221,6 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING
 
 Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)
 
-### Project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
-- [Ekansh Gupta](https://github.com/eKuG)
-- [Aniket Agarwal](https://github.com/aniketio-ctrl)
-
-#### Frontend
-
-- [Yunus M](https://github.com/YounixM)
-- [Vikrant Gupta](https://github.com/vikrantgupta25)
-- [Sagar Rajput](https://github.com/SagarRajput-7)
-- [Shaheer Kochai](https://github.com/ahmadshaheer)
-- [Amlan Kumar Nandy](https://github.com/amlannandy)
-- [Sahil Khan](https://github.com/sawhil)
-- [Aditya Singh](https://github.com/aks07)
-- [Abhi Kumar](https://github.com/ahrefabhi)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-- [Vibhu Pandey](https://github.com/therealpandey)
-
 <br /><br />
 
```
```diff
@@ -187,25 +187,6 @@ Jaeger is only a distributed tracing system, but SigNoz can provide metric…
 
 Not sure how to get started? Just reach out to us on the `#contributing` channel in our [slack community](https://signoz.io/slack).
 
-### Project maintainers
-
-#### Backend
-
-- [Ankit Nayan](https://github.com/ankitnayan)
-- [Nityananda Gohain](https://github.com/nityanandagohain)
-- [Srikanth Chekuri](https://github.com/srikanthccv)
-- [Vishal Sharma](https://github.com/makeavish)
-
-#### Frontend
-
-- [Palash Gupta](https://github.com/palashgdev)
-- [Yunus M](https://github.com/YounixM)
-- [Rajat Dabade](https://github.com/Rajat-Dabade)
-
-#### DevOps
-
-- [Prashant Shahi](https://github.com/prashant-shahi)
-
 <br /><br />
 
 ## Documentation
```
```diff
@@ -294,7 +294,6 @@ flagger:
  config:
    boolean:
      use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:
```
```diff
@@ -309,3 +308,14 @@ user:
     allow_self: true
   # The duration within which a user can reset their password.
   max_token_lifetime: 6h
+root:
+  # Whether to enable the root user. When enabled, a root user is provisioned
+  # on startup using the email and password below. The root user cannot be
+  # deleted, updated, or have their password changed through the UI.
+  enabled: false
+  # The email address of the root user.
+  email: ""
+  # The password of the root user. Must meet password requirements.
+  password: ""
+  # The name of the organization to create or look up for the root user.
+  org_name: default
```
```diff
@@ -4678,6 +4678,8 @@ components:
         type: string
       id:
         type: string
+      isRoot:
+        type: boolean
       orgId:
         type: string
       role:
```
```diff
@@ -45,7 +45,7 @@ type APIHandler struct {
 }
 
 // NewAPIHandler returns an APIHandler
-func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
 	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
 		Reader:      opts.DataConnector,
 		RuleManager: opts.RulesManager,
@@ -58,7 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
 		Signoz:         signoz,
 		QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
 		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
-	})
+	}, config)
 
 	if err != nil {
 		return nil, err
```
```diff
@@ -170,14 +170,14 @@ func (ah *APIHandler) getOrCreateCloudIntegrationUser(
 	cloudIntegrationUserName := fmt.Sprintf("%s-integration", cloudProvider)
 	email := valuer.MustNewEmail(fmt.Sprintf("%s@signoz.io", cloudIntegrationUserName))
 
-	cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, types.RoleViewer, valuer.MustNewUUID(orgId))
+	cloudIntegrationUser, err := types.NewUser(cloudIntegrationUserName, email, valuer.MustNewUUID(orgId))
 	if err != nil {
 		return nil, basemodel.InternalError(fmt.Errorf("couldn't create cloud integration user: %w", err))
 	}
 
 	password := types.MustGenerateFactorPassword(cloudIntegrationUser.ID.StringValue())
 
-	cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password))
+	cloudIntegrationUser, err = ah.Signoz.Modules.User.GetOrCreateUser(ctx, cloudIntegrationUser, user.WithFactorPassword(password), user.WithRole(types.RoleViewer))
 	if err != nil {
 		return nil, basemodel.InternalError(fmt.Errorf("couldn't look for integration user: %w", err))
 	}
```
```diff
@@ -175,7 +175,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
 		GlobalConfig: config.Global,
 	}
 
-	apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
+	apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
 	if err != nil {
 		return nil, err
 	}
```
```diff
@@ -1542,6 +1542,10 @@ export interface TypesUserDTO {
    * @type string
    */
   id?: string;
+  /**
+   * @type boolean
+   */
+  isRoot?: boolean;
   /**
    * @type string
    */
```
```diff
@@ -202,7 +202,7 @@ function AllEndPoints({
 
 	const onRowClick = useCallback(
 		(props: any): void => {
-			setSelectedEndPointName(props[SPAN_ATTRIBUTES.HTTP_URL] as string);
+			setSelectedEndPointName(props[SPAN_ATTRIBUTES.URL_PATH] as string);
 			setSelectedView(VIEWS.ENDPOINT_STATS);
 			const initialItems = [
 				...(filters?.items || []),
@@ -213,7 +213,7 @@ function AllEndPoints({
 				op: 'AND',
 			});
 			setParams({
-				selectedEndPointName: props[SPAN_ATTRIBUTES.HTTP_URL] as string,
+				selectedEndPointName: props[SPAN_ATTRIBUTES.URL_PATH] as string,
 				selectedView: VIEWS.ENDPOINT_STATS,
 				endPointDetailsLocalFilters: {
 					items: initialItems,
```
```diff
@@ -33,7 +33,7 @@ import { SPAN_ATTRIBUTES } from './constants';
 
 const httpUrlKey = {
 	dataType: DataTypes.String,
-	key: SPAN_ATTRIBUTES.HTTP_URL,
+	key: SPAN_ATTRIBUTES.URL_PATH,
 	type: 'tag',
 };
```
```diff
@@ -93,7 +93,7 @@ function EndPointDetails({
 			return currentFilters; // No change needed, prevents loop
 		}
 
-		// Rebuild filters: Keep non-http_url filters and add/update http_url filter based on prop
+		// Rebuild filters: Keep non-http.url filters and add/update http.url filter based on prop
 		const otherFilters = currentFilters?.items?.filter(
 			(item) => item.key?.key !== httpUrlKey.key,
 		);
@@ -125,7 +125,7 @@ function EndPointDetails({
 		(newFilters: IBuilderQuery['filters']): void => {
 			// 1. Update local filters state immediately
 			setFilters(newFilters);
-			// Filter out http_url filter before saving to params
+			// Filter out http.url filter before saving to params
 			const filteredNewFilters = {
 				op: 'AND',
 				items:
@@ -299,6 +299,7 @@ function EndPointDetails({
 					endPointStatusCodeLatencyBarChartsDataQuery
 				}
 				domainName={domainName}
+				endPointName={endPointName}
 				filters={filters}
 				timeRange={timeRange}
 				onDragSelect={onDragSelect}
```
```diff
@@ -56,15 +56,15 @@ function TopErrors({
 	{
 		items: endPointName
 			? [
-					// Remove any existing http_url filters from initialFilters to avoid duplicates
+					// Remove any existing http.url filters from initialFilters to avoid duplicates
 					...(initialFilters?.items?.filter(
-						(item) => item.key?.key !== SPAN_ATTRIBUTES.HTTP_URL,
+						(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
 					) || []),
 					{
 						id: '92b8a1c1',
 						key: {
 							dataType: DataTypes.String,
-							key: SPAN_ATTRIBUTES.HTTP_URL,
+							key: SPAN_ATTRIBUTES.URL_PATH,
 							type: 'tag',
 						},
 						op: '=',
```
```diff
@@ -9,7 +9,6 @@ import { ENTITY_VERSION_V5 } from 'constants/app';
 import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
 
-import { SPAN_ATTRIBUTES } from '../constants';
 import DomainMetrics from './DomainMetrics';
 
 // Mock the API call
@@ -127,9 +126,11 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
 			'count()',
 		);
 		// Verify exact domain filter expression structure
-		expect(queryA.filter.expression).toContain("http_host = '0.0.0.0'");
 		expect(queryA.filter.expression).toContain(
-			`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+			"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
 		);
+		expect(queryA.filter.expression).toContain(
+			'url.full EXISTS OR http.url EXISTS',
+		);
 
 		// Verify Query B - p99 latency
@@ -141,13 +142,17 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
 			'p99(duration_nano)',
 		);
 		// Verify exact domain filter expression structure
-		expect(queryB.filter.expression).toContain("http_host = '0.0.0.0'");
+		expect(queryB.filter.expression).toContain(
+			"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
+		);
 
 		// Verify Query C - error count (disabled)
 		const queryC = queryData.find((q: any) => q.queryName === 'C');
 		expect(queryC).toBeDefined();
 		expect(queryC.disabled).toBe(true);
-		expect(queryC.filter.expression).toContain("http_host = '0.0.0.0'");
+		expect(queryC.filter.expression).toContain(
+			"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
+		);
 		expect(queryC.aggregations?.[0]).toBeDefined();
 		expect((queryC.aggregations?.[0] as TraceAggregation)?.expression).toBe(
 			'count()',
@@ -164,7 +169,9 @@ describe('DomainMetrics - V5 Query Payload Tests', () => {
 			'max(timestamp)',
 		);
 		// Verify exact domain filter expression structure
-		expect(queryD.filter.expression).toContain("http_host = '0.0.0.0'");
+		expect(queryD.filter.expression).toContain(
+			"(net.peer.name = '0.0.0.0' OR server.address = '0.0.0.0')",
+		);
 
 		// Verify Formula F1 - error rate calculation
 		const formulas = payload.query.builder.queryFormulas;
```
```diff
@@ -153,7 +153,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
 		// Verify exact domain filter expression structure
 		if (queryA.filter) {
 			expect(queryA.filter.expression).toContain(
-				`http_host = 'api.example.com'`,
+				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
 			);
 			expect(queryA.filter.expression).toContain("kind_string = 'Client'");
 		}
@@ -171,7 +171,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
 		// Verify exact domain filter expression structure
 		if (queryB.filter) {
 			expect(queryB.filter.expression).toContain(
-				`http_host = 'api.example.com'`,
+				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
 			);
 			expect(queryB.filter.expression).toContain("kind_string = 'Client'");
 		}
@@ -185,7 +185,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
 		expect(queryC.aggregateOperator).toBe('count');
 		if (queryC.filter) {
 			expect(queryC.filter.expression).toContain(
-				`http_host = 'api.example.com'`,
+				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
 			);
 			expect(queryC.filter.expression).toContain("kind_string = 'Client'");
 			expect(queryC.filter.expression).toContain('has_error = true');
@@ -204,7 +204,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
 		// Verify exact domain filter expression structure
 		if (queryD.filter) {
 			expect(queryD.filter.expression).toContain(
-				`http_host = 'api.example.com'`,
+				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
 			);
 			expect(queryD.filter.expression).toContain("kind_string = 'Client'");
 		}
@@ -221,7 +221,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
 		}
 		if (queryE.filter) {
 			expect(queryE.filter.expression).toContain(
-				`http_host = 'api.example.com'`,
+				"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')",
 			);
 			expect(queryE.filter.expression).toContain("kind_string = 'Client'");
 		}
@@ -291,7 +291,7 @@ describe('EndPointMetrics - V5 Query Payload Tests', () => {
 		expect(query.filter.expression).toContain('staging');
 		// Also verify domain filter is still present
 		expect(query.filter.expression).toContain(
-			"http_host = 'api.internal.com'",
+			"(net.peer.name = 'api.internal.com' OR server.address = 'api.internal.com')",
 		);
 		// Verify client kind filter is present
 		expect(query.filter.expression).toContain("kind_string = 'Client'");
```
```diff
@@ -34,6 +34,7 @@ function StatusCodeBarCharts({
 	endPointStatusCodeBarChartsDataQuery,
 	endPointStatusCodeLatencyBarChartsDataQuery,
 	domainName,
+	endPointName,
 	filters,
 	timeRange,
 	onDragSelect,
@@ -47,6 +48,7 @@ function StatusCodeBarCharts({
 	unknown
 	>;
 	domainName: string;
+	endPointName: string;
 	filters: IBuilderQuery['filters'];
 	timeRange: {
 		startTime: number;
@@ -142,11 +144,11 @@ function StatusCodeBarCharts({
 
 	const widget = useMemo<Widgets>(
 		() =>
-			getStatusCodeBarChartWidgetData(domainName, {
+			getStatusCodeBarChartWidgetData(domainName, endPointName, {
 				items: [...(filters?.items || [])],
 				op: filters?.op || 'AND',
 			}),
-		[domainName, filters],
+		[domainName, endPointName, filters],
 	);
 
 	const graphClickHandler = useCallback(
@@ -164,7 +166,6 @@ function StatusCodeBarCharts({
 			xValue,
 			TWO_AND_HALF_MINUTES_IN_MILLISECONDS,
 		);
 
 		handleGraphClick({
 			xValue,
 			yValue,
```
```diff
@@ -12,8 +12,8 @@ export const VIEW_TYPES = {
 
 // Span attribute keys - these are the source of truth for all attribute keys
 export const SPAN_ATTRIBUTES = {
-	HTTP_URL: 'http_url',
+	URL_PATH: 'http.url',
 	RESPONSE_STATUS_CODE: 'response_status_code',
-	SERVER_NAME: 'http_host',
+	SERVER_NAME: 'net.peer.name',
 	SERVER_PORT: 'net.peer.port',
 } as const;
```
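After this rename, an endpoint name can arrive under either `http.url` or `url.full`, so consumers look the value up with a fallback. A minimal sketch (not part of the diff, constants inlined for self-containment) of the pattern the formatters in this changeset adopt:

```typescript
// Sketch only: mirrors the fallback used by getFormattedEndPointDropDownData
// and formatTopErrorsDataForTable later in this diff.
const SPAN_ATTRIBUTES = {
	URL_PATH: 'http.url',
	SERVER_NAME: 'net.peer.name',
} as const;

interface ResponseRow {
	data: Record<string, string | number>;
}

function endpointLabel(row: ResponseRow): string {
	// Prefer the http.url attribute; fall back to url.full, then a placeholder.
	return (
		(row.data[SPAN_ATTRIBUTES.URL_PATH] as string) ||
		(row.data['url.full'] as string) ||
		'-'
	);
}
```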
```diff
@@ -280,7 +280,7 @@ describe('API Monitoring Utils', () => {
 		const endpointFilter = result?.items?.find(
 			(item) =>
 				item.key &&
-				item.key.key === SPAN_ATTRIBUTES.HTTP_URL &&
+				item.key.key === SPAN_ATTRIBUTES.URL_PATH &&
 				item.value === endPointName,
 		);
 		expect(endpointFilter).toBeDefined();
@@ -344,12 +344,13 @@ describe('API Monitoring Utils', () => {
 	describe('getFormattedEndPointDropDownData', () => {
 		it('should format endpoint dropdown data correctly', () => {
 			// Arrange
-			const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
+			const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
 			const mockData = [
 				{
 					data: {
 						// eslint-disable-next-line sonarjs/no-duplicate-string
 						[URL_PATH_KEY]: '/api/users',
+						'url.full': 'http://example.com/api/users',
 						A: 150, // count or other metric
 					},
 				},
@@ -357,6 +358,7 @@ describe('API Monitoring Utils', () => {
 					data: {
 						// eslint-disable-next-line sonarjs/no-duplicate-string
 						[URL_PATH_KEY]: '/api/orders',
+						'url.full': 'http://example.com/api/orders',
 						A: 75,
 					},
 				},
@@ -404,7 +406,7 @@ describe('API Monitoring Utils', () => {
 
 	it('should handle items without URL path', () => {
 		// Arrange
-		const URL_PATH_KEY = SPAN_ATTRIBUTES.HTTP_URL;
+		const URL_PATH_KEY = SPAN_ATTRIBUTES.URL_PATH;
 		type MockDataType = {
 			data: {
 				[key: string]: string | number;
@@ -710,11 +712,13 @@ describe('API Monitoring Utils', () => {
 	it('should generate widget configuration for status code bar chart', () => {
 		// Arrange
 		const domainName = 'test-domain';
+		const endPointName = '/api/test';
 		const filters = { items: [], op: 'AND' };
 
 		// Act
 		const result = getStatusCodeBarChartWidgetData(
 			domainName,
+			endPointName,
 			filters as IBuilderQuery['filters'],
 		);
@@ -737,11 +741,21 @@ describe('API Monitoring Utils', () => {
 		if (domainFilter) {
 			expect(domainFilter.value).toBe(domainName);
 		}
+
+		// Should have endpoint filter if provided
+		const endpointFilter = queryData.filters?.items?.find(
+			(item) => item.key && item.key.key === SPAN_ATTRIBUTES.URL_PATH,
+		);
+		expect(endpointFilter).toBeDefined();
+		if (endpointFilter) {
+			expect(endpointFilter.value).toBe(endPointName);
+		}
 	});
 
 	it('should include custom filters in the widget configuration', () => {
 		// Arrange
 		const domainName = 'test-domain';
+		const endPointName = '/api/test';
 		const customFilter = {
 			id: 'custom-filter',
 			key: {
@@ -757,6 +771,7 @@ describe('API Monitoring Utils', () => {
 		// Act
 		const result = getStatusCodeBarChartWidgetData(
 			domainName,
+			endPointName,
 			filters as IBuilderQuery['filters'],
 		);
```
```diff
@@ -25,7 +25,7 @@ jest.mock('container/GridCardLayout/GridCard', () => ({
 		type="button"
 		data-testid="row-click-button"
 		onClick={(): void =>
-			customOnRowClick({ [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test' })
+			customOnRowClick({ [SPAN_ATTRIBUTES.URL_PATH]: '/api/test' })
 		}
 	>
 		Click Row
```
```diff
@@ -6,10 +6,10 @@
  * These tests validate the migration from V4 to V5 format for getAllEndpointsWidgetData:
  * - Filter format change: filters.items[] → filter.expression
  * - Aggregation format: aggregateAttribute → aggregations[] array
- * - Domain filter: http_host = '${domainName}'
+ * - Domain filter: (net.peer.name OR server.address)
  * - Kind filter: kind_string = 'Client'
 * - Four queries: A (count), B (p99 latency), C (max timestamp), D (error count - disabled)
- * - GroupBy: http_url with type 'attribute'
+ * - GroupBy: Both http.url AND url.full with type 'attribute'
 */
import { getAllEndpointsWidgetData } from 'container/ApiMonitoring/utils';
import {
@@ -18,8 +18,6 @@ import {
 } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
 
-import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
-
 describe('AllEndpointsWidget - V5 Migration Validation', () => {
 	const mockDomainName = 'api.example.com';
 	const emptyFilters: IBuilderQuery['filters'] = {
@@ -94,28 +92,28 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
 
 		const [queryA, queryB, queryC, queryD] = widget.query.builder.queryData;
 
-		const baseExpression = `http_host = '${mockDomainName}' AND kind_string = 'Client'`;
+		const baseExpression = `(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') AND kind_string = 'Client'`;
 
 		// Queries A, B, C have identical base filter
 		expect(queryA.filter?.expression).toBe(
-			`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+			`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 		);
 		expect(queryB.filter?.expression).toBe(
-			`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+			`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 		);
 		expect(queryC.filter?.expression).toBe(
-			`${baseExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+			`${baseExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 		);
 
 		// Query D has additional has_error filter
 		expect(queryD.filter?.expression).toBe(
-			`${baseExpression} AND has_error = true AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+			`${baseExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
 		);
 	});
 });
 
 describe('2. GroupBy Structure', () => {
-	it(`default groupBy includes ${SPAN_ATTRIBUTES.HTTP_URL} with type attribute`, () => {
+	it('default groupBy includes both http.url and url.full with type attribute', () => {
 		const widget = getAllEndpointsWidgetData(
 			emptyGroupBy,
 			mockDomainName,
@@ -126,13 +124,23 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
 
 		// All queries should have the same default groupBy
 		queryData.forEach((query) => {
-			expect(query.groupBy).toHaveLength(1);
+			expect(query.groupBy).toHaveLength(2);
 
+			// http.url
 			expect(query.groupBy).toContainEqual({
 				dataType: DataTypes.String,
 				isColumn: false,
 				isJSON: false,
-				key: SPAN_ATTRIBUTES.HTTP_URL,
+				key: 'http.url',
 				type: 'attribute',
 			});
+
+			// url.full
+			expect(query.groupBy).toContainEqual({
+				dataType: DataTypes.String,
+				isColumn: false,
+				isJSON: false,
+				key: 'url.full',
+				type: 'attribute',
+			});
 		});
@@ -162,18 +170,19 @@ describe('AllEndpointsWidget - V5 Migration Validation', () => {
 
 		// All queries should have defaults + custom groupBy
 		queryData.forEach((query) => {
-			expect(query.groupBy).toHaveLength(3); // 1 default + 2 custom
+			expect(query.groupBy).toHaveLength(4); // 2 defaults + 2 custom
 
-			// First two should be defaults (http_url)
-			expect(query.groupBy[0].key).toBe(SPAN_ATTRIBUTES.HTTP_URL);
+			// First two should be defaults (http.url, url.full)
+			expect(query.groupBy[0].key).toBe('http.url');
+			expect(query.groupBy[1].key).toBe('url.full');
 
 			// Last two should be custom (matching subset of properties)
-			expect(query.groupBy[1]).toMatchObject({
+			expect(query.groupBy[2]).toMatchObject({
 				dataType: DataTypes.String,
 				key: 'service.name',
 				type: 'resource',
 			});
-			expect(query.groupBy[2]).toMatchObject({
+			expect(query.groupBy[3]).toMatchObject({
 				dataType: DataTypes.String,
 				key: 'deployment.environment',
 				type: 'resource',
```
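For orientation, a hedged before/after sketch of the query shape these tests pin down; the field names come from the assertions above, while the object literals are simplified:

```typescript
// V4 shape (old): structured filter items plus aggregateAttribute.
const v4Query = {
	filters: {
		op: 'AND',
		items: [{ key: { key: 'http_host' }, op: '=', value: 'api.example.com' }],
	},
	aggregateAttribute: { key: 'duration_nano' },
};

// V5 shape (new): a single filter expression string, an aggregations array,
// and a default groupBy covering both http.url and url.full.
const v5Query = {
	filter: {
		expression:
			"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')" +
			" AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS)",
	},
	aggregations: [{ expression: 'count()' }],
	groupBy: [
		{ key: 'http.url', dataType: 'string', type: 'attribute' },
		{ key: 'url.full', dataType: 'string', type: 'attribute' },
	],
};
```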
```diff
@@ -258,7 +258,7 @@ describe('EndPointDetails Component', () => {
 			expect.objectContaining({
 				items: expect.arrayContaining([
 					expect.objectContaining({
-						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
+						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
 						value: '/api/test',
 					}),
 				]),
@@ -278,7 +278,7 @@ describe('EndPointDetails Component', () => {
 			expect.objectContaining({
 				items: expect.arrayContaining([
 					expect.objectContaining({
-						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
+						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
 						value: '/api/test',
 					}),
 				]),
@@ -360,7 +360,7 @@ describe('EndPointDetails Component', () => {
 			expect.objectContaining({
 				items: expect.arrayContaining([
 					expect.objectContaining({
-						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
+						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
 						value: '/api/test',
 					}),
 				]),
@@ -373,7 +373,7 @@ describe('EndPointDetails Component', () => {
 			expect.objectContaining({
 				items: expect.arrayContaining([
 					expect.objectContaining({
-						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
+						key: expect.objectContaining({ key: SPAN_ATTRIBUTES.URL_PATH }),
 						value: '/api/test',
 					}),
 				]),
```
```diff
@@ -191,7 +191,7 @@ describe('EndPointsDropDown Component', () => {
 
 	it('formats data using the utility function', () => {
 		const mockRows = [
-			{ data: { [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test', A: 10 } },
+			{ data: { [SPAN_ATTRIBUTES.URL_PATH]: '/api/test', A: 10 } },
 		];
 
 		const dataProps = {
```
```diff
@@ -6,18 +6,15 @@
  * These tests validate the migration from V4 to V5 format for the third payload
  * in getEndPointDetailsQueryPayload (endpoint dropdown data):
  * - Filter format change: filters.items[] → filter.expression
- * - Domain handling: http_host = '${domainName}'
+ * - Domain handling: (net.peer.name OR server.address)
  * - Kind filter: kind_string = 'Client'
- * - Existence check: http_url EXISTS
+ * - Existence check: (http.url EXISTS OR url.full EXISTS)
  * - Aggregation: count() expression
- * - GroupBy: http_url with type 'attribute'
+ * - GroupBy: Both http.url AND url.full with type 'attribute'
 */
 import { getEndPointDetailsQueryPayload } from 'container/ApiMonitoring/utils';
 import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
 
-import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
-
 describe('EndpointDropdown - V5 Migration Validation', () => {
 	const mockDomainName = 'api.example.com';
 	const mockStartTime = 1000;
@@ -46,9 +43,9 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
 		expect(typeof queryA.filter?.expression).toBe('string');
 		expect(queryA).not.toHaveProperty('filters');
 
-		// Base filter 1: Domain http_host = '${domainName}'
+		// Base filter 1: Domain (net.peer.name OR server.address)
 		expect(queryA.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Base filter 2: Kind
@@ -56,7 +53,7 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
 
 		// Base filter 3: Existence check
 		expect(queryA.filter?.expression).toContain(
-			`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+			'(http.url EXISTS OR url.full EXISTS)',
 		);
 
 		// V5 Aggregation format: aggregations array (not aggregateAttribute)
@@ -67,11 +64,16 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
 		});
 		expect(queryA).not.toHaveProperty('aggregateAttribute');
 
-		// GroupBy: http_url
-		expect(queryA.groupBy).toHaveLength(1);
+		// GroupBy: Both http.url and url.full
+		expect(queryA.groupBy).toHaveLength(2);
 		expect(queryA.groupBy).toContainEqual({
-			key: SPAN_ATTRIBUTES.HTTP_URL,
-			dataType: DataTypes.String,
+			key: 'http.url',
+			dataType: 'string',
 			type: 'attribute',
 		});
+		expect(queryA.groupBy).toContainEqual({
+			key: 'url.full',
+			dataType: 'string',
+			type: 'attribute',
+		});
 	});
@@ -118,7 +120,53 @@ describe('EndpointDropdown - V5 Migration Validation', () => {
 
 		// Exact filter expression with custom filters merged
 		expect(expression).toBe(
-			`${SPAN_ATTRIBUTES.SERVER_NAME} = 'api.example.com' AND kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS service.name = 'user-service' AND deployment.environment = 'production'`,
+			"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND deployment.environment = 'production'",
 		);
 	});
 });
+
+describe('3. HTTP URL Filter Special Handling', () => {
+	it('converts http.url filter to (http.url OR url.full) expression', () => {
+		const filtersWithHttpUrl: IBuilderQuery['filters'] = {
+			items: [
+				{
+					id: 'http-url-filter',
+					key: {
+						key: 'http.url',
+						dataType: 'string' as any,
+						type: 'tag',
+					},
+					op: '=',
+					value: '/api/users',
+				},
+				{
+					id: 'service-filter',
+					key: {
+						key: 'service.name',
+						dataType: 'string' as any,
+						type: 'resource',
+					},
+					op: '=',
+					value: 'user-service',
+				},
+			],
+			op: 'AND',
+		};
+
+		const payload = getEndPointDetailsQueryPayload(
+			mockDomainName,
+			mockStartTime,
+			mockEndTime,
+			filtersWithHttpUrl,
+		);
+
+		const dropdownQuery = payload[2];
+		const expression =
+			dropdownQuery.query.builder.queryData[0].filter?.expression;
+
+		// CRITICAL: Exact filter expression with http.url converted to OR logic
+		expect(expression).toBe(
+			"(net.peer.name = 'api.example.com' OR server.address = 'api.example.com') AND kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) service.name = 'user-service' AND (http.url = '/api/users' OR url.full = '/api/users')",
+		);
+	});
+});
```
```diff
@@ -33,7 +33,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
 		expect(queryData).not.toHaveProperty('filters.items');
 	});
 
-	it('uses new domain filter format: (http_host)', () => {
+	it('uses new domain filter format: (net.peer.name OR server.address)', () => {
 		const widget = getRateOverTimeWidgetData(
 			mockDomainName,
 			mockEndpointName,
@@ -44,7 +44,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
 
 		// Verify EXACT new filter format with OR operator
 		expect(queryData?.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Endpoint name is used in legend, not filter
@@ -90,7 +90,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
 
 		// Verify domain filter is present
 		expect(queryData?.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Verify custom filters are merged into the expression
@@ -120,7 +120,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
 		expect(queryData).not.toHaveProperty('filters.items');
 	});
 
-	it('uses new domain filter format: (http_host)', () => {
+	it('uses new domain filter format: (net.peer.name OR server.address)', () => {
 		const widget = getLatencyOverTimeWidgetData(
 			mockDomainName,
 			mockEndpointName,
@@ -132,7 +132,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
 		// Verify EXACT new filter format with OR operator
 		expect(queryData.filter).toBeDefined();
 		expect(queryData?.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Endpoint name is used in legend, not filter
@@ -166,7 +166,7 @@ describe('MetricOverTime - V5 Migration Validation', () => {
 
 		// Verify domain filter is present
 		expect(queryData?.filter?.expression).toContain(
-			`http_host = '${mockDomainName}' service.name = 'user-service'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}') service.name = 'user-service'`,
 		);
 	});
 });
```
```diff
@@ -142,6 +142,7 @@ describe('StatusCodeBarCharts', () => {
 		endTime: 1609545600000,
 	};
 	const mockDomainName = 'test-domain';
+	const mockEndPointName = '/api/test';
 	const onDragSelectMock = jest.fn();
 	const refetchFn = jest.fn();
@@ -231,6 +232,7 @@ describe('StatusCodeBarCharts', () => {
 				endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
 				endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
 				domainName={mockDomainName}
+				endPointName={mockEndPointName}
 				filters={mockFilters}
 				timeRange={mockTimeRange}
 				onDragSelect={onDragSelectMock}
@@ -266,6 +268,7 @@ describe('StatusCodeBarCharts', () => {
 				endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
 				endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
 				domainName={mockDomainName}
+				endPointName={mockEndPointName}
 				filters={mockFilters}
 				timeRange={mockTimeRange}
 				onDragSelect={onDragSelectMock}
@@ -308,6 +311,7 @@ describe('StatusCodeBarCharts', () => {
 				endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
 				endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
 				domainName={mockDomainName}
+				endPointName={mockEndPointName}
 				filters={mockFilters}
 				timeRange={mockTimeRange}
 				onDragSelect={onDragSelectMock}
@@ -352,6 +356,7 @@ describe('StatusCodeBarCharts', () => {
 				endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
 				endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
 				domainName={mockDomainName}
+				endPointName={mockEndPointName}
 				filters={mockFilters}
 				timeRange={mockTimeRange}
 				onDragSelect={onDragSelectMock}
@@ -399,6 +404,7 @@ describe('StatusCodeBarCharts', () => {
 				endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
 				endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
 				domainName={mockDomainName}
+				endPointName={mockEndPointName}
 				filters={mockFilters}
 				timeRange={mockTimeRange}
 				onDragSelect={onDragSelectMock}
@@ -413,6 +419,7 @@ describe('StatusCodeBarCharts', () => {
 		// but we've confirmed the function is mocked and ready to be tested
 		expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
 			mockDomainName,
+			mockEndPointName,
 			expect.objectContaining({
 				items: [],
 				op: 'AND',
@@ -460,6 +467,7 @@ describe('StatusCodeBarCharts', () => {
 				endPointStatusCodeBarChartsDataQuery={mockStatusCodeQuery as any}
 				endPointStatusCodeLatencyBarChartsDataQuery={mockLatencyQuery as any}
 				domainName={mockDomainName}
+				endPointName={mockEndPointName}
 				filters={mockCustomFilters as IBuilderQuery['filters']}
 				timeRange={mockTimeRange}
 				onDragSelect={onDragSelectMock}
@@ -469,6 +477,7 @@ describe('StatusCodeBarCharts', () => {
 		// Assert widget creation was called with the correct parameters
 		expect(getStatusCodeBarChartWidgetData).toHaveBeenCalledWith(
 			mockDomainName,
+			mockEndPointName,
 			expect.objectContaining({
 				items: expect.arrayContaining([
 					expect.objectContaining({ id: 'custom-filter' }),
```
```diff
@@ -10,7 +10,7 @@
  *
  * V5 Changes:
  * - Filter format change: filters.items[] → filter.expression
- * - Domain filter: (http_host)
+ * - Domain filter: (net.peer.name OR server.address)
  * - Kind filter: kind_string = 'Client'
 * - stepInterval: 60 → null
 * - Grouped by response_status_code
@@ -47,9 +47,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
 		expect(typeof queryA.filter?.expression).toBe('string');
 		expect(queryA).not.toHaveProperty('filters.items');
 
-		// Base filter 1: Domain (http_host)
+		// Base filter 1: Domain (net.peer.name OR server.address)
 		expect(queryA.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Base filter 2: Kind
@@ -96,9 +96,9 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
 		expect(typeof queryA.filter?.expression).toBe('string');
 		expect(queryA).not.toHaveProperty('filters.items');
 
-		// Base filter 1: Domain (http_host)
+		// Base filter 1: Domain (net.peer.name OR server.address)
 		expect(queryA.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Base filter 2: Kind
@@ -177,7 +177,7 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
 		expect(callsExpression).toBe(latencyExpression);
 
 		// Verify base filters
-		expect(callsExpression).toContain('http_host');
+		expect(callsExpression).toContain('net.peer.name');
 		expect(callsExpression).toContain("kind_string = 'Client'");
 
 		// Verify custom filters are merged
@@ -187,4 +187,51 @@ describe('StatusCodeBarCharts - V5 Migration Validation', () => {
 		expect(callsExpression).toContain('production');
 	});
 });
+
+describe('4. HTTP URL Filter Handling', () => {
+	it('converts http.url filter to (http.url OR url.full) expression in both charts', () => {
+		const filtersWithHttpUrl: IBuilderQuery['filters'] = {
+			items: [
+				{
+					id: 'http-url-filter',
+					key: {
+						key: 'http.url',
+						dataType: 'string' as any,
+						type: 'tag',
+					},
+					op: '=',
+					value: '/api/metrics',
+				},
+			],
+			op: 'AND',
+		};
+
+		const payload = getEndPointDetailsQueryPayload(
+			mockDomainName,
+			mockStartTime,
+			mockEndTime,
+			filtersWithHttpUrl,
+		);
+
+		const callsChartQuery = payload[4];
+		const latencyChartQuery = payload[5];
+
+		const callsExpression =
+			callsChartQuery.query.builder.queryData[0].filter?.expression;
+		const latencyExpression =
+			latencyChartQuery.query.builder.queryData[0].filter?.expression;
+
+		// CRITICAL: http.url converted to OR logic
+		expect(callsExpression).toContain(
+			"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
+		);
+		expect(latencyExpression).toContain(
+			"(http.url = '/api/metrics' OR url.full = '/api/metrics')",
+		);
+
+		// Base filters still present
+		expect(callsExpression).toContain('net.peer.name');
+		expect(callsExpression).toContain("kind_string = 'Client'");
+	});
+});
 });
```
```diff
@@ -6,8 +6,8 @@
  * These tests validate the migration from V4 to V5 format for the second payload
  * in getEndPointDetailsQueryPayload (status code table data):
  * - Filter format change: filters.items[] → filter.expression
- * - URL handling: Special logic for http_url
- * - Domain filter: http_host = '${domainName}'
+ * - URL handling: Special logic for (http.url OR url.full)
+ * - Domain filter: (net.peer.name OR server.address)
  * - Kind filter: kind_string = 'Client'
 * - Kind filter: response_status_code EXISTS
 * - Three queries: A (count), B (p99 latency), C (rate)
@@ -45,9 +45,9 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
 		expect(typeof queryA.filter?.expression).toBe('string');
 		expect(queryA).not.toHaveProperty('filters.items');
 
-		// Base filter 1: Domain (http_host)
+		// Base filter 1: Domain (net.peer.name OR server.address)
 		expect(queryA.filter?.expression).toContain(
-			`http_host = '${mockDomainName}'`,
+			`(net.peer.name = '${mockDomainName}' OR server.address = '${mockDomainName}')`,
 		);
 
 		// Base filter 2: Kind
@@ -149,7 +149,7 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
 			statusCodeQuery.query.builder.queryData[0].filter?.expression;
 
 		// Base filters present
-		expect(expression).toContain('http_host');
+		expect(expression).toContain('net.peer.name');
 		expect(expression).toContain("kind_string = 'Client'");
 		expect(expression).toContain('response_status_code EXISTS');
@@ -165,4 +165,62 @@ describe('StatusCodeTable - V5 Migration Validation', () => {
 		expect(queries[1].filter?.expression).toBe(queries[2].filter?.expression);
 	});
 });
+
+describe('4. HTTP URL Filter Handling', () => {
+	it('converts http.url filter to (http.url OR url.full) expression', () => {
+		const filtersWithHttpUrl: IBuilderQuery['filters'] = {
+			items: [
+				{
+					id: 'http-url-filter',
+					key: {
+						key: 'http.url',
+						dataType: 'string' as any,
+						type: 'tag',
+					},
+					op: '=',
+					value: '/api/users',
+				},
+				{
+					id: 'service-filter',
+					key: {
+						key: 'service.name',
+						dataType: 'string' as any,
+						type: 'resource',
+					},
+					op: '=',
+					value: 'user-service',
+				},
+			],
+			op: 'AND',
+		};
+
+		const payload = getEndPointDetailsQueryPayload(
+			mockDomainName,
+			mockStartTime,
+			mockEndTime,
+			filtersWithHttpUrl,
+		);
+
+		const statusCodeQuery = payload[1];
+		const expression =
+			statusCodeQuery.query.builder.queryData[0].filter?.expression;
+
+		// CRITICAL: http.url converted to OR logic
+		expect(expression).toContain(
+			"(http.url = '/api/users' OR url.full = '/api/users')",
+		);
+
+		// Other filters still present
+		expect(expression).toContain('service.name');
+		expect(expression).toContain('user-service');
+
+		// Base filters present
+		expect(expression).toContain('net.peer.name');
+		expect(expression).toContain("kind_string = 'Client'");
+		expect(expression).toContain('response_status_code EXISTS');
+
+		// All ANDed together (at least 2 ANDs: domain+kind, custom filter, url condition)
+		expect(expression?.match(/AND/g)?.length).toBeGreaterThanOrEqual(2);
+	});
+});
 });
```
```diff
@@ -4,7 +4,6 @@ import { rest, server } from 'mocks-server/server';
 import { fireEvent, render, screen, waitFor, within } from 'tests/test-utils';
 import { DataSource } from 'types/common/queryBuilder';
 
-import { SPAN_ATTRIBUTES } from '../Explorer/Domains/DomainDetails/constants';
 import TopErrors from '../Explorer/Domains/DomainDetails/TopErrors';
 import { getTopErrorsQueryPayload } from '../utils';
@@ -84,7 +83,7 @@ describe('TopErrors', () => {
 	{
 		columns: [
 			{
-				name: SPAN_ATTRIBUTES.HTTP_URL,
+				name: 'http.url',
 				fieldDataType: 'string',
 				fieldContext: 'attribute',
 			},
@@ -124,7 +123,7 @@ describe('TopErrors', () => {
 	table: {
 		rows: [
 			{
-				http_url: '/api/test',
+				'http.url': '/api/test',
 				A: 100,
 			},
 		],
@@ -206,7 +205,7 @@ describe('TopErrors', () => {
 		expect(navigateMock).toHaveBeenCalledWith({
 			filters: expect.arrayContaining([
 				expect.objectContaining({
-					key: expect.objectContaining({ key: SPAN_ATTRIBUTES.HTTP_URL }),
+					key: expect.objectContaining({ key: 'http.url' }),
 					op: '=',
 					value: '/api/test',
 				}),
@@ -216,7 +215,7 @@ describe('TopErrors', () => {
 					value: 'true',
 				}),
 				expect.objectContaining({
-					key: expect.objectContaining({ key: 'http_host' }),
+					key: expect.objectContaining({ key: 'net.peer.name' }),
 					op: '=',
 					value: 'test-domain',
 				}),
@@ -335,7 +334,7 @@ describe('TopErrors', () => {
 
 		// Verify all required filters are present
 		expect(filterExpression).toContain(
-			`kind_string = 'Client' AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS AND ${SPAN_ATTRIBUTES.SERVER_NAME} = 'test-domain' AND has_error = true`,
+			`kind_string = 'Client' AND (http.url EXISTS OR url.full EXISTS) AND (net.peer.name = 'test-domain' OR server.address = 'test-domain') AND has_error = true`,
 		);
 	});
 });
```
```diff
@@ -15,6 +15,7 @@ import { getWidgetQueryBuilder } from 'container/MetricsApplication/MetricsAppli
 import { convertNanoToMilliseconds } from 'container/MetricsExplorer/Summary/utils';
 import dayjs from 'dayjs';
 import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
+import { RowData } from 'lib/query/createTableColumnsFromQuery';
 import { cloneDeep } from 'lodash-es';
 import { ArrowUpDown, ChevronDown, ChevronRight, Info } from 'lucide-react';
 import { getWidgetQuery } from 'pages/MessagingQueues/MQDetails/MetricPage/MetricPageUtil';
```
```diff
@@ -56,12 +57,12 @@ export const getDisplayValue = (value: unknown): string =>
 	isEmptyFilterValue(value) ? '-' : String(value);
 
 export const getDomainNameFilterExpression = (domainName: string): string =>
-	`http_host = '${domainName}'`;
+	`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
 
 export const clientKindExpression = `kind_string = 'Client'`;
 
 /**
- * Converts filters to expression
+ * Converts filters to expression, handling http.url specially by creating (http.url OR url.full) condition
 * @param filters Filters to convert
 * @param baseExpression Base expression to combine with filters
 * @returns Filter expression string
```
```diff
@@ -74,6 +75,34 @@ export const convertFiltersWithUrlHandling = (
 		return baseExpression;
 	}
 
+	// Check if filters contain http.url (SPAN_ATTRIBUTES.URL_PATH)
+	const httpUrlFilter = filters.items?.find(
+		(item) => item.key?.key === SPAN_ATTRIBUTES.URL_PATH,
+	);
+
+	// If http.url filter exists, create modified filters with (http.url OR url.full)
+	if (httpUrlFilter && httpUrlFilter.value) {
+		// Remove ALL http.url filters from items (guards against duplicates)
+		const otherFilters = filters.items?.filter(
+			(item) => item.key?.key !== SPAN_ATTRIBUTES.URL_PATH,
+		);
+
+		// Convert to expression first with other filters
+		const {
+			filter: intermediateFilter,
+		} = convertFiltersToExpressionWithExistingQuery(
+			{ ...filters, items: otherFilters || [] },
+			baseExpression,
+		);
+
+		// Add the OR condition for http.url and url.full
+		const urlValue = httpUrlFilter.value;
+		const urlCondition = `(http.url = '${urlValue}' OR url.full = '${urlValue}')`;
+		return intermediateFilter.expression.trim()
+			? `${intermediateFilter.expression} AND ${urlCondition}`
+			: urlCondition;
+	}
+
 	const { filter } = convertFiltersToExpressionWithExistingQuery(
 		filters,
 		baseExpression,
```
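A usage sketch for the helper above, mirroring the "HTTP URL Filter Special Handling" tests earlier in this changeset: the `http.url` item is lifted out of the structured filters and re-emitted as an `(http.url OR url.full)` condition appended to the expression.

```typescript
// Sketch only: the helpers come from the function and constants in this diff.
const expression = convertFiltersWithUrlHandling(
	{
		op: 'AND',
		items: [
			{
				id: 'http-url-filter',
				key: { key: 'http.url', dataType: 'string' as any, type: 'tag' },
				op: '=',
				value: '/api/users',
			},
		],
	},
	`${getDomainNameFilterExpression('api.example.com')} AND ${clientKindExpression}`,
);
// expression now ends with:
//   ... AND (http.url = '/api/users' OR url.full = '/api/users')
```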
```diff
@@ -342,7 +371,7 @@ export const formatDataForTable = (
 	});
 };
 
-const urlExpression = `${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`;
+const urlExpression = `(url.full EXISTS OR http.url EXISTS)`;
 
 export const getDomainMetricsQueryPayload = (
 	domainName: string,
```
```diff
@@ -559,7 +588,14 @@ const defaultGroupBy = [
 		dataType: DataTypes.String,
 		isColumn: false,
 		isJSON: false,
-		key: SPAN_ATTRIBUTES.HTTP_URL,
+		key: SPAN_ATTRIBUTES.URL_PATH,
 		type: 'attribute',
 	},
+	{
+		dataType: DataTypes.String,
+		isColumn: false,
+		isJSON: false,
+		key: 'url.full',
+		type: 'attribute',
+	},
 	// {
```
```diff
@@ -831,8 +867,8 @@ function buildFilterExpression(
 ): string {
 	const baseFilterParts = [
 		`kind_string = 'Client'`,
-		`${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
-		`${SPAN_ATTRIBUTES.SERVER_NAME} = '${domainName}'`,
+		`(http.url EXISTS OR url.full EXISTS)`,
+		`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
 		`has_error = true`,
 	];
 	if (showStatusCodeErrors) {
```
```diff
@@ -874,7 +910,12 @@ export const getTopErrorsQueryPayload = (
 			filter: { expression: filterExpression },
 			groupBy: [
 				{
-					name: SPAN_ATTRIBUTES.HTTP_URL,
+					name: 'http.url',
 					fieldDataType: 'string',
 					fieldContext: 'attribute',
 				},
+				{
+					name: 'url.full',
+					fieldDataType: 'string',
+					fieldContext: 'attribute',
+				},
```
```diff
@@ -1093,11 +1134,11 @@ export const formatEndPointsDataForTable = (
 	if (!isGroupedByAttribute) {
 		formattedData = data?.map((endpoint) => {
 			const { port } = extractPortAndEndpoint(
-				(endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '',
+				(endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '',
 			);
 			return {
 				key: v4(),
-				endpointName: (endpoint.data[SPAN_ATTRIBUTES.HTTP_URL] as string) || '-',
+				endpointName: (endpoint.data[SPAN_ATTRIBUTES.URL_PATH] as string) || '-',
 				port,
 				callCount:
 					endpoint.data.A === 'n/a' || endpoint.data.A === undefined
```
```diff
@@ -1221,7 +1262,9 @@ export const formatTopErrorsDataForTable = (
 
 	return {
 		key: v4(),
-		endpointName: getDisplayValue(rowObj[SPAN_ATTRIBUTES.HTTP_URL]),
+		endpointName: getDisplayValue(
+			rowObj[SPAN_ATTRIBUTES.URL_PATH] || rowObj['url.full'],
+		),
 		statusCode: getDisplayValue(rowObj[SPAN_ATTRIBUTES.RESPONSE_STATUS_CODE]),
 		statusMessage: getDisplayValue(rowObj.status_message),
 		count: getDisplayValue(rowObj.__result_0),
```
```diff
@@ -1238,10 +1281,10 @@ export const getTopErrorsCoRelationQueryFilters = (
 	{
 		id: 'ea16470b',
 		key: {
-			key: SPAN_ATTRIBUTES.HTTP_URL,
+			key: 'http.url',
 			dataType: DataTypes.String,
 			type: 'tag',
-			id: `${SPAN_ATTRIBUTES.HTTP_URL}--string--tag--false`,
+			id: 'http.url--string--tag--false',
 		},
 		op: '=',
 		value: endPointName,
@@ -1259,7 +1302,7 @@ export const getTopErrorsCoRelationQueryFilters = (
 	{
 		id: 'e8a043b7',
 		key: {
-			key: SPAN_ATTRIBUTES.SERVER_NAME,
+			key: 'net.peer.name',
 			dataType: DataTypes.String,
 			type: '',
 		},
```
```diff
@@ -1738,7 +1781,7 @@ export const getEndPointDetailsQueryPayload = (
 					filters || { items: [], op: 'AND' },
 					`${getDomainNameFilterExpression(
 						domainName,
-					)} AND ${clientKindExpression} AND ${SPAN_ATTRIBUTES.HTTP_URL} EXISTS`,
+					)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 				),
 			},
 			expression: 'A',
@@ -1750,7 +1793,12 @@ export const getEndPointDetailsQueryPayload = (
 			orderBy: [],
 			groupBy: [
 				{
-					key: SPAN_ATTRIBUTES.HTTP_URL,
+					key: SPAN_ATTRIBUTES.URL_PATH,
 					dataType: DataTypes.String,
 					type: 'attribute',
 				},
+				{
+					key: 'url.full',
+					dataType: DataTypes.String,
+					type: 'attribute',
+				},
```
```diff
@@ -2177,7 +2225,7 @@ export const getEndPointZeroStateQueryPayload = (
 			orderBy: [],
 			groupBy: [
 				{
-					key: SPAN_ATTRIBUTES.HTTP_URL,
+					key: SPAN_ATTRIBUTES.URL_PATH,
 					dataType: DataTypes.String,
 					type: 'tag',
 				},
```
```diff
@@ -2371,7 +2419,8 @@ export const statusCodeWidgetInfo = [
 
 interface EndPointDropDownResponseRow {
 	data: {
-		[SPAN_ATTRIBUTES.HTTP_URL]: string;
+		[SPAN_ATTRIBUTES.URL_PATH]: string;
+		'url.full': string;
 		A: number;
 	};
 }
```
```diff
@@ -2390,8 +2439,8 @@ export const getFormattedEndPointDropDownData = (
 	}
 	return data.map((row) => ({
 		key: v4(),
-		label: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
-		value: row.data[SPAN_ATTRIBUTES.HTTP_URL] || '-',
+		label: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
+		value: row.data[SPAN_ATTRIBUTES.URL_PATH] || row.data['url.full'] || '-',
 	}));
 };
```
```diff
@@ -2720,6 +2769,7 @@ export const groupStatusCodes = (
 
 export const getStatusCodeBarChartWidgetData = (
 	domainName: string,
+	endPointName: string,
 	filters: IBuilderQuery['filters'],
 ): Widgets => ({
 	query: {
@@ -2748,6 +2798,20 @@ export const getStatusCodeBarChartWidgetData = (
 						op: '=',
 						value: domainName,
 					},
+					...(endPointName
+						? [
+								{
+									id: '8b1be6f0',
+									key: {
+										dataType: DataTypes.String,
+										key: SPAN_ATTRIBUTES.URL_PATH,
+										type: 'tag',
+									},
+									op: '=',
+									value: endPointName,
+								},
+						  ]
+						: []),
 					...(filters?.items || []),
 				],
 				op: 'AND',
```
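With the new `endPointName` parameter, callers pass the endpoint explicitly; a call shape consistent with the updated component and tests in this changeset:

```typescript
// Sketch only: getStatusCodeBarChartWidgetData is the function above.
const widget = getStatusCodeBarChartWidgetData('api.example.com', '/api/test', {
	items: [],
	op: 'AND',
});
// When endPointName is non-empty, an http.url = '/api/test' filter item is
// spliced into the builder query alongside the domain filter.
```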
```diff
@@ -2869,7 +2933,7 @@ export const getAllEndpointsWidgetData = (
 					filters,
 					`${getDomainNameFilterExpression(
 						domainName,
-					)} AND ${clientKindExpression} AND http_url EXISTS`,
+					)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 				),
 			},
 			functions: [],
@@ -2901,7 +2965,7 @@ export const getAllEndpointsWidgetData = (
 					filters,
 					`${getDomainNameFilterExpression(
 						domainName,
-					)} AND ${clientKindExpression} AND http_url EXISTS`,
+					)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 				),
 			},
 			functions: [],
@@ -2933,7 +2997,7 @@ export const getAllEndpointsWidgetData = (
 					filters,
 					`${getDomainNameFilterExpression(
 						domainName,
-					)} AND ${clientKindExpression} AND http_url EXISTS`,
+					)} AND ${clientKindExpression} AND (http.url EXISTS OR url.full EXISTS)`,
 				),
 			},
 			functions: [],
@@ -2965,7 +3029,7 @@ export const getAllEndpointsWidgetData = (
 					filters,
 					`${getDomainNameFilterExpression(
 						domainName,
-					)} AND ${clientKindExpression} AND has_error = true AND http_url EXISTS`,
+					)} AND ${clientKindExpression} AND has_error = true AND (http.url EXISTS OR url.full EXISTS)`,
 				),
 			},
 			functions: [],
```
```diff
@@ -2996,12 +3060,24 @@ export const getAllEndpointsWidgetData = (
 	);
 
 	widget.renderColumnCell = {
-		[SPAN_ATTRIBUTES.HTTP_URL]: (url: string | number): ReactNode => {
-			if (isEmptyFilterValue(url) || !url || url === 'n/a') {
+		[SPAN_ATTRIBUTES.URL_PATH]: (
+			url: string | number,
+			record?: RowData,
+		): ReactNode => {
+			// First try to use the url from the column value
+			let urlValue = url;
+
+			// If url is empty/null and we have the record, fallback to url.full
+			if (isEmptyFilterValue(url) && record) {
+				const { 'url.full': urlFull } = record;
+				urlValue = urlFull;
+			}
+
+			if (!urlValue || urlValue === 'n/a') {
 				return <span>-</span>;
 			}
 
-			const { endpoint } = extractPortAndEndpoint(String(url));
+			const { endpoint } = extractPortAndEndpoint(String(urlValue));
 			return <span>{getDisplayValue(endpoint)}</span>;
 		},
 		A: (numOfCalls: any): ReactNode => (
```
@@ -3056,8 +3132,8 @@ export const getAllEndpointsWidgetData = (
|
||||
};
|
||||
|
||||
widget.customColTitles = {
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: 'Endpoint',
|
||||
[SPAN_ATTRIBUTES.SERVER_PORT]: 'Port',
|
||||
[SPAN_ATTRIBUTES.URL_PATH]: 'Endpoint',
|
||||
'net.peer.port': 'Port',
|
||||
};
|
||||
|
||||
widget.title = (
|
||||
@@ -3082,10 +3158,12 @@ export const getAllEndpointsWidgetData = (
|
||||
</div>
|
||||
);
|
||||
|
||||
widget.hiddenColumns = ['url.full'];
|
||||
|
||||
return widget;
|
||||
};
|
||||
|
||||
const keysToRemove = [SPAN_ATTRIBUTES.HTTP_URL, 'A', 'B', 'C', 'F1'];
|
||||
const keysToRemove = ['http.url', 'url.full', 'A', 'B', 'C', 'F1'];
|
||||
|
||||
export const getGroupByFiltersFromGroupByValues = (
|
||||
rowData: any,
|
||||
@@ -3143,7 +3221,7 @@ export const getRateOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`http_host = '${domainName}'`,
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
@@ -3194,7 +3272,7 @@ export const getLatencyOverTimeWidgetData = (
|
||||
filter: {
|
||||
expression: convertFiltersWithUrlHandling(
|
||||
filters || { items: [], op: 'AND' },
|
||||
`http_host = '${domainName}'`,
|
||||
`(net.peer.name = '${domainName}' OR server.address = '${domainName}')`,
|
||||
),
|
||||
},
|
||||
functions: [],
|
||||
|
||||
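The four `getAllEndpointsWidgetData` queries above widen the existence check from the legacy `http_url` key to either OTel convention (`http.url` or `url.full`), and the rate/latency widgets replace the `http_host` equality with a disjunction over `net.peer.name` and `server.address`. A sketch of the expression the new template literal yields (the helper name here is hypothetical; the diff inlines the string directly):

```ts
// Mirrors the template literal passed to convertFiltersWithUrlHandling above.
function domainFilterExpression(domainName: string): string {
  return `(net.peer.name = '${domainName}' OR server.address = '${domainName}')`;
}

domainFilterExpression('api.example.com');
// => "(net.peer.name = 'api.example.com' OR server.address = 'api.example.com')"
```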
@@ -9,74 +9,6 @@
padding: 0px;
}

.dashboard-header {
border-bottom: 1px solid var(--bg-slate-400);
display: flex;
justify-content: space-between;
gap: 16px;
align-items: center;
padding: 0 8px;
box-sizing: border-box;

.dashboard-breadcrumbs {
width: 100%;
height: 48px;
display: flex;
gap: 6px;
align-items: center;
max-width: 80%;

.dashboard-btn {
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: -0.07px;
padding: 0px;
height: 20px;
}

.dashboard-btn:hover {
background-color: unset;
}

.id-btn {
display: flex;
align-items: center;
padding: 0px 2px;
border-radius: 2px;
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
height: 20px;

max-width: calc(100% - 120px);

span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}

.ant-btn-icon {
margin-inline-end: 4px;
}
}
.id-btn:hover {
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-300);
}
}
}

.dashboard-details {
display: flex;
justify-content: space-between;
@@ -535,15 +467,6 @@
.dashboard-description-container {
color: var(--bg-ink-400);

.dashboard-header {
border-bottom: 1px solid var(--bg-vanilla-300);
.dashboard-breadcrumbs {
.dashboard-btn {
color: var(--bg-ink-400);
}
}
}

.dashboard-details {
.left-section {
.dashboard-title {

@@ -16,9 +16,7 @@ import {
} from 'antd';
import logEvent from 'api/common/logEvent';
import ConfigureIcon from 'assets/Integrations/ConfigureIcon';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
@@ -27,7 +25,6 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import {
Check,
@@ -37,7 +34,6 @@ import {
FolderKanban,
Fullscreen,
Globe,
LayoutGrid,
LockKeyhole,
PenLine,
X,
@@ -51,6 +47,7 @@ import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
import { v4 as uuid } from 'uuid';

import DashboardHeader from '../components/DashboardHeader/DashboardHeader';
import DashboardGraphSlider from '../ComponentsSlider';
import DashboardSettings from '../DashboardSettings';
import { Base64Icons } from '../DashboardSettings/General/utils';
@@ -71,7 +68,6 @@ interface DashboardDescriptionProps {

// eslint-disable-next-line sonarjs/cognitive-complexity
function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
const { safeNavigate } = useSafeNavigate();
const { handle } = props;
const {
selectedDashboard,
@@ -80,7 +76,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
layouts,
setLayouts,
isDashboardLocked,
listSortOrder,
setSelectedDashboard,
handleToggleDashboardSlider,
setSelectedRowWidgetId,
@@ -292,17 +287,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
});
}

function goToListPage(): void {
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);

const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}

const {
data: publicDashboardResponse,
isLoading: isLoadingPublicDashboardData,
@@ -351,32 +335,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {

return (
<Card className="dashboard-description-container">
<div className="dashboard-header">
<section className="dashboard-breadcrumbs">
<Button
type="text"
icon={<LayoutGrid size={14} />}
className="dashboard-btn"
onClick={(): void => goToListPage()}
>
Dashboard /
</Button>
<Button type="text" className="id-btn dashboard-name-btn">
<img
src={image}
alt="dashboard-icon"
style={{ height: '14px', width: '14px' }}
/>
{title}
</Button>
</section>

<HeaderRightSection
enableAnnouncements={false}
enableShare
enableFeedback
/>
</div>
<DashboardHeader />
<section className="dashboard-details">
<div className="left-section">
<img src={image} alt="dashboard-img" className="dashboard-img" />

@@ -0,0 +1,71 @@
.dashboard-breadcrumbs {
width: 100%;
height: 48px;
display: flex;
gap: 6px;
align-items: center;
max-width: 80%;

.dashboard-btn {
display: flex;
align-items: center;
color: var(--bg-vanilla-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
letter-spacing: -0.07px;
padding: 0px;
height: 20px;
}

.dashboard-btn:hover {
background-color: unset;
}

.id-btn {
display: flex;
align-items: center;
gap: 4px;
padding: 0px 2px;
border-radius: 2px;
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-400);
font-family: Inter;
font-size: 14px;
font-style: normal;
font-weight: 400;
line-height: 20px; /* 142.857% */
height: 20px;

max-width: calc(100% - 120px);

span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}

.ant-btn-icon {
margin-inline-end: 4px;
}
}
.id-btn:hover {
background: rgba(113, 144, 249, 0.1);
color: var(--bg-robin-300);
}

.dashboard-icon-image {
height: 14px;
width: 14px;
}
}

.lightMode {
.dashboard-breadcrumbs {
.dashboard-btn {
color: var(--bg-ink-400);
}
}
}
@@ -0,0 +1,55 @@
import { useCallback } from 'react';
import { Button } from 'antd';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { LayoutGrid } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { DashboardData } from 'types/api/dashboard/getAll';

import { Base64Icons } from '../../DashboardSettings/General/utils';

import './DashboardBreadcrumbs.styles.scss';

function DashboardBreadcrumbs(): JSX.Element {
const { safeNavigate } = useSafeNavigate();
const { selectedDashboard, listSortOrder } = useDashboard();

const selectedData = selectedDashboard
? {
...selectedDashboard.data,
uuid: selectedDashboard.id,
}
: ({} as DashboardData);

const { title = '', image = Base64Icons[0] } = selectedData || {};

const goToListPage = useCallback(() => {
const urlParams = new URLSearchParams();
urlParams.set('columnKey', listSortOrder.columnKey as string);
urlParams.set('order', listSortOrder.order as string);
urlParams.set('page', listSortOrder.pagination as string);
urlParams.set('search', listSortOrder.search as string);

const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
safeNavigate(generatedUrl);
}, [listSortOrder, safeNavigate]);

return (
<div className="dashboard-breadcrumbs">
<Button
type="text"
icon={<LayoutGrid size={14} />}
className="dashboard-btn"
onClick={goToListPage}
>
Dashboard /
</Button>
<Button type="text" className="id-btn dashboard-name-btn">
<img src={image} alt="dashboard-icon" className="dashboard-icon-image" />
{title}
</Button>
</div>
);
}

export default DashboardBreadcrumbs;
@@ -0,0 +1,15 @@
.dashboard-header {
border-bottom: 1px solid var(--bg-slate-400);
display: flex;
justify-content: space-between;
gap: 16px;
align-items: center;
padding: 0 8px;
box-sizing: border-box;
}

.lightMode {
.dashboard-header {
border-bottom: 1px solid var(--bg-vanilla-300);
}
}
@@ -0,0 +1,17 @@
import { memo } from 'react';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';

import DashboardBreadcrumbs from './DashboardBreadcrumbs';

import './DashboardHeader.styles.scss';

function DashboardHeader(): JSX.Element {
return (
<div className="dashboard-header">
<DashboardBreadcrumbs />
<HeaderRightSection enableAnnouncements={false} enableShare enableFeedback />
</div>
);
}

export default memo(DashboardHeader);
@@ -23,6 +23,7 @@ export default function ChartWrapper({
width: containerWidth,
height: containerHeight,
showTooltip = true,
showLegend = true,
canPinTooltip = false,
syncMode,
syncKey,
@@ -36,6 +37,9 @@ export default function ChartWrapper({

const legendComponent = useCallback(
(averageLegendWidth: number): React.ReactNode => {
if (!showLegend) {
return null;
}
return (
<Legend
config={config}
@@ -44,7 +48,7 @@ export default function ChartWrapper({
/>
);
},
[config, legendConfig.position],
[config, legendConfig.position, showLegend],
);

const renderTooltipCallback = useCallback(
@@ -60,6 +64,7 @@ export default function ChartWrapper({
return (
<PlotContextProvider>
<ChartLayout
showLegend={showLegend}
config={config}
containerWidth={containerWidth}
containerHeight={containerHeight}

@@ -0,0 +1,55 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
HistogramTooltipProps,
TooltipRenderArgs,
} from 'lib/uPlotV2/components/types';

import { HistogramChartProps } from '../types';

export default function Histogram(props: HistogramChartProps): JSX.Element {
const {
children,
renderTooltip: customRenderTooltip,
isQueriesMerged,
...rest
} = props;

const renderTooltip = useCallback(
(props: TooltipRenderArgs): React.ReactNode => {
if (customRenderTooltip) {
return customRenderTooltip(props);
}
const content = buildTooltipContent({
data: props.uPlotInstance.data,
series: props.uPlotInstance.series,
dataIndexes: props.dataIndexes,
activeSeriesIndex: props.seriesIndex,
uPlotInstance: props.uPlotInstance,
yAxisUnit: rest.yAxisUnit ?? '',
decimalPrecision: rest.decimalPrecision,
});
const tooltipProps: HistogramTooltipProps = {
...props,
timezone: rest.timezone,
yAxisUnit: rest.yAxisUnit,
decimalPrecision: rest.decimalPrecision,
content,
};
return <HistogramTooltip {...tooltipProps} />;
},
[customRenderTooltip, rest.timezone, rest.yAxisUnit, rest.decimalPrecision],
);

return (
<ChartWrapper
showLegend={!isQueriesMerged}
{...rest}
renderTooltip={renderTooltip}
>
{children}
</ChartWrapper>
);
}
@@ -7,6 +7,7 @@ interface BaseChartProps {
width: number;
height: number;
showTooltip?: boolean;
showLegend?: boolean;
timezone: string;
canPinTooltip?: boolean;
yAxisUnit?: string;
@@ -17,6 +18,7 @@ interface BaseChartProps {
interface UPlotBasedChartProps {
config: UPlotConfigBuilder;
data: uPlot.AlignedData;
legendConfig: LegendConfig;
syncMode?: DashboardCursorSync;
syncKey?: string;
plotRef?: (plot: uPlot | null) => void;
@@ -26,14 +28,20 @@ interface UPlotBasedChartProps {
}

export interface TimeSeriesChartProps
extends BaseChartProps,
UPlotBasedChartProps {}

export interface HistogramChartProps
extends BaseChartProps,
UPlotBasedChartProps {
legendConfig: LegendConfig;
isQueriesMerged?: boolean;
}

export interface BarChartProps extends BaseChartProps, UPlotBasedChartProps {
legendConfig: LegendConfig;
isStackedBarChart?: boolean;
}

export type ChartProps = TimeSeriesChartProps | BarChartProps;
export type ChartProps =
| TimeSeriesChartProps
| BarChartProps
| HistogramChartProps;

@@ -0,0 +1,69 @@
import { renderHook } from '@testing-library/react';
import { useDashboard } from 'providers/Dashboard/Dashboard';

import { useScrollWidgetIntoView } from '../useScrollWidgetIntoView';

jest.mock('providers/Dashboard/Dashboard');

type MockHTMLElement = {
scrollIntoView: jest.Mock;
focus: jest.Mock;
};

function createMockElement(): MockHTMLElement {
return {
scrollIntoView: jest.fn(),
focus: jest.fn(),
};
}

describe('useScrollWidgetIntoView', () => {
const mockedUseDashboard = useDashboard as jest.MockedFunction<
typeof useDashboard
>;

beforeEach(() => {
jest.clearAllMocks();
});

it('scrolls into view and focuses when toScrollWidgetId matches widget id', () => {
const setToScrollWidgetId = jest.fn();
const mockElement = createMockElement();
const ref = ({
current: mockElement,
} as unknown) as React.RefObject<HTMLDivElement>;

mockedUseDashboard.mockReturnValue(({
toScrollWidgetId: 'widget-id',
setToScrollWidgetId,
} as unknown) as ReturnType<typeof useDashboard>);

renderHook(() => useScrollWidgetIntoView('widget-id', ref));

expect(mockElement.scrollIntoView).toHaveBeenCalledWith({
behavior: 'smooth',
block: 'center',
});
expect(mockElement.focus).toHaveBeenCalled();
expect(setToScrollWidgetId).toHaveBeenCalledWith('');
});

it('does nothing when toScrollWidgetId does not match widget id', () => {
const setToScrollWidgetId = jest.fn();
const mockElement = createMockElement();
const ref = ({
current: mockElement,
} as unknown) as React.RefObject<HTMLDivElement>;

mockedUseDashboard.mockReturnValue(({
toScrollWidgetId: 'other-widget',
setToScrollWidgetId,
} as unknown) as ReturnType<typeof useDashboard>);

renderHook(() => useScrollWidgetIntoView('widget-id', ref));

expect(mockElement.scrollIntoView).not.toHaveBeenCalled();
expect(mockElement.focus).not.toHaveBeenCalled();
expect(setToScrollWidgetId).not.toHaveBeenCalled();
});
});
@@ -0,0 +1,26 @@
import { RefObject, useEffect } from 'react';
import { useDashboard } from 'providers/Dashboard/Dashboard';

/**
* Scrolls the given widget container into view when the dashboard
* requests it via `toScrollWidgetId`.
*
* Intended for use in panel components that render a single widget.
*/
export function useScrollWidgetIntoView<T extends HTMLElement>(
widgetId: string,
widgetContainerRef: RefObject<T>,
): void {
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();

useEffect(() => {
if (toScrollWidgetId === widgetId) {
widgetContainerRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
widgetContainerRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widgetId, widgetContainerRef]);
}
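Usage of the hook above, mirroring how the BarPanel and TimeSeriesPanel diffs below consume it — a minimal panel sketch (panel internals elided):

```tsx
import { useRef } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';

function ExamplePanel({ widgetId }: { widgetId: string }): JSX.Element {
  const graphRef = useRef<HTMLDivElement>(null);
  // Scrolls and focuses this container when the dashboard sets toScrollWidgetId.
  useScrollWidgetIntoView(widgetId, graphRef);
  return <div ref={graphRef} tabIndex={-1} />;
}
```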
@@ -1,12 +1,14 @@
import { useMemo } from 'react';
import cx from 'classnames';
import { calculateChartDimensions } from 'container/DashboardContainer/visualization/charts/utils';
import { MAX_LEGEND_WIDTH } from 'lib/uPlotV2/components/Legend/Legend';
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

import './ChartLayout.styles.scss';

export interface ChartLayoutProps {
showLegend?: boolean;
legendComponent: (legendPerSet: number) => React.ReactNode;
children: (props: {
chartWidth: number;
@@ -20,6 +22,7 @@ export interface ChartLayoutProps {
config: UPlotConfigBuilder;
}
export default function ChartLayout({
showLegend = true,
legendComponent,
children,
layoutChildren,
@@ -30,6 +33,15 @@ export default function ChartLayout({
}: ChartLayoutProps): JSX.Element {
const chartDimensions = useMemo(
() => {
if (!showLegend) {
return {
width: containerWidth,
height: containerHeight,
legendWidth: 0,
legendHeight: 0,
averageLegendWidth: MAX_LEGEND_WIDTH,
};
}
const legendItemsMap = config.getLegendItems();
const seriesLabels = Object.values(legendItemsMap)
.map((item) => item.label)
@@ -42,7 +54,7 @@ export default function ChartLayout({
});
},
// eslint-disable-next-line react-hooks/exhaustive-deps
[containerWidth, containerHeight, legendConfig],
[containerWidth, containerHeight, legendConfig, showLegend],
);

return (
@@ -60,15 +72,17 @@ export default function ChartLayout({
averageLegendWidth: chartDimensions.averageLegendWidth,
})}
</div>
<div
className="chart-layout__legend-wrapper"
style={{
height: chartDimensions.legendHeight,
width: chartDimensions.legendWidth,
}}
>
{legendComponent(chartDimensions.averageLegendWidth)}
</div>
{showLegend && (
<div
className="chart-layout__legend-wrapper"
style={{
height: chartDimensions.legendHeight,
width: chartDimensions.legendWidth,
}}
>
{legendComponent(chartDimensions.averageLegendWidth)}
</div>
)}
</div>
{layoutChildren}
</div>

@@ -1,10 +1,10 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import ContextMenu from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
@@ -27,7 +27,6 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
onToggleModelHandler,
} = props;
const uPlotRef = useRef<uPlot | null>(null);
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -36,16 +35,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();

useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useScrollWidgetIntoView(widget.id, graphRef);

useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);

@@ -0,0 +1,114 @@
import { useMemo, useRef } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { DashboardCursorSync } from 'lib/uPlotV2/plugins/TooltipPlugin/types';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';

import Histogram from '../../charts/Histogram/Histogram';
import ChartManager from '../../components/ChartManager/ChartManager';
import {
prepareHistogramPanelConfig,
prepareHistogramPanelData,
} from './utils';

import '../Panel.styles.scss';

function HistogramPanel(props: PanelWrapperProps): JSX.Element {
const {
panelMode,
queryResponse,
widget,
isFullViewMode,
onToggleModelHandler,
} = props;
const uPlotRef = useRef<uPlot | null>(null);
const graphRef = useRef<HTMLDivElement>(null);
const containerDimensions = useResizeObserver(graphRef);

const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();

useScrollWidgetIntoView(widget.id, graphRef);

const config = useMemo(() => {
return prepareHistogramPanelConfig({
widget,
isDarkMode,
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
panelMode,
});
}, [widget, isDarkMode, queryResponse?.data?.payload, panelMode]);

const chartData = useMemo(() => {
if (!queryResponse?.data?.payload) {
return [];
}
return prepareHistogramPanelData({
apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
bucketWidth: widget?.bucketWidth,
bucketCount: widget?.bucketCount,
mergeAllActiveQueries: widget?.mergeAllActiveQueries,
});
}, [
queryResponse?.data?.payload,
widget?.bucketWidth,
widget?.bucketCount,
widget?.mergeAllActiveQueries,
]);

const layoutChildren = useMemo(() => {
if (!isFullViewMode || widget.mergeAllActiveQueries) {
return null;
}
return (
<ChartManager
config={config}
alignedData={chartData}
yAxisUnit={widget.yAxisUnit}
onCancel={onToggleModelHandler}
/>
);
}, [
isFullViewMode,
config,
chartData,
widget.yAxisUnit,
onToggleModelHandler,
widget.mergeAllActiveQueries,
]);

return (
<div className="panel-container" ref={graphRef}>
{containerDimensions.width > 0 && containerDimensions.height > 0 && (
<Histogram
config={config}
legendConfig={{
position: widget?.legendPosition ?? LegendPosition.BOTTOM,
}}
plotRef={(plot: uPlot | null): void => {
uPlotRef.current = plot;
}}
onDestroy={(): void => {
uPlotRef.current = null;
}}
isQueriesMerged={widget.mergeAllActiveQueries}
yAxisUnit={widget.yAxisUnit}
decimalPrecision={widget.decimalPrecision}
syncMode={DashboardCursorSync.Crosshair}
timezone={timezone.value}
data={chartData as uPlot.AlignedData}
width={containerDimensions.width}
height={containerDimensions.height}
layoutChildren={layoutChildren}
/>
)}
</div>
);
}

export default HistogramPanel;
@@ -0,0 +1,223 @@
/* eslint-disable simple-import-sort/imports */
import type { UseQueryResult } from 'react-query';
import { render, screen } from 'tests/test-utils';

import { PanelMode } from 'container/DashboardContainer/visualization/panels/types';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { Widgets } from 'types/api/dashboard/getAll';
import {
MetricQueryRangeSuccessResponse,
MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';

import HistogramPanel from '../HistogramPanel';
import { HistogramChartProps } from 'container/DashboardContainer/visualization/charts/types';

jest.mock('hooks/useDimensions', () => ({
useResizeObserver: jest.fn().mockReturnValue({ width: 800, height: 400 }),
}));

jest.mock('hooks/useDarkMode', () => ({
useIsDarkMode: jest.fn().mockReturnValue(false),
}));

jest.mock('providers/Timezone', () => ({
__esModule: true,
// Provide a no-op provider component so AllTheProviders can render
default: ({ children }: { children: React.ReactNode }): JSX.Element => (
<>{children}</>
),
// And mock the hook used by HistogramPanel
useTimezone: jest.fn().mockReturnValue({
timezone: { value: 'UTC' },
}),
}));

jest.mock(
'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView',
() => ({
useScrollWidgetIntoView: jest.fn(),
}),
);

jest.mock(
'container/DashboardContainer/visualization/charts/Histogram/Histogram',
() => ({
__esModule: true,
default: (props: HistogramChartProps): JSX.Element => (
<div data-testid="histogram-chart">
<div data-testid="histogram-props">
{JSON.stringify({
legendPosition: props.legendConfig?.position,
isQueriesMerged: props.isQueriesMerged,
yAxisUnit: props.yAxisUnit,
decimalPrecision: props.decimalPrecision,
})}
</div>
{props.layoutChildren}
</div>
),
}),
);

jest.mock(
'container/DashboardContainer/visualization/components/ChartManager/ChartManager',
() => ({
__esModule: true,
default: (): JSX.Element => (
<div data-testid="chart-manager">ChartManager</div>
),
}),
);

function createQueryResponse(
payloadOverrides: Partial<MetricRangePayloadProps> = {},
): { data: { payload: MetricRangePayloadProps } } {
const basePayload: MetricRangePayloadProps = {
data: {
result: [
{
metric: {},
queryName: 'A',
legend: 'Series A',
values: [
[1, '10'],
[2, '20'],
],
},
],
resultType: 'matrix',
newResult: {
data: {
result: [],
resultType: 'matrix',
},
},
},
};

return {
data: {
payload: {
...basePayload,
...payloadOverrides,
},
},
};
}

type WidgetLike = {
id: string;
yAxisUnit: string;
decimalPrecision: number;
legendPosition: LegendPosition;
mergeAllActiveQueries: boolean;
};

function createWidget(overrides: Partial<WidgetLike> = {}): WidgetLike {
return {
id: 'widget-id',
yAxisUnit: 'ms',
decimalPrecision: 2,
legendPosition: LegendPosition.BOTTOM,
mergeAllActiveQueries: false,
...overrides,
};
}

describe('HistogramPanel', () => {
it('renders Histogram when container has dimensions', () => {
const widget = (createWidget() as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;

render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode={false}
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);

expect(screen.getByTestId('histogram-chart')).toBeInTheDocument();
});

it('passes legend position and other props to Histogram', () => {
const widget = (createWidget({
legendPosition: LegendPosition.RIGHT,
}) as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;

render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode={false}
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);

const propsJson = screen.getByTestId('histogram-props').textContent || '{}';
const parsed = JSON.parse(propsJson);

expect(parsed.legendPosition).toBe(LegendPosition.RIGHT);
expect(parsed.yAxisUnit).toBe('ms');
expect(parsed.decimalPrecision).toBe(2);
});

it('renders ChartManager in full view when queries are not merged', () => {
const widget = (createWidget({
mergeAllActiveQueries: false,
}) as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;

render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);

expect(screen.getByTestId('chart-manager')).toBeInTheDocument();
});

it('does not render ChartManager when queries are merged', () => {
const widget = (createWidget({
mergeAllActiveQueries: true,
}) as unknown) as Widgets;
const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
MetricQueryRangeSuccessResponse,
Error
>;

render(
<HistogramPanel
panelMode={PanelMode.DASHBOARD_VIEW}
widget={widget}
queryResponse={queryResponse}
isFullViewMode
onToggleModelHandler={jest.fn()}
onDragSelect={jest.fn()}
/>,
);

expect(screen.queryByTestId('chart-manager')).not.toBeInTheDocument();
});
});
@@ -0,0 +1,231 @@
import { histogramBucketSizes } from '@grafana/data';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import { DrawStyle } from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { AlignedData } from 'uplot';
import { incrRoundDn, roundDecimals } from 'utils/round';

import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';
import {
buildHistogramBuckets,
mergeAlignedDataTables,
prependNullBinToFirstHistogramSeries,
replaceUndefinedWithNullInAlignedData,
} from '../utils/histogram';

export interface PrepareHistogramPanelDataParams {
apiResponse: MetricRangePayloadProps;
bucketWidth?: number;
bucketCount?: number;
mergeAllActiveQueries?: boolean;
}

const BUCKET_OFFSET = 0;
const HIST_SORT = (a: number, b: number): number => a - b;

function extractNumericValues(
result: MetricRangePayloadProps['data']['result'],
): number[] {
const values: number[] = [];
for (const item of result) {
for (const [, valueStr] of item.values) {
values.push(Number.parseFloat(valueStr) || 0);
}
}
return values;
}

function computeSmallestDelta(sortedValues: number[]): number {
if (sortedValues.length <= 1) {
return 0;
}
let smallest = Infinity;
for (let i = 1; i < sortedValues.length; i++) {
const delta = sortedValues[i] - sortedValues[i - 1];
if (delta > 0) {
smallest = Math.min(smallest, delta);
}
}
return smallest === Infinity ? 0 : smallest;
}

function selectBucketSize({
range,
bucketCount,
smallestDelta,
bucketWidthOverride,
}: {
range: number;
bucketCount: number;
smallestDelta: number;
bucketWidthOverride?: number;
}): number {
if (bucketWidthOverride != null && bucketWidthOverride > 0) {
return bucketWidthOverride;
}
const targetSize = range / bucketCount;
for (const candidate of histogramBucketSizes) {
if (targetSize < candidate && candidate >= smallestDelta) {
return candidate;
}
}
return 0;
}

function buildFrames(
result: MetricRangePayloadProps['data']['result'],
mergeAllActiveQueries: boolean,
): number[][] {
const frames: number[][] = result.map((item) =>
item.values.map(([, valueStr]) => Number.parseFloat(valueStr) || 0),
);
if (mergeAllActiveQueries && frames.length > 1) {
const first = frames[0];
for (let i = 1; i < frames.length; i++) {
first.push(...frames[i]);
frames[i] = [];
}
}
return frames;
}

export function prepareHistogramPanelData({
apiResponse,
bucketWidth,
bucketCount: bucketCountProp = DEFAULT_BUCKET_COUNT,
mergeAllActiveQueries = false,
}: PrepareHistogramPanelDataParams): AlignedData {
const bucketCount = bucketCountProp ?? DEFAULT_BUCKET_COUNT;
const result = apiResponse.data.result;

const seriesValues = extractNumericValues(result);
if (seriesValues.length === 0) {
return [[]];
}

const sorted = [...seriesValues].sort((a, b) => a - b);
const min = sorted[0];
const max = sorted[sorted.length - 1];
const range = max - min;
const smallestDelta = computeSmallestDelta(sorted);
let bucketSize = selectBucketSize({
range,
bucketCount,
smallestDelta,
bucketWidthOverride: bucketWidth,
});
if (bucketSize <= 0) {
bucketSize = range > 0 ? range / bucketCount : 1;
}

const getBucket = (v: number): number =>
roundDecimals(incrRoundDn(v - BUCKET_OFFSET, bucketSize) + BUCKET_OFFSET, 9);

const frames = buildFrames(result, mergeAllActiveQueries);
const histogramsPerSeries: AlignedData[] = frames
.filter((frame) => frame.length > 0)
.map((frame) => buildHistogramBuckets(frame, getBucket, HIST_SORT));

if (histogramsPerSeries.length === 0) {
return [[]];
}

const mergedHistogramData = mergeAlignedDataTables(histogramsPerSeries);
replaceUndefinedWithNullInAlignedData(mergedHistogramData);
prependNullBinToFirstHistogramSeries(mergedHistogramData, bucketSize);
return mergedHistogramData;
}

export function prepareHistogramPanelConfig({
widget,
apiResponse,
panelMode,
isDarkMode,
}: {
widget: Widgets;
apiResponse: MetricRangePayloadProps;
panelMode: PanelMode;
isDarkMode: boolean;
}): UPlotConfigBuilder {
const builder = buildBaseConfig({
widget,
isDarkMode,
apiResponse,
panelMode,
panelType: PANEL_TYPES.HISTOGRAM,
});
builder.setCursor({
drag: {
x: false,
y: false,
setScale: true,
},
focus: {
prox: 1e3,
},
});

builder.addScale({
scaleKey: 'x',
time: false,
auto: true,
});
builder.addScale({
scaleKey: 'y',
time: false,
auto: true,
min: 0,
});

const currentQuery = widget.query;
const mergeAllActiveQueries = widget?.mergeAllActiveQueries ?? false;

// When merged, data has only one y column; add one series to match. Otherwise add one per result.
if (mergeAllActiveQueries) {
builder.addSeries({
label: '',
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.HISTOGRAM,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
barWidthFactor: 1,
pointSize: 5,
lineColor: '#3f5ecc',
fillColor: '#4E74F8',
isDarkMode,
});
} else {
apiResponse.data.result.forEach((series) => {
const baseLabelName = getLabelName(
series.metric,
series.queryName || '', // query
series.legend || '',
);

const label = currentQuery
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;

builder.addSeries({
label: label,
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
panelType: PANEL_TYPES.HISTOGRAM,
colorMapping: widget.customLegendColors ?? {},
spanGaps: false,
barWidthFactor: 1,
pointSize: 5,
isDarkMode,
});
});
}

return builder;
}
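For intuition on the bucketing above: `selectBucketSize` targets `range / bucketCount` and picks the first Grafana preset at least that large (and no smaller than the smallest positive gap between values); `getBucket` then floors each value to its bucket edge. A small worked example, assuming `incrRoundDn(v, s)` behaves like `Math.floor(v / s) * s` (the real helpers live in `utils/round`):

```ts
// Assumed semantics of the utils/round helpers used above.
const incrRoundDn = (v: number, s: number): number => Math.floor(v / s) * s;
const roundDecimals = (v: number, d: number): number => Number(v.toFixed(d));

// range = 100, bucketCount = 30 → target ≈ 3.33; first preset above that is 5.
const bucketSize = 5;
const getBucket = (v: number): number => roundDecimals(incrRoundDn(v, bucketSize), 9);

getBucket(12.7); // => 10 (falls in the [10, 15) bucket)
getBucket(15); // => 15 (bucket edges are inclusive on the left)
```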
@@ -2,12 +2,12 @@ import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
@@ -26,7 +26,6 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
isFullViewMode,
onToggleModelHandler,
} = props;
const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
const graphRef = useRef<HTMLDivElement>(null);
const [minTimeScale, setMinTimeScale] = useState<number>();
const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -35,16 +34,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { timezone } = useTimezone();

useEffect(() => {
if (toScrollWidgetId === widget.id) {
graphRef.current?.scrollIntoView({
behavior: 'smooth',
block: 'center',
});
graphRef.current?.focus();
setToScrollWidgetId('');
}
}, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
useScrollWidgetIntoView(widget.id, graphRef);

useEffect((): void => {
const { startTime, endTime } = getTimeRange(queryResponse);

@@ -19,9 +19,9 @@ export interface BaseConfigBuilderProps {
widget: Widgets;
apiResponse: MetricRangePayloadProps;
isDarkMode: boolean;
onClick: OnClickPluginOpts['onClick'];
onDragSelect: (startTime: number, endTime: number) => void;
timezone: Timezone;
onClick?: OnClickPluginOpts['onClick'];
onDragSelect?: (startTime: number, endTime: number) => void;
timezone?: Timezone;
panelMode: PanelMode;
panelType: PANEL_TYPES;
minTimeScale?: number;
@@ -40,8 +40,10 @@ export function buildBaseConfig({
minTimeScale,
maxTimeScale,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
const tzDate = (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
const tzDate = timezone
? (timestamp: number): Date =>
uPlot.tzDate(new Date(timestamp * 1e3), timezone.value)
: undefined;

const builder = new UPlotConfigBuilder({
onDragSelect,

@@ -0,0 +1,225 @@
import {
NULL_EXPAND,
NULL_REMOVE,
NULL_RETAIN,
} from 'container/PanelWrapper/constants';
import { AlignedData } from 'uplot';

/**
* Expands contiguous runs of `null` values to the left and right of their
* original positions so that visual gaps in the series are continuous.
*
* This is used when `NULL_EXPAND` mode is selected while joining series.
*/
function propagateNullsAcrossNeighbors(
seriesValues: Array<number | null>,
nullIndices: number[],
alignedLength: number,
): void {
for (
let i = 0, currentIndex, lastExpandedNullIndex = -1;
i < nullIndices.length;
i++
) {
const nullIndex = nullIndices[i];

if (nullIndex > lastExpandedNullIndex) {
// expand left until we hit a non-null value
currentIndex = nullIndex - 1;
while (currentIndex >= 0 && seriesValues[currentIndex] == null) {
seriesValues[currentIndex--] = null;
}

// expand right until we hit a non-null value
currentIndex = nullIndex + 1;
while (currentIndex < alignedLength && seriesValues[currentIndex] == null) {
seriesValues[(lastExpandedNullIndex = currentIndex++)] = null;
}
}
}
}

/**
* Merges multiple uPlot `AlignedData` tables into a single aligned table.
*
* - Merges and sorts all distinct x-values from each table.
* - Re-aligns every series onto the merged x-axis.
* - Applies per-series null handling (`NULL_REMOVE`, `NULL_RETAIN`, `NULL_EXPAND`).
*/
/* eslint-disable sonarjs/cognitive-complexity */
export function mergeAlignedDataTables(
alignedTables: AlignedData[],
nullModes?: number[][],
): AlignedData {
let mergedXValues: Set<number>;

// eslint-disable-next-line prefer-const
mergedXValues = new Set();

// Collect all unique x-values from every table.
for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
const table = alignedTables[tableIndex];
const xValues = table[0];
const xLength = xValues.length;

for (let i = 0; i < xLength; i++) {
mergedXValues.add(xValues[i]);
}
}

// Sorted, merged x-axis used by the final result.
const alignedData: (number | null | undefined)[][] = [
Array.from(mergedXValues).sort((a, b) => a - b),
];

const alignedLength = alignedData[0].length;

// Map from x-value to its index in the merged x-axis.
const xValueToIndexMap = new Map<number, number>();

for (let i = 0; i < alignedLength; i++) {
xValueToIndexMap.set(alignedData[0][i] as number, i);
}

// Re-align all series from all tables onto the merged x-axis.
for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
const table = alignedTables[tableIndex];
const xValues = table[0];

for (let seriesIndex = 1; seriesIndex < table.length; seriesIndex++) {
const seriesValues = table[seriesIndex];

const alignedSeriesValues = Array(alignedLength).fill(undefined);

const nullHandlingMode = nullModes
? nullModes[tableIndex][seriesIndex]
: NULL_RETAIN;

const nullIndices: number[] = [];

for (let i = 0; i < seriesValues.length; i++) {
const valueAtPoint = seriesValues[i];
const alignedIndex = xValueToIndexMap.get(xValues[i]);

if (alignedIndex == null) {
continue;
}

if (valueAtPoint === null) {
if (nullHandlingMode !== NULL_REMOVE) {
alignedSeriesValues[alignedIndex] = valueAtPoint;

if (nullHandlingMode === NULL_EXPAND) {
nullIndices.push(alignedIndex);
}
}
} else {
alignedSeriesValues[alignedIndex] = valueAtPoint;
}
}

// Optionally expand nulls to visually preserve gaps.
propagateNullsAcrossNeighbors(
alignedSeriesValues,
nullIndices,
alignedLength,
);

alignedData.push(alignedSeriesValues);
}
}

return alignedData as AlignedData;
}

/**
* Builds histogram buckets from raw values.
*
* - Each value is mapped into a bucket via `getBucketForValue`.
* - Counts how many values fall into each bucket.
* - Optionally sorts buckets using the provided comparator.
*/
export function buildHistogramBuckets(
values: number[],
getBucketForValue: (value: number) => number,
sortBuckets?: ((a: number, b: number) => number) | null,
): AlignedData {
const bucketMap = new Map<number, { value: number; count: number }>();

for (let i = 0; i < values.length; i++) {
let value = values[i];

if (value != null) {
value = getBucketForValue(value);
}

const bucket = bucketMap.get(value);

if (bucket) {
bucket.count++;
} else {
bucketMap.set(value, { value, count: 1 });
}
}

const buckets = [...bucketMap.values()];

// eslint-disable-next-line @typescript-eslint/no-unused-expressions
sortBuckets && buckets.sort((a, b) => sortBuckets(a.value, b.value));

const bucketValues = Array(buckets.length);
const bucketCounts = Array(buckets.length);

for (let i = 0; i < buckets.length; i++) {
bucketValues[i] = buckets[i].value;
bucketCounts[i] = buckets[i].count;
}

return [bucketValues, bucketCounts];
}

/**
* Mutates an `AlignedData` instance, replacing all `undefined` entries
* with explicit `null` values so uPlot treats them as gaps.
*/
export function replaceUndefinedWithNullInAlignedData(
data: AlignedData,
): AlignedData {
const seriesList = data as (number | null | undefined)[][];
for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex++) {
for (
let pointIndex = 0;
pointIndex < seriesList[seriesIndex].length;
pointIndex++
) {
if (seriesList[seriesIndex][pointIndex] === undefined) {
seriesList[seriesIndex][pointIndex] = null;
}
}
}
return data;
}

/**
* Ensures the first histogram series has a leading "empty" bin so that
* all series line up visually when rendered as bars.
*
* - Prepends a new x-value (first x - `bucketSize`) to the first series.
* - Prepends `null` to all subsequent series at the same index.
*/
export function prependNullBinToFirstHistogramSeries(
alignedData: AlignedData,
bucketSize: number,
): void {
const seriesList = alignedData as (number | null)[][];
if (
seriesList.length > 0 &&
seriesList[0].length > 0 &&
seriesList[0][0] !== null
) {
seriesList[0].unshift(seriesList[0][0] - bucketSize);
for (let seriesIndex = 1; seriesIndex < seriesList.length; seriesIndex++) {
seriesList[seriesIndex].unshift(null);
}
}
}
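A quick sanity check of `buildHistogramBuckets` above — bucket each value, count occurrences per bucket, and return the `[bucketValues, counts]` pair sorted by the comparator:

```ts
// Width-10 bucketing rule for illustration.
const getBucket = (v: number): number => Math.floor(v / 10) * 10;

buildHistogramBuckets([3, 7, 12, 18, 41], getBucket, (a, b) => a - b);
// 3 and 7 land in bucket 0; 12 and 18 in bucket 10; 41 in bucket 40.
// => [[0, 10, 40], [2, 2, 1]]
```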
@@ -1,8 +1,8 @@
|
||||
import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import BarPanel from 'container/DashboardContainer/visualization/panels/BarPanel/BarPanel';
|
||||
import HistogramPanel from 'container/DashboardContainer/visualization/panels/HistogramPanel/HistogramPanel';
|
||||
|
||||
import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
|
||||
import HistogramPanelWrapper from './HistogramPanelWrapper';
|
||||
import ListPanelWrapper from './ListPanelWrapper';
|
||||
import PiePanelWrapper from './PiePanelWrapper';
|
||||
import TablePanelWrapper from './TablePanelWrapper';
|
||||
@@ -17,7 +17,7 @@ export const PanelTypeVsPanelWrapper = {
|
||||
[PANEL_TYPES.EMPTY_WIDGET]: null,
|
||||
[PANEL_TYPES.PIE]: PiePanelWrapper,
|
||||
[PANEL_TYPES.BAR]: BarPanel,
|
||||
[PANEL_TYPES.HISTOGRAM]: HistogramPanelWrapper,
|
||||
[PANEL_TYPES.HISTOGRAM]: HistogramPanel,
|
||||
};
|
||||
|
||||
export const DEFAULT_BUCKET_COUNT = 30;
|
||||
|
||||
@@ -3,7 +3,6 @@ import { MemoryRouter, Route } from 'react-router-dom';
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import ROUTES from 'constants/routes';
|
||||
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
|
||||
import { AppProvider } from 'providers/App/App';
|
||||
import MockQueryClientProvider from 'providers/test/MockQueryClientProvider';
|
||||
import { Span } from 'types/api/trace/getTraceV2';
|
||||
@@ -109,7 +108,7 @@ const createMockSpan = (): Span => ({
|
||||
statusMessage: '',
|
||||
tagMap: {
|
||||
'http.method': 'GET',
|
||||
[SPAN_ATTRIBUTES.HTTP_URL]: '/api/users?page=1',
|
||||
'http.url': '/api/users?page=1',
|
||||
'http.status_code': '200',
|
||||
'service.name': 'frontend-service',
|
||||
'span.kind': 'server',
|
||||
|
||||
@@ -5,7 +5,6 @@ import getSpanPercentiles from 'api/trace/getSpanPercentiles';
import getUserPreference from 'api/v1/user/preferences/name/get';
import { QueryParams } from 'constants/query';
import ROUTES from 'constants/routes';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
import { server } from 'mocks-server/server';
import { QueryBuilderContext } from 'providers/QueryBuilder';
@@ -879,9 +878,7 @@ describe('SpanDetailsDrawer', () => {

    // Verify only matching attributes are shown (use getAllByText for all since they appear in multiple places)
    expect(screen.getAllByText('http.method').length).toBeGreaterThan(0);
    expect(screen.getAllByText(SPAN_ATTRIBUTES.HTTP_URL).length).toBeGreaterThan(
      0,
    );
    expect(screen.getAllByText('http.url').length).toBeGreaterThan(0);
    expect(screen.getAllByText('http.status_code').length).toBeGreaterThan(0);
  });

@@ -1129,7 +1126,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {

    // User sees all attributes initially
    expect(screen.getByText('http.method')).toBeInTheDocument();
    expect(screen.getByText(SPAN_ATTRIBUTES.HTTP_URL)).toBeInTheDocument();
    expect(screen.getByText('http.url')).toBeInTheDocument();
    expect(screen.getByText('http.status_code')).toBeInTheDocument();

    // User types "method" in search
@@ -1139,7 +1136,7 @@ describe('SpanDetailsDrawer - Search Visibility User Flows', () => {
    // User sees only matching attributes
    await waitFor(() => {
      expect(screen.getByText('http.method')).toBeInTheDocument();
      expect(screen.queryByText(SPAN_ATTRIBUTES.HTTP_URL)).not.toBeInTheDocument();
      expect(screen.queryByText('http.url')).not.toBeInTheDocument();
      expect(screen.queryByText('http.status_code')).not.toBeInTheDocument();
    });
  });
@@ -1,4 +1,3 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import { ILog } from 'types/api/logs/log';
import { Span } from 'types/api/trace/getTraceV2';

@@ -23,7 +22,7 @@ export const mockSpan: Span = {
  event: [],
  tagMap: {
    'http.method': 'GET',
    [SPAN_ATTRIBUTES.HTTP_URL]: '/api/test',
    'http.url': '/api/test',
    'http.status_code': '200',
  },
  hasError: false,
@@ -0,0 +1,8 @@
import { HistogramTooltipProps } from '../types';
import Tooltip from './Tooltip';

export default function HistogramTooltip(
  props: HistogramTooltipProps,
): JSX.Element {
  return <Tooltip {...props} showTooltipHeader={false} />;
}
@@ -16,12 +16,16 @@ export default function Tooltip({
  uPlotInstance,
  timezone,
  content,
  showTooltipHeader = true,
}: TooltipProps): JSX.Element {
  const isDarkMode = useIsDarkMode();

  const tooltipContent = content ?? [];

  const headerTitle = useMemo(() => {
    if (!showTooltipHeader) {
      return null;
    }
    const data = uPlotInstance.data;
    const cursorIdx = uPlotInstance.cursor.idx;
    if (cursorIdx == null) {
@@ -30,7 +34,12 @@ export default function Tooltip({
    return dayjs(data[0][cursorIdx] * 1000)
      .tz(timezone)
      .format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
  }, [timezone, uPlotInstance.data, uPlotInstance.cursor.idx]);
  }, [
    timezone,
    uPlotInstance.data,
    uPlotInstance.cursor.idx,
    showTooltipHeader,
  ]);

  return (
    <div
@@ -39,9 +48,11 @@ export default function Tooltip({
        isDarkMode ? 'darkMode' : 'lightMode',
      )}
    >
      <div className="uplot-tooltip-header">
        <span>{headerTitle}</span>
      </div>
      {showTooltipHeader && (
        <div className="uplot-tooltip-header">
          <span>{headerTitle}</span>
        </div>
      )}
      <div
        style={{
          height: Math.min(
@@ -60,6 +60,7 @@ export interface TooltipRenderArgs {
}

export interface BaseTooltipProps {
  showTooltipHeader?: boolean;
  timezone: string;
  yAxisUnit?: string;
  decimalPrecision?: PrecisionOption;
@@ -74,7 +75,14 @@ export interface BarTooltipProps extends BaseTooltipProps, TooltipRenderArgs {
  isStackedBarChart?: boolean;
}

export type TooltipProps = TimeSeriesTooltipProps | BarTooltipProps;
export interface HistogramTooltipProps
  extends BaseTooltipProps,
    TooltipRenderArgs {}

export type TooltipProps =
  | TimeSeriesTooltipProps
  | BarTooltipProps
  | HistogramTooltipProps;

export enum LegendPosition {
  BOTTOM = 'bottom',
@@ -387,7 +387,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
  } = this.getVisibilityResolutionState();

  config.series = [
    { value: (): string => '' }, // Base series for timestamp
    { value: (): string => '', label: 'Timestamp' }, // Base series for timestamp
    ...this.series.map((s) => {
      const series = s.getConfig();
      // Stored visibility[0] is x-axis/time; data series start at visibility[1]
@@ -49,7 +49,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
  }: {
    resolvedLineColor: string;
  }): Partial<Series> {
    const { lineWidth, lineStyle, lineCap } = this.props;
    const { lineWidth, lineStyle, lineCap, fillColor } = this.props;
    const lineConfig: Partial<Series> = {
      stroke: resolvedLineColor,
      width: lineWidth ?? 2,
@@ -63,8 +63,12 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
      lineConfig.cap = lineCap;
    }

    if (this.props.panelType === PANEL_TYPES.BAR) {
    if (fillColor) {
      lineConfig.fill = fillColor;
    } else if (this.props.panelType === PANEL_TYPES.BAR) {
      lineConfig.fill = resolvedLineColor;
    } else if (this.props.panelType === PANEL_TYPES.HISTOGRAM) {
      lineConfig.fill = `${resolvedLineColor}40`;
    }

    return lineConfig;
@@ -147,6 +151,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
      pointsConfig.show = false;
    } else if (showPoints === VisibilityMode.Always) {
      pointsConfig.show = true;
    } else {
      pointsConfig.show = false; // default to hidden
    }

    return pointsConfig;
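The histogram branch above derives a translucent fill by appending the hex byte `40` (decimal 64, roughly 25% opacity) to a 6-digit hex color. A minimal Go sketch of the same derivation, assuming `#RRGGBB` inputs (the helper name is illustrative):

package main

import "fmt"

// withAlpha appends an alpha byte to a 6-digit hex color, mirroring the
// `${resolvedLineColor}40` template expression above. alpha is 0-255.
func withAlpha(hexColor string, alpha uint8) string {
	return fmt.Sprintf("%s%02X", hexColor, alpha)
}

func main() {
	fmt.Println(withAlpha("#1f77b4", 0x40)) // #1f77b440
}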
@@ -175,6 +175,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
  pointsBuilder?: Series.Points.Show;
  show?: boolean;
  spanGaps?: boolean;
  fillColor?: string;
  isDarkMode?: boolean;
  stepInterval?: number;
}
@@ -11,22 +11,6 @@ import { Threshold } from '../hooks/types';
import { findMinMaxThresholdValues } from './threshold';
import { LogScaleLimits, RangeFunctionParams } from './types';

/**
 * Rounds a number down to the nearest multiple of incr.
 * Used for linear scale min so the axis starts on a clean tick.
 */
export function incrRoundDn(num: number, incr: number): number {
  return Math.floor(num / incr) * incr;
}

/**
 * Rounds a number up to the nearest multiple of incr.
 * Used for linear scale max so the axis ends on a clean tick.
 */
export function incrRoundUp(num: number, incr: number): number {
  return Math.ceil(num / incr) * incr;
}

/**
 * Snaps min/max/softMin/softMax to valid log-scale values (powers of logBase).
 * Only applies when distribution is logarithmic; otherwise returns limits unchanged.
@@ -213,25 +197,6 @@ function getLogScaleRange(
  );
}

/**
 * Snaps linear scale min down and max up to whole numbers so axis bounds are clean.
 */
function roundLinearRange(minMax: Range.MinMax): Range.MinMax {
  const [currentMin, currentMax] = minMax;
  let roundedMin = currentMin;
  let roundedMax = currentMax;

  if (roundedMin != null) {
    roundedMin = incrRoundDn(roundedMin, 1);
  }

  if (roundedMax != null) {
    roundedMax = incrRoundUp(roundedMax, 1);
  }

  return [roundedMin, roundedMax];
}

/**
 * Snaps log-scale [min, max] to exact powers of logBase (nearest magnitude below/above).
 * If min and max would be equal after snapping, max is increased by one magnitude so the range is valid.
@@ -330,7 +295,6 @@ export function createRangeFunction(

  if (scale.distr === 1) {
    minMax = getLinearScaleRange(minMax, params, dataMin, dataMax);
    minMax = roundLinearRange(minMax);
  } else if (scale.distr === 3) {
    minMax = getLogScaleRange(minMax, params, dataMin, dataMax, logBase);
    const logFn = scale.log === 2 ? Math.log2 : Math.log10;
@@ -1,5 +1,3 @@
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';

/* eslint-disable sonarjs/no-duplicate-string */
export const traceDetailResponse = [
  {
@@ -37,7 +35,7 @@ export const traceDetailResponse = [
      'component',
      'host.name',
      'http.method',
      SPAN_ATTRIBUTES.HTTP_URL,
      'http.url',
      'ip',
      'http.status_code',
      'opencensus.exporterversion',
@@ -86,7 +84,7 @@ export const traceDetailResponse = [
      'signoz.collector.id',
      'component',
      'http.method',
      SPAN_ATTRIBUTES.HTTP_URL,
      'http.url',
      'ip',
    ],
    [
@@ -743,7 +741,7 @@ export const traceDetailResponse = [
      'component',
      'http.method',
      'http.status_code',
      SPAN_ATTRIBUTES.HTTP_URL,
      'http.url',
      'net/http.reused',
      'net/http.was_idle',
      'service.name',
@@ -835,7 +833,7 @@ export const traceDetailResponse = [
      'opencensus.exporterversion',
      'signoz.collector.id',
      'host.name',
      SPAN_ATTRIBUTES.HTTP_URL,
      'http.url',
      'net/http.reused',
      'net/http.was_idle',
    ],
@@ -918,7 +916,7 @@ export const traceDetailResponse = [
      'net/http.was_idle',
      'component',
      'host.name',
      SPAN_ATTRIBUTES.HTTP_URL,
      'http.url',
      'ip',
      'service.name',
      'signoz.collector.id',
@@ -2,7 +2,6 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';
import { getAttributesValues } from 'api/queryBuilder/getAttributesValues';
import { DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY } from 'constants/queryBuilder';
import { SPAN_ATTRIBUTES } from 'container/ApiMonitoring/Explorer/Domains/DomainDetails/constants';
import {
  BaseAutocompleteData,
  DataTypes,
@@ -32,7 +31,7 @@ export const AllTraceFilterKeyValue: Record<string, string> = {
  httpRoute: 'HTTP Route',
  'http.route': 'HTTP Route',
  httpUrl: 'HTTP URL',
  [SPAN_ATTRIBUTES.HTTP_URL]: 'HTTP URL',
  'http.url': 'HTTP URL',
  traceID: 'Trace ID',
  trace_id: 'Trace ID',
} as const;
@@ -3,3 +3,33 @@
 * Example: 1.5 → 2, 1.49 → 1
 */
export const roundHalfUp = (value: number): number => Math.floor(value + 0.5);

/**
 * Rounds a number down to the nearest multiple of incr.
 * Used for linear scale min so the axis starts on a clean tick.
 */
export function incrRoundDn(num: number, incr: number): number {
  return Math.floor(num / incr) * incr;
}

/**
 * Rounds a number up to the nearest multiple of incr.
 * Used for linear scale max so the axis ends on a clean tick.
 */
export function incrRoundUp(num: number, incr: number): number {
  return Math.ceil(num / incr) * incr;
}

/**
 * Rounds a number to dec decimal places (the nearest multiple of 10^-dec).
 * Used for decimal precision.
 */
export function roundDecimals(val: number, dec = 0): number {
  if (Number.isInteger(val)) {
    return val;
  }

  const p = 10 ** dec;
  const n = val * p * (1 + Number.EPSILON);
  return Math.round(n) / p;
}
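For readers skimming the math: a standalone Go rendering of these rounding helpers, for illustration only (names mirror the TypeScript; the epsilon nudge compensates for values like 2.675 that float representation puts just under the half-step):

package main

import (
	"fmt"
	"math"
)

// incrRoundDn rounds num down to the nearest multiple of incr.
func incrRoundDn(num, incr float64) float64 { return math.Floor(num/incr) * incr }

// incrRoundUp rounds num up to the nearest multiple of incr.
func incrRoundUp(num, incr float64) float64 { return math.Ceil(num/incr) * incr }

// roundDecimals rounds val to dec decimal places, nudging by a tiny epsilon
// so half-step values round up as intended.
func roundDecimals(val float64, dec int) float64 {
	p := math.Pow(10, float64(dec))
	return math.Round(val*p*(1+1e-15)) / p
}

func main() {
	fmt.Println(incrRoundDn(7.3, 2))     // 6
	fmt.Println(incrRoundUp(7.3, 2))     // 8
	fmt.Println(roundDecimals(2.675, 2)) // 2.68
}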
@@ -3,9 +3,8 @@ package flagger

import "github.com/SigNoz/signoz/pkg/types/featuretypes"

var (
	FeatureUseSpanMetrics       = featuretypes.MustNewName("use_span_metrics")
	FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
	FeatureKafkaSpanEval        = featuretypes.MustNewName("kafka_span_eval")
	FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
	FeatureKafkaSpanEval  = featuretypes.MustNewName("kafka_span_eval")
)

func MustNewRegistry() featuretypes.Registry {
@@ -18,14 +17,6 @@ func MustNewRegistry() featuretypes.Registry {
			DefaultVariant: featuretypes.MustNewName("disabled"),
			Variants:       featuretypes.NewBooleanVariants(),
		},
		&featuretypes.Feature{
			Name:           FeatureInterpolationEnabled,
			Kind:           featuretypes.KindBoolean,
			Stage:          featuretypes.StageExperimental,
			Description:    "Controls whether to enable interpolation",
			DefaultVariant: featuretypes.MustNewName("disabled"),
			Variants:       featuretypes.NewBooleanVariants(),
		},
		&featuretypes.Feature{
			Name: FeatureKafkaSpanEval,
			Kind: featuretypes.KindBoolean,
pkg/modules/identity/identity.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package identity

import (
	"context"

	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type Module interface {
	// GetRoles gets all roles for an identity
	GetRoles(ctx context.Context, identityID valuer.UUID) ([]types.Role, error)

	// GetRolesForIdentities gets roles for multiple identities (batch)
	GetRolesForIdentities(ctx context.Context, identityIDs []valuer.UUID) (map[valuer.UUID][]types.Role, error)

	// CountByRoleAndOrgID counts identities with a specific role in an org
	CountByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) (int64, error)

	// CreateIdentityWithRoles creates an identity and assigns roles to it
	CreateIdentityWithRoles(ctx context.Context, id valuer.UUID, orgID valuer.UUID, roles []types.Role) error

	// AddRole adds a role to an identity
	AddRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error

	// RemoveRole removes a role from an identity
	RemoveRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error

	// DeleteIdentity deletes an identity and its associated roles
	DeleteIdentity(ctx context.Context, identityID valuer.UUID) error
}
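A hedged sketch of how a caller might drive this interface — create an identity with an initial role set, then read the roles back. Only methods declared above are used; the function name and the caller-supplied IDs are illustrative, and module wiring (implidentity.NewModule over a sqlstore) appears in the files below:

package identityexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/identity"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// createViewer provisions an identity with the viewer role and returns the
// roles the store now reports for it.
func createViewer(ctx context.Context, mod identity.Module, id, orgID valuer.UUID) ([]types.Role, error) {
	if err := mod.CreateIdentityWithRoles(ctx, id, orgID, []types.Role{types.RoleViewer}); err != nil {
		return nil, err
	}
	return mod.GetRoles(ctx, id)
}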
pkg/modules/identity/implidentity/module.go (new file, 111 lines)
@@ -0,0 +1,111 @@
package implidentity

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/identity"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/identitytypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type module struct {
	store *store
}

func NewModule(sqlstore sqlstore.SQLStore) identity.Module {
	return &module{
		store: newStore(sqlstore),
	}
}

func (m *module) CreateIdentityWithRoles(ctx context.Context, id valuer.UUID, orgID valuer.UUID, roles []types.Role) error {
	return m.store.RunInTx(ctx, func(ctx context.Context) error {
		// Create identity
		ident := identitytypes.NewStorableIdentity(id, orgID)
		if err := m.store.CreateIdentity(ctx, ident); err != nil {
			return err
		}

		// Create identity_role mappings for each role
		for _, role := range roles {
			roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
			identityRole := identitytypes.NewStorableIdentityRole(id, roleName)
			if err := m.store.CreateIdentityRole(ctx, identityRole); err != nil {
				return err
			}
		}

		return nil
	})
}

func (m *module) AddRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error {
	roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
	identityRole := identitytypes.NewStorableIdentityRole(identityID, roleName)
	return m.store.CreateIdentityRole(ctx, identityRole)
}

func (m *module) RemoveRole(ctx context.Context, identityID valuer.UUID, orgID valuer.UUID, role types.Role) error {
	roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
	return m.store.DeleteIdentityRole(ctx, identityID, roleName)
}

func (m *module) GetRoles(ctx context.Context, identityID valuer.UUID) ([]types.Role, error) {
	roleNames, err := m.store.GetRolesByIdentityID(ctx, identityID)
	if err != nil {
		return nil, err
	}

	roles := make([]types.Role, 0, len(roleNames))
	for _, roleName := range roleNames {
		roles = append(roles, roletypes.GetRoleFromManagedRoleName(roleName))
	}

	return roles, nil
}

func (m *module) GetRolesForIdentities(ctx context.Context, identityIDs []valuer.UUID) (map[valuer.UUID][]types.Role, error) {
	roleNamesMap, err := m.store.GetRolesForIdentityIDs(ctx, identityIDs)
	if err != nil {
		return nil, err
	}

	result := make(map[valuer.UUID][]types.Role)
	for _, id := range identityIDs {
		if roleNames, ok := roleNamesMap[id.StringValue()]; ok {
			roles := make([]types.Role, 0, len(roleNames))
			for _, roleName := range roleNames {
				roles = append(roles, roletypes.GetRoleFromManagedRoleName(roleName))
			}
			result[id] = roles
		} else {
			result[id] = []types.Role{}
		}
	}

	return result, nil
}

func (m *module) DeleteIdentity(ctx context.Context, identityID valuer.UUID) error {
	return m.store.RunInTx(ctx, func(ctx context.Context) error {
		// Delete identity_role first (FK constraint)
		if err := m.store.DeleteIdentityRoles(ctx, identityID); err != nil {
			return err
		}

		// Delete identity
		if err := m.store.DeleteIdentity(ctx, identityID); err != nil {
			return err
		}

		return nil
	})
}

func (m *module) CountByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) (int64, error) {
	roleName := roletypes.MustGetSigNozManagedRoleFromExistingRole(role)
	return m.store.CountByRoleNameAndOrgID(ctx, roleName, orgID)
}
pkg/modules/identity/implidentity/store.go (new file, 152 lines)
@@ -0,0 +1,152 @@
package implidentity

import (
	"context"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/SigNoz/signoz/pkg/types/identitytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/uptrace/bun"
)

type store struct {
	sqlstore sqlstore.SQLStore
}

func newStore(sqlstore sqlstore.SQLStore) *store {
	return &store{sqlstore: sqlstore}
}

func (s *store) CreateIdentity(ctx context.Context, identity *identitytypes.StorableIdentity) error {
	_, err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewInsert().
		Model(identity).
		Exec(ctx)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create identity")
	}
	return nil
}

func (s *store) CreateIdentityRole(ctx context.Context, identityRole *identitytypes.StorableIdentityRole) error {
	_, err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewInsert().
		Model(identityRole).
		Exec(ctx)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to create identity role")
	}
	return nil
}

func (s *store) DeleteIdentityRole(ctx context.Context, identityID valuer.UUID, roleName string) error {
	_, err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewDelete().
		Model(&identitytypes.StorableIdentityRole{}).
		Where("identity_id = ?", identityID).
		Where("role_name = ?", roleName).
		Exec(ctx)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity role")
	}
	return nil
}

func (s *store) DeleteIdentityRoles(ctx context.Context, identityID valuer.UUID) error {
	_, err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewDelete().
		Model(&identitytypes.StorableIdentityRole{}).
		Where("identity_id = ?", identityID).
		Exec(ctx)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity roles")
	}
	return nil
}

func (s *store) DeleteIdentity(ctx context.Context, identityID valuer.UUID) error {
	_, err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewDelete().
		Model(&identitytypes.StorableIdentity{}).
		Where("id = ?", identityID).
		Exec(ctx)
	if err != nil {
		return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to delete identity")
	}
	return nil
}

func (s *store) GetRolesByIdentityID(ctx context.Context, identityID valuer.UUID) ([]string, error) {
	var roleNames []string
	err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model((*identitytypes.StorableIdentityRole)(nil)).
		Column("role_name").
		Where("identity_id = ?", identityID).
		Scan(ctx, &roleNames)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get roles for identity %s", identityID)
	}
	return roleNames, nil
}

func (s *store) GetRolesForIdentityIDs(ctx context.Context, identityIDs []valuer.UUID) (map[string][]string, error) {
	if len(identityIDs) == 0 {
		return map[string][]string{}, nil
	}

	var results []identitytypes.StorableIdentityRole
	err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model(&results).
		Column("identity_id", "role_name").
		Where("identity_id IN (?)", bun.In(identityIDs)).
		Scan(ctx)
	if err != nil {
		return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to get roles for identities")
	}

	roleMap := make(map[string][]string)
	for _, r := range results {
		roleMap[r.IdentityID.StringValue()] = append(roleMap[r.IdentityID.StringValue()], r.RoleName)
	}

	return roleMap, nil
}

func (s *store) RunInTx(ctx context.Context, cb func(ctx context.Context) error) error {
	return s.sqlstore.RunInTxCtx(ctx, nil, func(ctx context.Context) error {
		return cb(ctx)
	})
}

func (s *store) CountByRoleNameAndOrgID(ctx context.Context, roleName string, orgID valuer.UUID) (int64, error) {
	count, err := s.
		sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model((*identitytypes.StorableIdentityRole)(nil)).
		Join("JOIN identity ON identity.id = identity_role.identity_id").
		Where("identity_role.role_name = ?", roleName).
		Where("identity.org_id = ?", orgID).
		Count(ctx)
	if err != nil {
		return 0, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to count identities by role")
	}
	return int64(count), nil
}
@@ -30,3 +30,7 @@ func (module *getter) ListByOwnedKeyRange(ctx context.Context) ([]*types.Organiz

	return module.store.ListByKeyRange(ctx, start, end)
}

func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
	return module.store.GetByName(ctx, name)
}
@@ -47,6 +47,22 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
	return organization, nil
}

func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
	organization := new(types.Organization)
	err := store.
		sqlstore.
		BunDB().
		NewSelect().
		Model(organization).
		Where("name = ?", name).
		Scan(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
	}

	return organization, nil
}

func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
	organizations := make([]*types.Organization, 0)
	err := store.
@@ -14,6 +14,9 @@ type Getter interface {

	// ListByOwnedKeyRange gets all the organizations owned by the instance
	ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)

	// GetByName gets the organization by name
	GetByName(context.Context, string) (*types.Organization, error)
}

type Setter interface {
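GetByName returns a typed not-found error (via WrapNotFoundErrf in the store above), which callers branch on with errors.Ast. A minimal sketch of that contract, mirroring how the root-user reconciler further below consumes it (the helper name is illustrative):

package orgexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/types"
)

// ensureOrg looks up an organization by name and reports whether the caller
// should create it: WrapNotFoundErrf tags the error as TypeNotFound, and
// errors.Ast checks for that tag.
func ensureOrg(ctx context.Context, orgs organization.Getter, name string) (*types.Organization, bool, error) {
	org, err := orgs.GetByName(ctx, name)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			return nil, true, nil // not found: create it
		}
		return nil, false, err
	}
	return org, false, nil
}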
@@ -11,6 +11,7 @@ import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/authdomain"
	"github.com/SigNoz/signoz/pkg/modules/identity"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/session"
	"github.com/SigNoz/signoz/pkg/modules/user"
@@ -28,9 +29,10 @@ type module struct {
	authDomain authdomain.Module
	tokenizer  tokenizer.Tokenizer
	orgGetter  organization.Getter
	identity   identity.Module
}

func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter) session.Module {
func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.AuthNProvider]authn.AuthN, user user.Module, userGetter user.Getter, authDomain authdomain.Module, tokenizer tokenizer.Tokenizer, orgGetter organization.Getter, identity identity.Module) session.Module {
	return &module{
		settings: factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/session/implsession"),
		authNs:   authNs,
@@ -39,6 +41,7 @@ func NewModule(providerSettings factory.ProviderSettings, authNs map[authtypes.A
		authDomain: authDomain,
		tokenizer:  tokenizer,
		orgGetter:  orgGetter,
		identity:   identity,
	}
}

@@ -141,17 +144,33 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
	roleMapping := authDomain.AuthDomainConfig().RoleMapping
	role := roleMapping.NewRoleFromCallbackIdentity(callbackIdentity)

	user, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, role, callbackIdentity.OrgID)
	newUser, err := types.NewUser(callbackIdentity.Name, callbackIdentity.Email, callbackIdentity.OrgID)
	if err != nil {
		return "", err
	}

	user, err = module.user.GetOrCreateUser(ctx, user)
	createdUser, err := module.user.GetOrCreateUser(ctx, newUser, user.WithRole(role))
	if err != nil {
		return "", err
	}

	token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
	if err := createdUser.ErrIfRoot(); err != nil {
		return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
	}

	// Get roles from identity module
	userRoles, err := module.identity.GetRoles(ctx, createdUser.IdentityID)
	if err != nil {
		return "", err
	}

	// Use first role for token creation (backward compatibility with current token system)
	var primaryRole types.Role
	if len(userRoles) > 0 {
		primaryRole = userRoles[0]
	}

	token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(createdUser.ID, createdUser.OrgID, createdUser.Email, primaryRole), map[string]string{})
	if err != nil {
		return "", err
	}
@@ -5,11 +5,26 @@ import (

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type Config struct {
	Password PasswordConfig `mapstructure:"password"`
	Root     RootConfig     `mapstructure:"root"`
}

type RootConfig struct {
	Enabled  bool         `mapstructure:"enabled"`
	Email    valuer.Email `mapstructure:"email"`
	Password string       `mapstructure:"password"`
	Org      OrgConfig    `mapstructure:"org"`
}

type OrgConfig struct {
	Name string `mapstructure:"name"`
}

type PasswordConfig struct {
	Reset ResetConfig `mapstructure:"reset"`
}
@@ -31,6 +46,12 @@ func newConfig() factory.Config {
				MaxTokenLifetime: 6 * time.Hour,
			},
		},
		Root: RootConfig{
			Enabled: false,
			Org: OrgConfig{
				Name: "default",
			},
		},
	}
}

@@ -39,5 +60,17 @@ func (c Config) Validate() error {
		return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
	}

	if c.Root.Enabled {
		if c.Root.Email.IsZero() {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
		}
		if c.Root.Password == "" {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
		}
		if !types.IsPasswordValid(c.Root.Password) {
			return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
		}
	}

	return nil
}
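A quick illustration of the validation contract above, constructing the struct directly (in the running service the values arrive via the mapstructure keys such as user::root::email). The ResetConfig field name follows the defaults shown above and is an assumption here:

package main

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/modules/user"
)

func main() {
	// Root enabled but no email set: Validate should reject this.
	cfg := user.Config{
		Password: user.PasswordConfig{
			Reset: user.ResetConfig{MaxTokenLifetime: 6 * time.Hour}, // assumed field, per defaults above
		},
		Root: user.RootConfig{
			Enabled:  true,
			Password: "some-strong-password",
		},
	}
	if err := cfg.Validate(); err != nil {
		fmt.Println(err) // user::root::email is required when root user is enabled
	}
}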
@@ -16,6 +16,10 @@ func NewGetter(store types.UserStore) user.Getter {
	return &getter{store: store}
}

func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
	return module.store.GetRootUserByOrgID(ctx, orgID)
}

func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
	users, err := module.store.ListUsersByOrgID(ctx, orgID)
	if err != nil {
@@ -12,6 +12,7 @@ import (
	"github.com/SigNoz/signoz/pkg/emailing"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/identity"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/user"
	root "github.com/SigNoz/signoz/pkg/modules/user"
@@ -33,10 +34,11 @@ type Module struct {
	authz     authz.AuthZ
	analytics analytics.Analytics
	config    user.Config
	identity  identity.Module
}

// This module is a WIP, don't take inspiration from this.
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config) root.Module {
func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing emailing.Emailing, providerSettings factory.ProviderSettings, orgSetter organization.Setter, authz authz.AuthZ, analytics analytics.Analytics, config user.Config, identity identity.Module) root.Module {
	settings := factory.NewScopedProviderSettings(providerSettings, "github.com/SigNoz/signoz/pkg/modules/user/impluser")
	return &Module{
		store: store,
@@ -47,6 +49,7 @@ func NewModule(store types.UserStore, tokenizer tokenizer.Tokenizer, emailing em
		analytics: analytics,
		authz:     authz,
		config:    config,
		identity:  identity,
	}
}

@@ -56,7 +59,7 @@ func (m *Module) AcceptInvite(ctx context.Context, token string, password string
		return nil, err
	}

	user, err := types.NewUser(invite.Name, invite.Email, invite.Role, invite.OrgID)
	user, err := types.NewUser(invite.Name, invite.Email, invite.OrgID)
	if err != nil {
		return nil, err
	}
@@ -66,7 +69,7 @@
		return nil, err
	}

	err = m.CreateUser(ctx, user, root.WithFactorPassword(factorPassword))
	err = m.CreateUser(ctx, user, root.WithFactorPassword(factorPassword), root.WithRole(invite.Role))
	if err != nil {
		return nil, err
	}
@@ -103,6 +106,12 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
		return nil, err
	}

	if existingUser != nil {
		if err := existingUser.ErrIfRoot(); err != nil {
			return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
		}
	}

	if existingUser != nil {
		return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
	}
@@ -166,14 +175,21 @@ func (m *Module) DeleteInvite(ctx context.Context, orgID string, id valuer.UUID)

func (module *Module) CreateUser(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
	createUserOpts := root.NewCreateUserOptions(opts...)
	role := createUserOpts.Role

	// since assign is idempotent, multiple calls to assign won't cause issues in case of retries.
	err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(input.Role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
	err := module.authz.Grant(ctx, input.OrgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(role), authtypes.MustNewSubject(authtypes.TypeableUser, input.ID.StringValue(), input.OrgID, nil))
	if err != nil {
		return err
	}

	if err := module.store.RunInTx(ctx, func(ctx context.Context) error {
		// Create identity with roles via identity module
		if err := module.identity.CreateIdentityWithRoles(ctx, input.ID, input.OrgID, []types.Role{role}); err != nil {
			return err
		}

		// Create user
		if err := module.store.CreateUser(ctx, input); err != nil {
			return err
		}
@@ -189,7 +205,7 @@
		return err
	}

	traitsOrProperties := types.NewTraitsFromUser(input)
	traitsOrProperties := types.NewTraitsFromUser(input, []types.Role{role})
	module.analytics.IdentifyUser(ctx, input.OrgID.String(), input.ID.String(), traitsOrProperties)
	module.analytics.TrackUser(ctx, input.OrgID.String(), input.ID.String(), "User Created", traitsOrProperties)

@@ -202,66 +218,34 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
		return nil, err
	}

	requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
	if err != nil {
	if err := existingUser.ErrIfRoot(); err != nil {
		return nil, errors.WithAdditionalf(err, "cannot update root user")
	}

	existingUser.Update(user.DisplayName)
	if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
		return nil, err
	}

	// only displayName, role can be updated
	if user.DisplayName == "" {
		user.DisplayName = existingUser.DisplayName
	return existingUser, nil
}

func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
	if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
		return err
	}

	if user.Role == "" {
		user.Role = existingUser.Role
	// Get roles from identity module for analytics
	roles, _ := module.identity.GetRoles(ctx, user.IdentityID)
	traits := types.NewTraitsFromUser(user, roles)
	module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
	module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)

	if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
		return err
	}

	if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
		return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
	}

	// Make sure that the request is not demoting the last admin user.
	// Also, an admin user can only change the role of their own or another user.
	if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
		adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
		if err != nil {
			return nil, err
		}

		if len(adminUsers) == 1 {
			return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot demote the last admin")
		}
	}

	if user.Role != existingUser.Role {
		err = m.authz.ModifyGrant(ctx,
			orgID,
			roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
			roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role),
			authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil),
		)
		if err != nil {
			return nil, err
		}
	}

	user.UpdatedAt = time.Now()
	updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
	if err != nil {
		return nil, err
	}

	traits := types.NewTraitsFromUser(updatedUser)
	m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)

	traits["updated_by"] = updatedBy
	m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)

	if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
		return nil, err
	}

	return updatedUser, nil
	return nil
}

func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
@@ -270,23 +254,45 @@
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot delete root user")
	}

	if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
	}

	// don't allow deleting the last admin user
	adminUsers, err := module.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
	// Get user's roles from identity module
	userRoles, err := module.identity.GetRoles(ctx, user.IdentityID)
	if err != nil {
		return err
	}

	if len(adminUsers) == 1 && user.Role == types.RoleAdmin {
		return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
	// Check if user has admin role
	hasAdminRole := slices.Contains(userRoles, types.RoleAdmin)

	// don't allow deleting the last admin user
	if hasAdminRole {
		adminCount, err := module.identity.CountByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
		if err != nil {
			return err
		}

		if adminCount == 1 {
			return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot delete the last admin")
		}
	}

	// since revoke is idempotent, multiple calls to revoke won't cause issues in case of retries
	err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(user.Role), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
	if err != nil {
	// Revoke all roles for the user
	for _, userRole := range userRoles {
		err = module.authz.Revoke(ctx, orgID, roletypes.MustGetSigNozManagedRoleFromExistingRole(userRole), authtypes.MustNewSubject(authtypes.TypeableUser, id, orgID, nil))
		if err != nil {
			return err
		}
	}

	// Delete identity and identity_role records
	if err := module.identity.DeleteIdentity(ctx, user.IdentityID); err != nil {
		return err
	}

@@ -302,6 +308,15 @@
}

func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID valuer.UUID) (*types.ResetPasswordToken, error) {
	user, err := module.store.GetUser(ctx, userID)
	if err != nil {
		return nil, err
	}

	if err := user.ErrIfRoot(); err != nil {
		return nil, errors.WithAdditionalf(err, "cannot reset password for root user")
	}

	password, err := module.store.GetPasswordByUserID(ctx, userID)
	if err != nil {
		if !errors.Ast(err, errors.TypeNotFound) {
@@ -364,6 +379,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot reset password for root user")
	}

	token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
	if err != nil {
		module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
@@ -407,6 +426,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
		return err
	}

	user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
	if err != nil {
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot reset password for root user")
	}

	if err := password.Update(passwd); err != nil {
		return err
	}
@@ -415,6 +443,15 @@
}

func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
	user, err := module.store.GetUser(ctx, userID)
	if err != nil {
		return err
	}

	if err := user.ErrIfRoot(); err != nil {
		return errors.WithAdditionalf(err, "cannot change password for root user")
	}

	password, err := module.store.GetPasswordByUserID(ctx, userID)
	if err != nil {
		return err
@@ -476,7 +513,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
}

func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
	user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
	user, err := types.NewRootUser(name, email, organization.ID)
	if err != nil {
		return nil, err
	}
@@ -505,7 +542,7 @@ func (module *Module) CreateFirstUser(ctx context.Context, organization *types.O
		return err
	}

	err = module.createUserWithoutGrant(ctx, user, root.WithFactorPassword(password))
	err = module.createUserWithoutGrant(ctx, user, root.WithFactorPassword(password), root.WithRole(types.RoleAdmin))
	if err != nil {
		return err
	}
@@ -535,7 +572,15 @@ func (module *Module) Collect(ctx context.Context, orgID valuer.UUID) (map[strin

func (module *Module) createUserWithoutGrant(ctx context.Context, input *types.User, opts ...root.CreateUserOption) error {
	createUserOpts := root.NewCreateUserOptions(opts...)
	role := createUserOpts.Role

	if err := module.store.RunInTx(ctx, func(ctx context.Context) error {
		// Create identity with roles via identity module
		if err := module.identity.CreateIdentityWithRoles(ctx, input.ID, input.OrgID, []types.Role{role}); err != nil {
			return err
		}

		// Create user
		if err := module.store.CreateUser(ctx, input); err != nil {
			return err
		}
@@ -551,7 +596,7 @@ func (module *Module) createUserWithoutGrant(ctx context.Context, input *types.U
		return err
	}

	traitsOrProperties := types.NewTraitsFromUser(input)
	traitsOrProperties := types.NewTraitsFromUser(input, []types.Role{role})
	module.analytics.IdentifyUser(ctx, input.OrgID.String(), input.ID.String(), traitsOrProperties)
	module.analytics.TrackUser(ctx, input.OrgID.String(), input.ID.String(), "User Created", traitsOrProperties)
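The delete path above replaces a role-column query with a count from the identity module. Isolated, the last-admin guard reduces to the following sketch (same interface names as above; the helper name is illustrative):

package userexample

import (
	"context"

	"github.com/SigNoz/signoz/pkg/modules/identity"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// canDeleteAdmin reports whether an admin can be removed without leaving the
// org admin-less, mirroring the adminCount == 1 check in DeleteUser.
func canDeleteAdmin(ctx context.Context, mod identity.Module, orgID valuer.UUID) (bool, error) {
	adminCount, err := mod.CountByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
	if err != nil {
		return false, err
	}
	return adminCount > 1, nil
}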
pkg/modules/user/impluser/service.go (new file, 211 lines)
@@ -0,0 +1,211 @@
package impluser

import (
	"context"
	"slices"
	"time"

	"github.com/SigNoz/signoz/pkg/authz"
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/modules/identity"
	"github.com/SigNoz/signoz/pkg/modules/organization"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/types/authtypes"
	"github.com/SigNoz/signoz/pkg/types/roletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

type service struct {
	settings  factory.ScopedProviderSettings
	store     types.UserStore
	module    user.Module
	orgGetter organization.Getter
	authz     authz.AuthZ
	identity  identity.Module
	config    user.RootConfig
	stopC     chan struct{}
}

func NewService(
	providerSettings factory.ProviderSettings,
	store types.UserStore,
	module user.Module,
	orgGetter organization.Getter,
	authz authz.AuthZ,
	identity identity.Module,
	config user.RootConfig,
) user.Service {
	return &service{
		settings:  factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
		store:     store,
		module:    module,
		orgGetter: orgGetter,
		authz:     authz,
		identity:  identity,
		config:    config,
		stopC:     make(chan struct{}),
	}
}

func (s *service) Start(ctx context.Context) error {
	if !s.config.Enabled {
		<-s.stopC
		return nil
	}

	ticker := time.NewTicker(10 * time.Second)
	defer ticker.Stop()

	for {
		err := s.reconcile(ctx)
		if err == nil {
			s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
			<-s.stopC
			return nil
		}

		s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)

		select {
		case <-s.stopC:
			return nil
		case <-ticker.C:
			continue
		}
	}
}

func (s *service) Stop(ctx context.Context) error {
	close(s.stopC)
	return nil
}

func (s *service) reconcile(ctx context.Context) error {
	org, err := s.orgGetter.GetByName(ctx, s.config.Org.Name)
	if err != nil {
		if errors.Ast(err, errors.TypeNotFound) {
			newOrg := types.NewOrganization(s.config.Org.Name, s.config.Org.Name)
			_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
			return err
		}

		return err
	}

	return s.reconcileRootUser(ctx, org.ID)
}

func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
	existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return err
	}

	if existingRoot == nil {
		return s.createOrPromoteRootUser(ctx, orgID)
	}

	return s.updateExistingRootUser(ctx, orgID, existingRoot)
}

func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
	existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
	if err != nil && !errors.Ast(err, errors.TypeNotFound) {
		return err
	}

	if existingUser != nil {
		// Get user's current roles from identity module
		userRoles, err := s.identity.GetRoles(ctx, existingUser.IdentityID)
		if err != nil {
			return err
		}

		existingUser.PromoteToRoot()
		if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
			return err
		}

		// If user doesn't have admin role, modify the grant and update identity roles
		if !slices.Contains(userRoles, types.RoleAdmin) {
			// Update authz grants and identity roles
			for _, oldRole := range userRoles {
				// Update authz grant
				if err := s.authz.ModifyGrant(ctx,
					orgID,
					roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
					roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
					authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
				); err != nil {
					return err
				}

				// Update identity role
				if err := s.identity.RemoveRole(ctx, existingUser.IdentityID, orgID, oldRole); err != nil {
					return err
				}
			}

			// Add admin role to identity
			if err := s.identity.AddRole(ctx, existingUser.IdentityID, orgID, types.RoleAdmin); err != nil {
				return err
			}
		}

		return s.setPassword(ctx, existingUser.ID)
	}

	// Create new root user
	newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
	if err != nil {
		return err
	}

	factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
	if err != nil {
		return err
	}

	return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword), user.WithRole(types.RoleAdmin))
}

func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
	existingRoot.PromoteToRoot()

	if existingRoot.Email != s.config.Email {
		existingRoot.UpdateEmail(s.config.Email)
		if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
			return err
		}
	}

	return s.setPassword(ctx, existingRoot.ID)
}

func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
	password, err := s.store.GetPasswordByUserID(ctx, userID)
	if err != nil {
		if !errors.Ast(err, errors.TypeNotFound) {
			return err
		}

		factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
		if err != nil {
			return err
		}

		return s.store.CreatePassword(ctx, factorPassword)
	}

	if !password.Equals(s.config.Password) {
		if err := password.Update(s.config.Password); err != nil {
			return err
		}

		return s.store.UpdatePassword(ctx, password)
	}

	return nil
}
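One lifecycle detail worth noting in the service above: even when root provisioning is disabled, Start blocks on stopC rather than returning immediately, which appears to keep the service conforming to the uniform factory.Service start/stop contract; Stop's close(stopC) is what releases it, and the 10-second ticker only drives retries after a failed reconcile.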
@@ -192,38 +192,23 @@ func (store *store) GetUserByEmailAndOrgID(ctx context.Context, email valuer.Ema
	return user, nil
}

func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role, orgID valuer.UUID) ([]*types.User, error) {
	var users []*types.User

	err := store.
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
	_, err := store.
		sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model(&users).
		Where("org_id = ?", orgID).
		Where("role = ?", role).
		Scan(ctx)
	if err != nil {
		return nil, err
	}

	return users, nil
}

func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
	user.UpdatedAt = time.Now()
	_, err := store.sqlstore.BunDB().NewUpdate().
		NewUpdate().
		Model(user).
		Column("display_name").
		Column("role").
		Column("email").
		Column("is_root").
		Column("updated_at").
		Where("id = ?", id).
		Where("org_id = ?", orgID).
		Where("id = ?", user.ID).
		Exec(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
		return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
	}
	return user, nil
	return nil
}

func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
@@ -602,6 +587,23 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
	})
}

func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
	user := new(types.User)
	err := store.
		sqlstore.
		BunDBCtx(ctx).
		NewSelect().
		Model(user).
		Where("org_id = ?", orgID).
		Where("is_root = ?", true).
		Scan(ctx)
	if err != nil {
		return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
	}

	return user, nil
}

func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
	users := []*types.User{}
	err := store.
@@ -7,6 +7,7 @@ import (

type createUserOptions struct {
	FactorPassword *types.FactorPassword
	Role           types.Role
}

type CreateUserOption func(*createUserOptions)
@@ -17,9 +18,16 @@ func WithFactorPassword(factorPassword *types.FactorPassword) CreateUserOption {
	}
}

func WithRole(role types.Role) CreateUserOption {
	return func(o *createUserOptions) {
		o.Role = role
	}
}

func NewCreateUserOptions(opts ...CreateUserOption) *createUserOptions {
	o := &createUserOptions{
		FactorPassword: nil,
		Role:           types.RoleViewer, // default role
	}

	for _, opt := range opts {
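This is Go's functional-options pattern: zero options yield a viewer with no password factor, and callers layer on only what they need. A usage sketch mirroring how CreateFirstUser composes the options above (the wrapper function is illustrative):

package useroptionsexample

import (
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types"
)

// buildAdminOpts composes the options an admin-creation path passes along;
// omitting both options would fall back to types.RoleViewer.
func buildAdminOpts(factorPassword *types.FactorPassword) []user.CreateUserOption {
	return []user.CreateUserOption{
		user.WithFactorPassword(factorPassword),
		user.WithRole(types.RoleAdmin),
	}
}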
pkg/modules/user/service.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package user

import "github.com/SigNoz/signoz/pkg/factory"

type Service interface {
	factory.Service
}
@@ -34,6 +34,9 @@ type Module interface {
	ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error

	UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)

	// UpdateAnyUser updates a user and persists the changes to the database along with the analytics and identity deletion.
	UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
	DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error

	// invite
@@ -54,6 +57,9 @@ type Module interface {
}

type Getter interface {
	// GetRootUserByOrgID gets the root user for the given org id.
	GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)

	// ListByOrgID gets the users for the given org id.
	ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
@@ -183,7 +183,7 @@ type APIHandlerOpts struct {
}

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
	querierOpts := querier.QuerierOptions{
		Reader: opts.Reader,
		Cache:  opts.Signoz.Cache,

@@ -270,6 +270,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
		}
	}

	// If the root user is enabled, the setup is complete
	if config.User.Root.Enabled {
		aH.SetupCompleted = true
	}

	aH.Upgrader = &websocket.Upgrader{
		CheckOrigin: func(r *http.Request) bool {
			return true

@@ -2045,7 +2050,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
		return
	}

	organization := types.NewOrganization(req.OrgDisplayName, req.OrgName)
	user, errv2 := aH.Signoz.Modules.User.CreateFirstUser(r.Context(), organization, req.Name, req.Email, req.Password)
	if errv2 != nil {
		render.Error(w, errv2)
@@ -135,7 +135,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		Signoz:         signoz,
		QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	}, config)
	if err != nil {
		return nil, err
	}
@@ -43,11 +43,11 @@ var (
// FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
	switch u {
	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
		return DurationConverter
	case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
		return DataConverter
	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
		return DataRateConverter
	case "percent", "percentunit", "%":
		return PercentConverter
@@ -58,36 +58,80 @@ func (*dataConverter) Name() string {
	return "data"
}

// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataUnit(u Unit) float64 {
	switch u {
	case "bytes", "By": // base 2
		return Byte
	case "decbytes": // base 10
		return Byte
	case "bits", "bit": // base 2
		return Bit
	case "decbits": // base 10
		return Bit
	case "kbytes", "KiBy": // base 2
		return Kibibyte
	case "decKbytes", "deckbytes", "kBy": // base 10
		return Kilobyte
	case "mbytes", "MiBy": // base 2
		return Mebibyte
	case "decMbytes", "decmbytes", "MBy": // base 10
		return Megabyte
	case "gbytes", "GiBy": // base 2
		return Gibibyte
	case "decGbytes", "decgbytes", "GBy": // base 10
		return Gigabyte
	case "tbytes", "TiBy": // base 2
		return Tebibyte
	case "decTbytes", "dectbytes", "TBy": // base 10
		return Terabyte
	case "pbytes", "PiBy": // base 2
		return Pebibyte
	case "decPbytes", "decpbytes", "PBy": // base 10
		return Petabyte
	case "EBy": // base 10
		return Exabyte
	case "ZBy": // base 10
		return Zettabyte
	case "YBy": // base 10
		return Yottabyte
	case "Kibit": // base 2
		return Kibibit
	case "Mibit": // base 2
		return Mebibit
	case "Gibit": // base 2
		return Gibibit
	case "Tibit": // base 2
		return Tebibit
	case "Pibit": // base 2
		return Pebibit
	case "EiBy": // base 2
		return Exbibyte
	case "ZiBy": // base 2
		return Zebibyte
	case "YiBy": // base 2
		return Yobibyte
	case "kbit": // base 10
		return Kilobit
	case "Mbit": // base 10
		return Megabit
	case "Gbit": // base 10
		return Gigabit
	case "Tbit": // base 10
		return Terabit
	case "Pbit": // base 10
		return Petabit
	case "Ebit": // base 10
		return Exabit
	case "Zbit": // base 10
		return Zettabit
	case "Ybit": // base 10
		return Yottabit
	default:
		return 1
	}
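A minimal worked example of how these base factors compose into a conversion (hypothetical, self-contained constants; the converter presumably multiplies by the source factor and divides by the target factor, which is what the tests further down imply):

package main

import "fmt"

// Stand-ins for a few of the converter package's factors, expressed in bits.
const (
	Bit      = 1.0
	Byte     = 8 * Bit
	Kibibyte = 1024 * Byte
	Mebibyte = 1024 * Kibibyte
	Kilobyte = 1000 * Byte
)

// fromDataUnit is a reduced version of the lookup above.
func fromDataUnit(u string) float64 {
	switch u {
	case "By":
		return Byte
	case "KiBy":
		return Kibibyte
	case "MiBy":
		return Mebibyte
	case "kBy":
		return Kilobyte
	default:
		return 1
	}
}

func main() {
	// 2048 KiBy -> MiBy: scale to the base unit, then to the target.
	fmt.Println(2048 * fromDataUnit("KiBy") / fromDataUnit("MiBy")) // 2
	// 1 KiBy -> kBy: binary and SI prefixes differ by 1024/1000.
	fmt.Println(1 * fromDataUnit("KiBy") / fromDataUnit("kBy")) // 1.024
}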
@@ -54,6 +54,12 @@ func (*dataRateConverter) Name() string {
	return "data_rate"
}

// Notation followed by UCUM:
// https://ucum.org/ucum
// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
// kilo = k, mega = M, giga = G, tera = T, peta = P
// exa = E, zetta = Z, yotta = Y
// byte = By, bit = bit
func FromDataRateUnit(u Unit) float64 {
	// See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
	switch u {

@@ -65,46 +71,70 @@ func FromDataRateUnit(u Unit) float64 {
		return BitPerSecond
	case "bps", "bit/s": // bits/sec(SI)
		return BitPerSecond
	case "KiBs", "KiBy/s": // kibibytes/sec
		return KibibytePerSecond
	case "Kibits", "Kibit/s": // kibibits/sec
		return KibibitPerSecond
	case "KBs", "kBy/s": // kilobytes/sec
		return KilobytePerSecond
	case "Kbits", "kbit/s": // kilobits/sec
		return KilobitPerSecond
	case "MiBs", "MiBy/s": // mebibytes/sec
		return MebibytePerSecond
	case "Mibits", "Mibit/s": // mebibits/sec
		return MebibitPerSecond
	case "MBs", "MBy/s": // megabytes/sec
		return MegabytePerSecond
	case "Mbits", "Mbit/s": // megabits/sec
		return MegabitPerSecond
	case "GiBs", "GiBy/s": // gibibytes/sec
		return GibibytePerSecond
	case "Gibits", "Gibit/s": // gibibits/sec
		return GibibitPerSecond
	case "GBs", "GBy/s": // gigabytes/sec
		return GigabytePerSecond
	case "Gbits", "Gbit/s": // gigabits/sec
		return GigabitPerSecond
	case "TiBs", "TiBy/s": // tebibytes/sec
		return TebibytePerSecond
	case "Tibits", "Tibit/s": // tebibits/sec
		return TebibitPerSecond
	case "TBs", "TBy/s": // terabytes/sec
		return TerabytePerSecond
	case "Tbits", "Tbit/s": // terabits/sec
		return TerabitPerSecond
	case "PiBs", "PiBy/s": // pebibytes/sec
		return PebibytePerSecond
	case "Pibits", "Pibit/s": // pebibits/sec
		return PebibitPerSecond
	case "PBs", "PBy/s": // petabytes/sec
		return PetabytePerSecond
	case "Pbits", "Pbit/s": // petabits/sec
		return PetabitPerSecond
	case "EBy/s": // exabytes/sec
		return ExabytePerSecond
	case "Ebit/s": // exabits/sec
		return ExabitPerSecond
	case "EiBy/s": // exbibytes/sec
		return ExbibytePerSecond
	case "Eibit/s": // exbibits/sec
		return ExbibitPerSecond
	case "ZBy/s": // zettabytes/sec
		return ZettabytePerSecond
	case "Zbit/s": // zettabits/sec
		return ZettabitPerSecond
	case "ZiBy/s": // zebibytes/sec
		return ZebibytePerSecond
	case "Zibit/s": // zebibits/sec
		return ZebibitPerSecond
	case "YBy/s": // yottabytes/sec
		return YottabytePerSecond
	case "Ybit/s": // yottabits/sec
		return YottabitPerSecond
	case "YiBy/s": // yobibytes/sec
		return YobibytePerSecond
	case "Yibit/s": // yobibits/sec
		return YobibitPerSecond
	default:
		return 1
	}
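The same factor arithmetic covers the byte-to-bit crossovers asserted in the rate tests below (e.g. 1 KiBy/s = 8 Kibit/s). A hypothetical check with stand-in constants:

package main

import "fmt"

// Stand-ins for the rate factors, expressed in bits per second.
const (
	BitPerSecond      = 1.0
	BytePerSecond     = 8 * BitPerSecond
	KibibitPerSecond  = 1024 * BitPerSecond
	KibibytePerSecond = 1024 * BytePerSecond
)

func main() {
	// 1 KiBy/s expressed in Kibit/s: bytes carry a factor of 8 over bits.
	fmt.Println(1 * KibibytePerSecond / KibibitPerSecond) // 8
}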
@@ -75,3 +75,83 @@ func TestDataRate(t *testing.T) {
	// 1024 * 1024 * 1024 bytes = 1 gbytes
	assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
}

func TestDataRateConversionUCUMUnit(t *testing.T) {
	dataRateConverter := NewDataRateConverter()

	tests := []struct {
		name     string
		input    Value
		toUnit   Unit
		expected Value
	}{
		// Binary byte scaling
		{name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
		{name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
		{name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
		{name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
		{name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
		{name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
		{name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
		{name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
		{name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
		{name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
		// Binary bit scaling
		{name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
		{name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
		{name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
		{name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
		{name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
		{name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
		{name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
		{name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
		{name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
		{name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
		// Bytes to bits
		{name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
		{name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
		{name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
		// Unit alias
		{name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
		{name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
		// SI byte scaling (Exa, Zetta, Yotta)
		{name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
		{name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
		{name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
		{name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
		{name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
		{name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
		// Binary byte scaling (Exbi, Zebi, Yobi)
		{name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
		{name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
		{name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
		{name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
		{name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
		{name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
		// SI bit scaling (Exa, Zetta, Yotta)
		{name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
		{name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
		{name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
		{name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
		{name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
		{name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
		// Binary bit scaling (Exbi, Zebi, Yobi)
		{name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
		{name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
		{name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
		{name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
		{name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
		{name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
		// Bytes to bits (Exbi, Zebi, Yobi)
		{name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
		{name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
		{name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := dataRateConverter.Convert(tt.input, tt.toUnit)
			assert.Equal(t, tt.expected, got)
		})
	}
}
@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
	assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
	// 1024 bytes = 1 kbytes
	assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
	// 1000 bytes = 1 kBy
	assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
	// 1 byte = 8 bits
	assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
	// 1 mbytes = 1024 kbytes

@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
	assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
	// 1024 kbytes = 1 mbytes
	assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
	// 1000 kBy = 1 MBy
	assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
	// 1 mbytes = 1024 * 1024 bytes
	assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
	// 1024 mbytes = 1 gbytes

@@ -45,10 +45,90 @@ func TestData(t *testing.T) {
	assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
	// 1024 tbytes = 1 pbytes
	assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
	// 1024 tbytes = 1 PiBy
	assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
	// 1 pbytes = 1024 tbytes
	assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
	// 1 pbytes = 1024 TiBy
	assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
}
func TestDataConversionUCUMUnit(t *testing.T) {
|
||||
dataConverter := NewDataConverter()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
input Value
|
||||
toUnit Unit
|
||||
expected Value
|
||||
}{
|
||||
// Bits to bytes
|
||||
{name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
|
||||
{name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
|
||||
// Binary byte scaling
|
||||
{name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
|
||||
{name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
|
||||
{name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
|
||||
{name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
|
||||
{name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
|
||||
{name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
|
||||
{name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
|
||||
{name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
|
||||
{name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
|
||||
{name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
|
||||
// SI bit scaling
|
||||
{name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
|
||||
{name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
|
||||
{name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
|
||||
{name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
|
||||
{name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
|
||||
{name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
|
||||
{name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
|
||||
{name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
|
||||
{name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
|
||||
{name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
|
||||
// Binary bit scaling
|
||||
{name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
|
||||
{name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
|
||||
{name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
|
||||
{name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
|
||||
{name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
|
||||
{name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
|
||||
{name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
|
||||
{name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
|
||||
{name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
|
||||
{name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
|
||||
// Bytes to bits
|
||||
{name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
|
||||
{name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
|
||||
{name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
|
||||
// SI byte scaling (Exa, Zetta, Yotta)
|
||||
{name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
|
||||
{name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
|
||||
{name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
|
||||
{name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
|
||||
{name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
|
||||
{name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
|
||||
// Binary byte scaling (Exbi, Zebi, Yobi)
|
||||
{name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
|
||||
{name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
|
||||
{name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
|
||||
{name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
|
||||
{name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
|
||||
{name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
|
||||
// SI bit scaling (Exa, Zetta, Yotta)
|
||||
{name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
|
||||
{name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
|
||||
{name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
|
||||
{name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
|
||||
{name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
|
||||
{name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := dataConverter.Convert(tt.input, tt.toUnit)
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
		return Hour
	case "d":
		return Day
	case "w", "wk":
		return Week
	default:
		return Second
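A quick sanity check of the new "wk" alias using stand-in duration factors (hypothetical constants mirroring FromTimeUnit; the identities match the assertions in the test hunk that follows):

package main

import "fmt"

const (
	Second = 1.0
	Minute = 60 * Second
	Hour   = 60 * Minute
	Day    = 24 * Hour
	Week   = 7 * Day
)

func main() {
	fmt.Println(604800 * Second / Week) // 1 (604800 s = 1 wk)
	fmt.Println(1 * Week / Hour)        // 168 (1 wk = 168 h)
	fmt.Println(1 * Week / Day)         // 7 (1 wk = 7 d)
}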
@@ -54,4 +54,13 @@ func TestDurationConvert(t *testing.T) {
	assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
	// 1000000000 ns = 1 s
	assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))

	// 7 d = 1 wk
	assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
	// 1 wk = 7 d
	assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
	// 1 wk = 168 h
	assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
	// 604800 s = 1 wk
	assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
}
@@ -24,30 +24,62 @@ func (f *dataFormatter) Format(value float64, unit string) string {
		return humanize.IBytes(uint64(value))
	case "decbytes":
		return humanize.Bytes(uint64(value))
	case "bits", "bit":
		// humanize.IBytes/Bytes doesn't support bits
		// and returns 0 B for values less than a byte
		if value < 8 {
			return fmt.Sprintf("%v b", value)
		}
		return humanize.IBytes(uint64(value / 8))
	case "decbits":
		if value < 8 {
			return fmt.Sprintf("%v b", value)
		}
		return humanize.Bytes(uint64(value / 8))
	case "kbytes", "KiBy":
		return humanize.IBytes(uint64(value * converter.Kibibit))
	case "Kibit":
		return humanize.IBytes(uint64(value * converter.Kibibit / 8))
	case "decKbytes", "deckbytes", "kBy":
		return humanize.Bytes(uint64(value * converter.Kilobit))
	case "kbit":
		return humanize.Bytes(uint64(value * converter.Kilobit / 8))
	case "mbytes", "MiBy":
		return humanize.IBytes(uint64(value * converter.Mebibit))
	case "Mibit":
		return humanize.IBytes(uint64(value * converter.Mebibit / 8))
	case "decMbytes", "decmbytes", "MBy":
		return humanize.Bytes(uint64(value * converter.Megabit))
	case "Mbit":
		return humanize.Bytes(uint64(value * converter.Megabit / 8))
	case "gbytes", "GiBy":
		return humanize.IBytes(uint64(value * converter.Gibibit))
	case "Gibit":
		return humanize.IBytes(uint64(value * converter.Gibibit / 8))
	case "decGbytes", "decgbytes", "GBy":
		return humanize.Bytes(uint64(value * converter.Gigabit))
	case "Gbit":
		return humanize.Bytes(uint64(value * converter.Gigabit / 8))
	case "tbytes", "TiBy":
		return humanize.IBytes(uint64(value * converter.Tebibit))
	case "Tibit":
		return humanize.IBytes(uint64(value * converter.Tebibit / 8))
	case "decTbytes", "dectbytes", "TBy":
		return humanize.Bytes(uint64(value * converter.Terabit))
	case "Tbit":
		return humanize.Bytes(uint64(value * converter.Terabit / 8))
	case "pbytes", "PiBy":
		return humanize.IBytes(uint64(value * converter.Pebibit))
	case "Pbit":
		return humanize.Bytes(uint64(value * converter.Petabit / 8))
	case "decPbytes", "decpbytes", "PBy":
		return humanize.Bytes(uint64(value * converter.Petabit))
	case "EiBy":
		return humanize.IBytes(uint64(value * converter.Exbibit))
	case "Ebit":
		return humanize.Bytes(uint64(value * converter.Exabit / 8))
	case "EBy":
		return humanize.Bytes(uint64(value * converter.Exabit))
	}
	// When unit is not matched, return the value as it is.
	return fmt.Sprintf("%v", value)
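The `value < 8` guard above exists because go-humanize has no bit-sized unit: without it, `IBytes(uint64(7 / 8))` would print "0 B" for sub-byte bit counts. A reduced, runnable illustration (hypothetical helper name):

package main

import (
	"fmt"

	"github.com/dustin/go-humanize"
)

// formatBits mirrors the bits branch of the formatter above.
func formatBits(value float64) string {
	if value < 8 {
		return fmt.Sprintf("%v b", value) // 7 -> "7 b", not "0 B"
	}
	return humanize.IBytes(uint64(value / 8))
}

func main() {
	fmt.Println(formatBits(7))                   // 7 b
	fmt.Println(formatBits(8 * 1024 * 1024 * 9)) // 9.0 MiB
}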
@@ -25,49 +25,66 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
	case "Bps", "By/s":
		return humanize.Bytes(uint64(value)) + "/s"
	case "binbps":
		// humanize.IBytes/Bytes doesn't support bits
		// and returns 0 B for values less than a byte
		if value < 8 {
			return fmt.Sprintf("%v b/s", value)
		}
		return humanize.IBytes(uint64(value/8)) + "/s"
	case "bps", "bit/s":
		if value < 8 {
			return fmt.Sprintf("%v b/s", value)
		}
		return humanize.Bytes(uint64(value/8)) + "/s"
	case "KiBs", "KiBy/s":
		return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
	case "Kibits", "Kibit/s":
		return humanize.IBytes(uint64(value*converter.KibibitPerSecond/8)) + "/s"
	case "KBs", "kBy/s":
		return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
	case "Kbits", "kbit/s":
		return humanize.Bytes(uint64(value*converter.KilobitPerSecond/8)) + "/s"
	case "MiBs", "MiBy/s":
		return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
	case "Mibits", "Mibit/s":
		return humanize.IBytes(uint64(value*converter.MebibitPerSecond/8)) + "/s"
	case "MBs", "MBy/s":
		return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
	case "Mbits", "Mbit/s":
		return humanize.Bytes(uint64(value*converter.MegabitPerSecond/8)) + "/s"
	case "GiBs", "GiBy/s":
		return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
	case "Gibits", "Gibit/s":
		return humanize.IBytes(uint64(value*converter.GibibitPerSecond/8)) + "/s"
	case "GBs", "GBy/s":
		return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
	case "Gbits", "Gbit/s":
		return humanize.Bytes(uint64(value*converter.GigabitPerSecond/8)) + "/s"
	case "TiBs", "TiBy/s":
		return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
	case "Tibits", "Tibit/s":
		return humanize.IBytes(uint64(value*converter.TebibitPerSecond/8)) + "/s"
	case "TBs", "TBy/s":
		return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
	case "Tbits", "Tbit/s":
		return humanize.Bytes(uint64(value*converter.TerabitPerSecond/8)) + "/s"
	case "PiBs", "PiBy/s":
		return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
	case "Pibits", "Pibit/s":
		return humanize.IBytes(uint64(value*converter.PebibitPerSecond/8)) + "/s"
	case "PBs", "PBy/s":
		return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
	case "Pbits", "Pbit/s":
		return humanize.Bytes(uint64(value*converter.PetabitPerSecond/8)) + "/s"
	// Exa units
	case "EBy/s":
		return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
	case "Ebit/s":
		return humanize.Bytes(uint64(value*converter.ExabitPerSecond/8)) + "/s"
	case "EiBy/s":
		return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
	case "Eibit/s":
		return humanize.IBytes(uint64(value*converter.ExbibitPerSecond/8)) + "/s"
	}
	// When unit is not matched, return the value as it is.
	return fmt.Sprintf("%v", value)
pkg/query-service/formatter/data_rate_test.go (new file, 150 lines)
@@ -0,0 +1,150 @@
package formatter

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestDataRateFormatterComprehensive(t *testing.T) {
	dataRateFormatter := NewDataRateFormatter()

	tests := []struct {
		name     string
		value    float64
		unit     string
		expected string
	}{
		// IEC Bits/sec - binbps, bps
		{name: "binbps as Bps", value: 7, unit: "binbps", expected: "7 b/s"},
		{name: "100 binbps as 12 Bps", value: 100, unit: "binbps", expected: "12 B/s"},
		{name: "binbps as 23 GiBs", value: 8 * 1024 * 1024 * 1024 * 23, unit: "binbps", expected: "23 GiB/s"},

		// SI Bits/sec - bps, bit/s
		{name: "bps as Bps", value: 5, unit: "bps", expected: "5 b/s"},
		{name: "200 bitps as 25 Bps", value: 200, unit: "bit/s", expected: "25 B/s"},
		{name: "bitps as MBs", value: 8 * 1000 * 1000 * 7, unit: "bit/s", expected: "7.0 MB/s"},

		// IEC Base bytes/sec - binBps
		{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
		{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
		{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
		{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
		{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},

		// SI Base bytes/sec - Bps, By/s
		{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
		{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
		{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
		{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},

		// Kibibytes/sec - KiBs, KiBy/s
		{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
		{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
		{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
		{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
		{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
		{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},

		// Kibibits/sec - Kibits, Kibit/s
		{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "128 B/s"},
		{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "5.3 MiB/s"},
		{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"},

		// Kilobytes/sec (SI) - KBs, kBy/s
		{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
		{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
		{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},

		// Kilobits/sec (SI) - Kbits, kbit/s
		{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "125 B/s"},
		{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "125 B/s"},

		// Mebibytes/sec - MiBs, MiBy/s
		{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
		{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
		{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
		{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},

		// Mebibits/sec - Mibits, Mibit/s
		{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "5.0 MiB/s"},
		{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "1.3 MiB/s"},

		// Megabytes/sec (SI) - MBs, MBy/s
		{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
		{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},

		// Megabits/sec (SI) - Mbits, Mbit/s
		{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "125 kB/s"},
		{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "125 kB/s"},

		// Gibibytes/sec - GiBs, GiBy/s
		{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
		{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
		{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},

		// Gibibits/sec - Gibits, Gibit/s
		{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "5.3 TiB/s"},
		{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "5.3 TiB/s"},

		// Gigabytes/sec (SI) - GBs, GBy/s
		{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
		{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},

		// Gigabits/sec (SI) - Gbits, Gbit/s
		{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "5.3 TB/s"},
		{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "5.3 TB/s"},

		// Tebibytes/sec - TiBs, TiBy/s
		{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
		{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
		{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},

		// Tebibits/sec - Tibits, Tibit/s
		{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "5.3 PiB/s"},
		{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "5.3 PiB/s"},

		// Terabytes/sec (SI) - TBs, TBy/s
		{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
		{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},

		// Terabits/sec (SI) - Tbits, Tbit/s
		{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "5.3 PB/s"},
		{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "5.3 PB/s"},

		// Pebibytes/sec - PiBs, PiBy/s
		{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
		{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},

		// Pebibits/sec - Pibits, Pibit/s
		{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "1.3 EiB/s"},
		{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "1.3 EiB/s"},

		// Petabytes/sec (SI) - PBs, PBy/s
		{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
		{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},

		// Petabits/sec (SI) - Pbits, Pbit/s
		{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "5.3 PB/s"},
		{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "5.3 PB/s"},

		// Exabytes/sec (SI) - EBy/s
		{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},

		// Exabits/sec (SI) - Ebit/s
		{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "1.3 EB/s"},

		// Exbibytes/sec (IEC) - EiBy/s
		{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},

		// Exbibits/sec (IEC) - Eibit/s
		{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "1.3 EiB/s"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := dataRateFormatter.Format(tt.value, tt.unit)
			assert.Equal(t, tt.expected, got)
		})
	}
}
@@ -14,21 +14,174 @@ func TestData(t *testing.T) {
	assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
	assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
	assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
	assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
	assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
	assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
	assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
	assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
	assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
	assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
	assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
	assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
	assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
	assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
	assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
	assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
	assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
	assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
	assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
	assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}
func TestDataFormatterComprehensive(t *testing.T) {
|
||||
dataFormatter := NewDataFormatter()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
value float64
|
||||
unit string
|
||||
expected string
|
||||
}{
|
||||
// IEC Bits - bits, bit
|
||||
{name: "bits: 1", value: 1, unit: "bits", expected: "1 b"},
|
||||
{name: "bits: 7", value: 7, unit: "bit", expected: "7 b"},
|
||||
{name: "bits: to MiB = 8 * 1024 * 1024 * 9", value: 8 * 1024 * 1024 * 9, unit: "bits", expected: "9.0 MiB"},
|
||||
|
||||
// SI Bits - decbits, bit
|
||||
{name: "decbits: 1", value: 1, unit: "decbits", expected: "1 b"},
|
||||
{name: "decbits: 7", value: 7, unit: "decbits", expected: "7 b"},
|
||||
{name: "decbits: to MB = 8 * 1000 * 1000 * 4", value: 8 * 1000 * 1000 * 4, unit: "decbits", expected: "4.0 MB"},
|
||||
|
||||
// IEC Base bytes - bytes, By
|
||||
{name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
|
||||
{name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
|
||||
{name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
|
||||
{name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
|
||||
{name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
|
||||
{name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
|
||||
{name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
|
||||
{name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
|
||||
{name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},
|
||||
|
||||
// SI Base bytes - decbytes
|
||||
{name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
|
||||
{name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
|
||||
{name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
|
||||
{name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},
|
||||
|
||||
// Kibibytes - kbytes, KiBy (IEC)
|
||||
{name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
|
||||
{name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
|
||||
{name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
|
||||
{name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
|
||||
{name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
|
||||
{name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
|
||||
{name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
|
||||
{name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
|
||||
{name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},
|
||||
|
||||
// SI Kilobytes - decKbytes, deckbytes, kBy
|
||||
{name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
|
||||
{name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
|
||||
{name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
|
||||
{name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
|
||||
{name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},
|
||||
|
||||
// Mebibytes - mbytes, MiBy (IEC)
|
||||
{name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
|
||||
{name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
|
||||
{name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
|
||||
{name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
|
||||
{name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
|
||||
{name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
|
||||
{name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
|
||||
{name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},
|
||||
|
||||
// SI Megabytes - decMbytes, decmbytes, MBy
|
||||
{name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
|
||||
{name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
|
||||
{name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
|
||||
{name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},
|
||||
|
||||
// Gibibytes - gbytes, GiBy (IEC)
|
||||
{name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
|
||||
{name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
|
||||
{name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},
|
||||
|
||||
// SI Gigabytes - decGbytes, decgbytes, GBy
|
||||
{name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
|
||||
{name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},
|
||||
|
||||
// Tebibytes - tbytes, TiBy (IEC)
|
||||
{name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
|
||||
{name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
|
||||
{name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},
|
||||
|
||||
// SI Terabytes - decTbytes, dectbytes, TBy
|
||||
{name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
|
||||
{name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
|
||||
{name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},
|
||||
|
||||
// Pebibytes - pbytes, PiBy (IEC)
|
||||
{name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
|
||||
{name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},
|
||||
|
||||
// SI Petabytes - decPbytes, decpbytes, PBy
|
||||
{name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
|
||||
{name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
|
||||
{name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},
|
||||
|
||||
// Exbibytes - EiBy (IEC)
|
||||
{name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},
|
||||
|
||||
// Exabytes - EBy (SI)
|
||||
{name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},
|
||||
|
||||
// Kibibits - Kibit (IEC): 1 Kibit = 1024 bits = 128 bytes
|
||||
{name: "Kibit: 1 = 128 B", value: 1, unit: "Kibit", expected: "128 B"},
|
||||
{name: "Kibit: 1024 = 128 KiB", value: 1024, unit: "Kibit", expected: "128 KiB"},
|
||||
|
||||
// Mebibits - Mibit (IEC): 1 Mibit = 1024 Kibit = 128 KiB
|
||||
{name: "Mibit: 1 = 128 KiB", value: 1, unit: "Mibit", expected: "128 KiB"},
|
||||
{name: "Mibit: 1024 = 128 MiB", value: 1024, unit: "Mibit", expected: "128 MiB"},
|
||||
|
||||
// Gibibits - Gibit (IEC): 1 Gibit = 1024 Mibit = 128 MiB
|
||||
{name: "Gibit: 1 = 128 MiB", value: 1, unit: "Gibit", expected: "128 MiB"},
|
||||
{name: "Gibit: 42*1024 = 5.3 TiB", value: 42 * 1024, unit: "Gibit", expected: "5.3 TiB"},
|
||||
|
||||
// Tebibits - Tibit (IEC): 1 Tibit = 1024 Gibit = 128 GiB
|
||||
{name: "Tibit: 1 = 128 GiB", value: 1, unit: "Tibit", expected: "128 GiB"},
|
||||
{name: "Tibit: 42*1024 = 5.3 PiB", value: 42 * 1024, unit: "Tibit", expected: "5.3 PiB"},
|
||||
|
||||
// Kilobits - kbit (SI): 1 kbit = 1000 bits = 125 bytes
|
||||
{name: "kbit: 1 = 125 B", value: 1, unit: "kbit", expected: "125 B"},
|
||||
{name: "kbit: 1000 = 125 kB", value: 1000, unit: "kbit", expected: "125 kB"},
|
||||
|
||||
// Megabits - Mbit (SI): 1 Mbit = 1000 kbit = 125 kB
|
||||
{name: "Mbit: 1 = 125 kB", value: 1, unit: "Mbit", expected: "125 kB"},
|
||||
{name: "Mbit: 1000 = 125 MB", value: 1000, unit: "Mbit", expected: "125 MB"},
|
||||
|
||||
// Gigabits - Gbit (SI): 1 Gbit = 1000 Mbit = 125 MB
|
||||
{name: "Gbit: 1 = 125 MB", value: 1, unit: "Gbit", expected: "125 MB"},
|
||||
{name: "Gbit: 42*1000 = 5.3 TB", value: 42 * 1000, unit: "Gbit", expected: "5.3 TB"},
|
||||
|
||||
// Terabits - Tbit (SI): 1 Tbit = 1000 Gbit = 125 GB
|
||||
{name: "Tbit: 1 = 125 GB", value: 1, unit: "Tbit", expected: "125 GB"},
|
||||
{name: "Tbit: 42*1000 = 5.3 PB", value: 42 * 1000, unit: "Tbit", expected: "5.3 PB"},
|
||||
|
||||
// Petabits - Pbit (SI): 1 Pbit = 1000 Tbit = 125 TB
|
||||
{name: "Pbit: 1 = 125 TB", value: 1, unit: "Pbit", expected: "125 TB"},
|
||||
{name: "Pbit: 42 = 5.3 PB", value: 42, unit: "Pbit", expected: "5.3 PB"},
|
||||
|
||||
// Exabits - Ebit (SI): 1 Ebit = 1000 Pbit = 125 PB
|
||||
{name: "Ebit: 1 = 125 PB", value: 1, unit: "Ebit", expected: "125 PB"},
|
||||
{name: "Ebit: 10 = 1.3 EB", value: 10, unit: "Ebit", expected: "1.3 EB"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := dataFormatter.Format(tt.value, tt.unit)
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
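The bit-unit expectations above all follow one rule: scale the value into bits using the unit's prefix (1024 per step for IEC prefixes, 1000 for SI), divide by 8 to get bytes, then pick a display prefix. A minimal, self-contained sketch of that arithmetic (the helper is illustrative, not the formatter's actual implementation):

```go
package main

import "fmt"

// bitsToBytes is an illustrative helper, not SigNoz code: it converts a
// value expressed in a prefixed bit unit into plain bytes. IEC prefixes
// (Kibit, Mibit, ...) use base 1024; SI prefixes (kbit, Mbit, ...) use
// base 1000. Dividing by 8 converts bits to bytes, which is where the
// "1 Kibit = 128 B" and "1 kbit = 125 B" expectations come from.
func bitsToBytes(value, base float64, exponent int) float64 {
	bits := value
	for i := 0; i < exponent; i++ {
		bits *= base
	}
	return bits / 8
}

func main() {
	fmt.Println(bitsToBytes(1, 1024, 1)) // 1 Kibit -> 128 bytes
	fmt.Println(bitsToBytes(1, 1000, 1)) // 1 kbit  -> 125 bytes
	fmt.Println(bitsToBytes(1, 1000, 2)) // 1 Mbit  -> 125000 bytes (125 kB)
}
```
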
@@ -18,11 +18,11 @@ var (
 func FromUnit(u string) Formatter {
 	switch u {
-	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
+	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
 		return DurationFormatter
-	case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
+	case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
 		return DataFormatter
-	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
+	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
 		return DataRateFormatter
 	case "percent", "percentunit", "%":
 		return PercentFormatter
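FromUnit is a plain token dispatch: each recognized unit string maps to one of a handful of formatters, so supporting a new unit means extending the matching case list. A self-contained sketch of the same pattern (the interface, types, and truncated unit lists here are illustrative stand-ins, not the actual package API):

```go
package main

import "fmt"

// Formatter mirrors the shape of the dispatch in FromUnit above.
type Formatter interface {
	Format(value float64, unit string) string
}

type durationFormatter struct{}

func (durationFormatter) Format(v float64, u string) string {
	return fmt.Sprintf("%g %s (duration)", v, u)
}

type dataFormatter struct{}

func (dataFormatter) Format(v float64, u string) string {
	return fmt.Sprintf("%g %s (data)", v, u)
}

// fromUnit dispatches on the unit token; adding a unit alias such as
// "wk" or "bit" only means growing the relevant case list.
func fromUnit(u string) Formatter {
	switch u {
	case "ns", "us", "ms", "s", "m", "h", "d", "min", "w", "wk":
		return durationFormatter{}
	case "bytes", "bit", "By", "KiBy", "kbit", "Mbit":
		return dataFormatter{}
	}
	return nil
}

func main() {
	fmt.Println(fromUnit("wk").Format(2, "wk")) // routed to the duration formatter
}
```
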
@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
 		return toHours(value)
 	case "d":
 		return toDays(value)
-	case "w":
+	case "w", "wk":
 		return toWeeks(value)
 	}
 	// When unit is not matched, return the value as it is.
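Both "w" and "wk" now reach the same conversion branch, and the comment above tells us an unmatched unit falls through to the raw value. A tiny illustrative sketch of that shape (renderWeeks is a stand-in; the real toWeeks implementation is outside this diff):

```go
package main

import "fmt"

// renderWeeks is an illustrative stand-in for the formatter's toWeeks helper.
func renderWeeks(value float64) string {
	return fmt.Sprintf("%g week", value)
}

func format(value float64, unit string) string {
	switch unit {
	case "w", "wk": // the aliasing added in the hunk above
		return renderWeeks(value)
	}
	// When unit is not matched, return the value as it is.
	return fmt.Sprintf("%g", value)
}

func main() {
	fmt.Println(format(3, "w"))  // 3 week
	fmt.Println(format(3, "wk")) // same branch, same output
}
```
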
@@ -205,7 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
 	key.Indexes = matchingKey.Indexes
 	key.Materialized = matchingKey.Materialized
 	key.JSONPlan = matchingKey.JSONPlan

 	return actions
 } else {
 	// multiple matching keys, set materialized only if all the keys are materialized
@@ -483,6 +483,22 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
 	value1 := v.Visit(values[0])
 	value2 := v.Visit(values[1])

+	switch value1.(type) {
+	case float64:
+		if _, ok := value2.(float64); !ok {
+			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
+			return ""
+		}
+	case string:
+		if _, ok := value2.(string); !ok {
+			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
+			return ""
+		}
+	default:
+		v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
+		return ""
+	}
+
 	var conds []string
 	for _, key := range keys {
 		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
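The added switch enforces that both operands of a two-valued comparison (a range/BETWEEN-style expression, judging by the two visited values) share the same scalar type before any condition is built. A self-contained sketch of the same validation pattern (function name and error text are illustrative):

```go
package main

import (
	"errors"
	"fmt"
)

// validateOperands mirrors the type-switch added above: both operands
// must be numbers, or both must be strings; anything else is rejected
// before a condition is ever constructed.
func validateOperands(value1, value2 any) error {
	switch value1.(type) {
	case float64:
		if _, ok := value2.(float64); !ok {
			return errors.New("expected number for both operands")
		}
	case string:
		if _, ok := value2.(string); !ok {
			return errors.New("expected string for both operands")
		}
	default:
		return errors.New("operands must be number or string")
	}
	return nil
}

func main() {
	fmt.Println(validateOperands(1.0, 2.0))     // <nil>
	fmt.Println(validateOperands(1.0, "two"))   // expected number for both operands
	fmt.Println(validateOperands([]int{}, 1.0)) // operands must be number or string
}
```
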
@@ -855,7 +871,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
 	// 1. either user meant key ( this is already handled above in fieldKeysForName )
 	// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

 	// Note:
 	// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
 	// If user only wants to search `key`, then they have to use `key`
 	// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -375,13 +375,6 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
 		config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
 	}

-	if os.Getenv("INTERPOLATION_ENABLED") != "" {
-		logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
-		if config.Flagger.Config.Boolean == nil {
-			config.Flagger.Config.Boolean = make(map[string]bool)
-		}
-		config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
-	}
 }

 func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {
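The removed block was a deprecation shim: it honored the legacy INTERPOLATION_ENABLED environment variable while warning users to migrate to SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED. A self-contained sketch of that shim pattern as it existed before removal (the flag key and logger wiring here are illustrative):

```go
package main

import (
	"log/slog"
	"os"
)

// applyLegacyEnv shows the shim shape the diff deletes: read a
// deprecated env var, warn, and translate it into the new boolean
// flag map. The map key is an illustrative placeholder.
func applyLegacyEnv(flags map[string]bool, logger *slog.Logger) {
	if v := os.Getenv("INTERPOLATION_ENABLED"); v != "" {
		logger.Warn("[Deprecated] env INTERPOLATION_ENABLED is deprecated. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
		flags["interpolation_enabled"] = v == "true"
	}
}

func main() {
	flags := map[string]bool{}
	applyLegacyEnv(flags, slog.Default())
}
```
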
@@ -13,6 +13,8 @@ import (
 	"github.com/SigNoz/signoz/pkg/modules/authdomain"
 	"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
 	"github.com/SigNoz/signoz/pkg/modules/dashboard"
+	"github.com/SigNoz/signoz/pkg/modules/identity"
+	"github.com/SigNoz/signoz/pkg/modules/identity/implidentity"
 	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer"
 	"github.com/SigNoz/signoz/pkg/modules/metricsexplorer/implmetricsexplorer"
 	"github.com/SigNoz/signoz/pkg/modules/organization"
@@ -54,6 +56,7 @@ type Modules struct {
 	Preference preference.Module
 	User       user.Module
 	UserGetter user.Getter
+	Identity   identity.Module
 	SavedView  savedview.Module
 	Apdex      apdex.Module
 	Dashboard  dashboard.Module
@@ -88,7 +91,8 @@ func NewModules(
 ) Modules {
 	quickfilter := implquickfilter.NewModule(implquickfilter.NewStore(sqlstore))
 	orgSetter := implorganization.NewSetter(implorganization.NewStore(sqlstore), alertmanager, quickfilter)
-	user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User)
+	identityModule := implidentity.NewModule(sqlstore)
+	user := impluser.NewModule(impluser.NewStore(sqlstore, providerSettings), tokenizer, emailing, providerSettings, orgSetter, authz, analytics, config.User, identityModule)
 	userGetter := impluser.NewGetter(impluser.NewStore(sqlstore, providerSettings))
 	ruleStore := sqlrulestore.NewRuleStore(sqlstore, queryParser, providerSettings)
@@ -101,11 +105,12 @@ func NewModules(
 		Dashboard:       dashboard,
 		User:            user,
 		UserGetter:      userGetter,
+		Identity:        identityModule,
 		QuickFilter:     quickfilter,
 		TraceFunnel:     impltracefunnel.NewModule(impltracefunnel.NewStore(sqlstore)),
 		RawDataExport:   implrawdataexport.NewModule(querier),
 		AuthDomain:      implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs),
-		Session:         implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter),
+		Session:         implsession.NewModule(providerSettings, authNs, user, userGetter, implauthdomain.NewModule(implauthdomain.NewStore(sqlstore), authNs), tokenizer, orgGetter, identityModule),
 		SpanPercentile:  implspanpercentile.NewModule(querier, providerSettings),
 		Services:        implservices.NewModule(querier, telemetryStore),
 		MetricsExplorer: implmetricsexplorer.NewModule(telemetryStore, telemetryMetadataStore, cache, ruleStore, dashboard, providerSettings, config.MetricsExplorer),
@@ -23,6 +23,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/global"
 	"github.com/SigNoz/signoz/pkg/global/signozglobal"
 	"github.com/SigNoz/signoz/pkg/modules/authdomain/implauthdomain"
+	"github.com/SigNoz/signoz/pkg/modules/identity"
 	"github.com/SigNoz/signoz/pkg/modules/organization"
 	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
 	"github.com/SigNoz/signoz/pkg/modules/preference/implpreference"
@@ -167,6 +168,11 @@ func NewSQLMigrationProviderFactories(
 		sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
 		sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
 		sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
+		sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
+		sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
+		sqlmigration.NewAddIdentityFactory(sqlstore, sqlschema),
+		sqlmigration.NewUpdateUserIdentity(sqlstore, sqlschema),
+		sqlmigration.NewMigrateUserRolesFactory(sqlstore, sqlschema),
 	)
 }
@@ -218,9 +224,9 @@ func NewSharderProviderFactories() factory.NamedMap[factory.ProviderFactory[shar
 	)
 }

-func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] {
+func NewStatsReporterProviderFactories(telemetryStore telemetrystore.TelemetryStore, collectors []statsreporter.StatsCollector, orgGetter organization.Getter, userGetter user.Getter, identityModule identity.Module, tokenizer tokenizer.Tokenizer, build version.Build, analyticsConfig analytics.Config) factory.NamedMap[factory.ProviderFactory[statsreporter.StatsReporter, statsreporter.Config]] {
 	return factory.MustNewNamedMap(
-		analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, tokenizer, build, analyticsConfig),
+		analyticsstatsreporter.NewFactory(telemetryStore, collectors, orgGetter, userGetter, identityModule, tokenizer, build, analyticsConfig),
 		noopstatsreporter.NewFactory(),
 	)
 }
@@ -7,6 +7,7 @@ import (
 	"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfmanagertest"
 	"github.com/SigNoz/signoz/pkg/analytics"
 	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
+	"github.com/SigNoz/signoz/pkg/modules/identity/implidentity"
 	"github.com/SigNoz/signoz/pkg/modules/organization/implorganization"
 	"github.com/SigNoz/signoz/pkg/modules/user/impluser"
 	"github.com/SigNoz/signoz/pkg/queryparser"
@@ -75,10 +76,12 @@ func TestNewProviderFactories(t *testing.T) {
 	})

 	assert.NotPanics(t, func() {
-		userGetter := impluser.NewGetter(impluser.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual), instrumentationtest.New().ToProviderSettings()))
-		orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)), nil)
+		sqlStore := sqlstoretest.New(sqlstore.Config{Provider: "sqlite"}, sqlmock.QueryMatcherEqual)
+		userGetter := impluser.NewGetter(impluser.NewStore(sqlStore, instrumentationtest.New().ToProviderSettings()))
+		orgGetter := implorganization.NewGetter(implorganization.NewStore(sqlStore), nil)
+		identityModule := implidentity.NewModule(sqlStore)
 		telemetryStore := telemetrystoretest.New(telemetrystore.Config{Provider: "clickhouse"}, sqlmock.QueryMatcherEqual)
-		NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, tokenizertest.NewMockTokenizer(t), version.Build{}, analytics.Config{Enabled: true})
+		NewStatsReporterProviderFactories(telemetryStore, []statsreporter.StatsCollector{}, orgGetter, userGetter, identityModule, tokenizertest.NewMockTokenizer(t), version.Build{}, analytics.Config{Enabled: true})
 	})

 	assert.NotPanics(t, func() {
@@ -389,6 +389,8 @@ func New(
 	// Initialize all modules
 	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)

+	userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, modules.Identity, config.User.Root)
+
 	// Initialize all handlers for the modules
 	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
@@ -422,7 +424,7 @@
 		ctx,
 		providerSettings,
 		config.StatsReporter,
-		NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, tokenizer, version.Info, config.Analytics),
+		NewStatsReporterProviderFactories(telemetrystore, statsCollectors, orgGetter, userGetter, modules.Identity, tokenizer, version.Info, config.Analytics),
 		config.StatsReporter.Provider(),
 	)
 	if err != nil {
@@ -438,6 +440,7 @@ func New(
 		factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
 		factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
 		factory.NewNamedService(factory.MustNewName("authz"), authz),
+		factory.NewNamedService(factory.MustNewName("user"), userService),
 	)
 	if err != nil {
 		return nil, err
pkg/sqlmigration/064_add_root_user.go (new file, 80 lines)
@@ -0,0 +1,80 @@
package sqlmigration

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun"
	"github.com/uptrace/bun/migrate"
)

type addRootUser struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
}

func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
		return &addRootUser{
			sqlstore:  sqlstore,
			sqlschema: sqlschema,
		}, nil
	})
}

func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
	if err := migrations.Register(migration.Up, migration.Down); err != nil {
		return err
	}

	return nil
}

func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
	if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
		return err
	}

	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	defer func() {
		_ = tx.Rollback()
	}()

	table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
	if err != nil {
		return err
	}

	column := &sqlschema.Column{
		Name:     sqlschema.ColumnName("is_root"),
		DataType: sqlschema.DataTypeBoolean,
		Nullable: false,
	}

	sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
	for _, sql := range sqls {
		if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
			return err
		}
	}

	if err := tx.Commit(); err != nil {
		return err
	}

	if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
		return err
	}

	return nil
}

func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
	return nil
}
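Two details of this migration are worth noting: foreign-key enforcement is toggled off around the schema change, and the transaction's Rollback is deferred so every early return aborts cleanly, while a successful Commit makes the deferred Rollback a harmless no-op. A generic, self-contained sketch of that transaction shape using database/sql (the DDL statement is a placeholder, not the SQL the sqlschema operator actually emits):

```go
package example

import (
	"context"
	"database/sql"
)

// addColumn demonstrates the begin / defer-rollback / commit shape used
// by the Up migration above.
func addColumn(ctx context.Context, db *sql.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Rollback after a successful Commit is a no-op, so deferring it
	// guarantees cleanup on every early-return path.
	defer func() {
		_ = tx.Rollback()
	}()

	// Placeholder DDL: a NOT NULL column typically needs a default
	// when added to a table that already has rows.
	if _, err := tx.ExecContext(ctx, `ALTER TABLE users ADD COLUMN is_root BOOLEAN NOT NULL DEFAULT false`); err != nil {
		return err
	}

	return tx.Commit()
}
```
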
Some files were not shown because too many files have changed in this diff.