Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-18 15:02:35 +00:00)

Compare commits
14 commits:

- 6f0f5b51a9
- 037b92feb3
- 95e023dafd
- d20c866d2c
- 06f55e6eda
- 7f1d350ffe
- 1d3134959d
- b86bd24dd9
- 4c49d45cbf
- 9b3d3453b1
- 9d981d8a13
- 6de4520a95
- f566909320
- f592bc084d
@@ -176,25 +176,6 @@ We have published benchmarks comparing Loki with SigNoz. Take a look…

We ❤️ contributions, big or small. Please read [CONTRIBUTING.md](CONTRIBUTING.md) first before you start contributing to SigNoz.

Not sure how to get started? Just ping us on the #contributing channel in our [slack community](https://signoz.io/slack)

### Our project maintainers

#### Backend

- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)

#### Frontend

- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)

#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)

<br /><br />

## Documentation
README.md (28 changes)
@@ -221,34 +221,6 @@ We ❤️ contributions big or small. Please read [CONTRIBUTING.md](CONTRIBUTING

Not sure how to get started? Just ping us on `#contributing` in our [slack community](https://signoz.io/slack)

### Project maintainers

#### Backend

- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)
- [Shivanshu Raj Shrivastava](https://github.com/shivanshuraj1333)
- [Ekansh Gupta](https://github.com/eKuG)
- [Aniket Agarwal](https://github.com/aniketio-ctrl)

#### Frontend

- [Yunus M](https://github.com/YounixM)
- [Vikrant Gupta](https://github.com/vikrantgupta25)
- [Sagar Rajput](https://github.com/SagarRajput-7)
- [Shaheer Kochai](https://github.com/ahmadshaheer)
- [Amlan Kumar Nandy](https://github.com/amlannandy)
- [Sahil Khan](https://github.com/sawhil)
- [Aditya Singh](https://github.com/aks07)
- [Abhi Kumar](https://github.com/ahrefabhi)

#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)
- [Vibhu Pandey](https://github.com/therealpandey)

<br /><br />
@@ -187,25 +187,6 @@ Jaeger is just a distributed tracing system, but SigNoz can provide metric…

Not sure how to get started? Just reach out to us on the `#contributing` channel in our [slack community](https://signoz.io/slack).

### Project maintainers

#### Backend

- [Ankit Nayan](https://github.com/ankitnayan)
- [Nityananda Gohain](https://github.com/nityanandagohain)
- [Srikanth Chekuri](https://github.com/srikanthccv)
- [Vishal Sharma](https://github.com/makeavish)

#### Frontend

- [Palash Gupta](https://github.com/palashgdev)
- [Yunus M](https://github.com/YounixM)
- [Rajat Dabade](https://github.com/Rajat-Dabade)

#### DevOps

- [Prashant Shahi](https://github.com/prashant-shahi)

<br /><br />

## Documentation
@@ -294,7 +294,6 @@ flagger:
  config:
    boolean:
      use_span_metrics: true
      interpolation_enabled: false
      kafka_span_eval: false
    string:
    float:
@@ -309,3 +308,14 @@ user:
      allow_self: true
      # The duration within which a user can reset their password.
      max_token_lifetime: 6h
  root:
    # Whether to enable the root user. When enabled, a root user is provisioned
    # on startup using the email and password below. The root user cannot be
    # deleted, updated, or have their password changed through the UI.
    enabled: false
    # The email address of the root user.
    email: ""
    # The password of the root user. Must meet password requirements.
    password: ""
    # The name of the organization to create or look up for the root user.
    org_name: default
@@ -4678,6 +4678,8 @@ components:
        type: string
      id:
        type: string
      isRoot:
        type: boolean
      orgId:
        type: string
      role:
@@ -45,7 +45,7 @@ type APIHandler struct {
}

// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
	baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
		Reader: opts.DataConnector,
		RuleManager: opts.RulesManager,

@@ -58,7 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
		Signoz: signoz,
		QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
		QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
	})
	}, config)

	if err != nil {
		return nil, err
@@ -175,7 +175,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
		GlobalConfig: config.Global,
	}

	apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
	apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
	if err != nil {
		return nil, err
	}
@@ -1542,6 +1542,10 @@ export interface TypesUserDTO {
   * @type string
   */
  id?: string;
  /**
   * @type boolean
   */
  isRoot?: boolean;
  /**
   * @type string
   */
@@ -1,6 +1,5 @@
import axios from 'api';
import { AxiosResponse } from 'axios';
import store from 'store';
import {
  QueryKeyRequestProps,
  QueryKeySuggestionsResponseProps,

@@ -18,12 +17,6 @@ export const getKeySuggestions = (
    signalSource = '',
  } = props;

  const { globalTime } = store.getState();
  const resolvedTimeRange = {
    startUnixMilli: Math.floor(globalTime.minTime / 1000000),
    endUnixMilli: Math.floor(globalTime.maxTime / 1000000),
  };

  const encodedSignal = encodeURIComponent(signal);
  const encodedSearchText = encodeURIComponent(searchText);
  const encodedMetricName = encodeURIComponent(metricName);

@@ -31,14 +24,7 @@ export const getKeySuggestions = (
  const encodedFieldDataType = encodeURIComponent(fieldDataType);
  const encodedSource = encodeURIComponent(signalSource);

  let url = `/fields/keys?signal=${encodedSignal}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&fieldContext=${encodedFieldContext}&fieldDataType=${encodedFieldDataType}&source=${encodedSource}`;

  if (resolvedTimeRange.startUnixMilli !== undefined) {
    url += `&startUnixMilli=${resolvedTimeRange.startUnixMilli}`;
  }
  if (resolvedTimeRange.endUnixMilli !== undefined) {
    url += `&endUnixMilli=${resolvedTimeRange.endUnixMilli}`;
  }

  return axios.get(url);
  return axios.get(
    `/fields/keys?signal=${encodedSignal}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&fieldContext=${encodedFieldContext}&fieldDataType=${encodedFieldDataType}&source=${encodedSource}`,
  );
};
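The removed block above read the Redux `globalTime` state and converted it from nanoseconds to milliseconds before appending `startUnixMilli`/`endUnixMilli` to the request URL. A minimal sketch of just that conversion, using a hypothetical helper name (`toUnixMilli` is not an export of the codebase shown here):

```typescript
// Hypothetical helper: SigNoz stores globalTime.minTime/maxTime as Unix
// timestamps in nanoseconds, while the /fields/keys endpoint took
// startUnixMilli/endUnixMilli, so the code divides by 1e6 and floors.
const toUnixMilli = (unixNano: number): number => Math.floor(unixNano / 1_000_000);

// e.g. toUnixMilli(1_000_000) === 1
```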
@@ -8,16 +8,7 @@ import {

export const getValueSuggestions = (
  props: QueryKeyValueRequestProps,
): Promise<AxiosResponse<QueryKeyValueSuggestionsResponseProps>> => {
  const {
    signal,
    key,
    searchText,
    signalSource,
    metricName,
    startUnixMilli,
    endUnixMilli,
    existingQuery,
  } = props;
  const { signal, key, searchText, signalSource, metricName } = props;

  const encodedSignal = encodeURIComponent(signal);
  const encodedKey = encodeURIComponent(key);

@@ -25,17 +16,7 @@ export const getValueSuggestions = (
  const encodedSearchText = encodeURIComponent(searchText);
  const encodedSource = encodeURIComponent(signalSource || '');

  let url = `/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&source=${encodedSource}`;

  if (startUnixMilli !== undefined) {
    url += `&startUnixMilli=${startUnixMilli}`;
  }
  if (endUnixMilli !== undefined) {
    url += `&endUnixMilli=${endUnixMilli}`;
  }
  if (existingQuery) {
    url += `&existingQuery=${encodeURIComponent(existingQuery)}`;
  }

  return axios.get(url);
  return axios.get(
    `/fields/values?signal=${encodedSignal}&name=${encodedKey}&searchText=${encodedSearchText}&metricName=${encodedMetricName}&source=${encodedSource}`,
  );
};
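Both suggestion helpers build their URLs by string concatenation, and the removed code appended each optional parameter with a separate `if`. A sketch of the same construction with `URLSearchParams`, shown under the assumption of the same parameter names; it is an alternative, not the project's current convention:

```typescript
// Sketch only: each value is encoded once by URLSearchParams, and optional
// parameters are appended uniformly instead of via repeated string appends.
function buildValuesUrl(params: {
  signal: string;
  key: string;
  searchText: string;
  metricName?: string;
  signalSource?: string;
  startUnixMilli?: number;
  endUnixMilli?: number;
  existingQuery?: string;
}): string {
  const search = new URLSearchParams({
    signal: params.signal,
    name: params.key,
    searchText: params.searchText,
    metricName: params.metricName ?? '',
    source: params.signalSource ?? '',
  });
  if (params.startUnixMilli !== undefined) {
    search.set('startUnixMilli', String(params.startUnixMilli));
  }
  if (params.endUnixMilli !== undefined) {
    search.set('endUnixMilli', String(params.endUnixMilli));
  }
  if (params.existingQuery) {
    search.set('existingQuery', params.existingQuery);
  }
  return `/fields/values?${search.toString()}`;
}
```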
@@ -1,12 +1,9 @@
import { useEffect, useRef, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { Select, Spin } from 'antd';
import { getKeySuggestions } from 'api/querySuggestions/getKeySuggestions';
import { AppState } from 'store/reducers';
import { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';

import './ListViewOrderBy.styles.scss';

@@ -37,20 +34,9 @@ function ListViewOrderBy({
  >([]);
  const debounceTimer = useRef<ReturnType<typeof setTimeout> | null>(null);

  // Get time range from Redux global state
  const globalTime = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );

  // Fetch key suggestions based on debounced input
  const { data, isLoading } = useQuery({
    queryKey: [
      'orderByKeySuggestions',
      dataSource,
      debouncedInput,
      globalTime.minTime,
      globalTime.maxTime,
    ],
    queryKey: ['orderByKeySuggestions', dataSource, debouncedInput],
    queryFn: async () => {
      const response = await getKeySuggestions({
        signal: dataSource,
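This hunk replaces the multi-line `queryKey` that included `globalTime.minTime`/`globalTime.maxTime` with a shorter key. The key controls caching: react-query caches per serialized key, so a key without the time range serves the same cached suggestions after the time picker moves, while a key that includes it refetches per range. A self-contained sketch of the keyed variant, with an illustrative fetcher standing in for `getKeySuggestions`:

```typescript
import { useQuery } from 'react-query';

// Illustrative stub standing in for getKeySuggestions.
const fetchSuggestions = async (source: string, text: string): Promise<string[]> =>
  [`${source}:${text}`];

// react-query caches per serialized queryKey: with the time range in the key,
// each (dataSource, input, range) combination is a distinct cache entry and
// moving the time picker refetches; without it, the cached list is reused.
export function useOrderByKeySuggestions(dataSource: string, debouncedInput: string) {
  return useQuery({
    queryKey: ['orderByKeySuggestions', dataSource, debouncedInput],
    queryFn: () => fetchSuggestions(dataSource, debouncedInput),
  });
}
```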
@@ -1,7 +1,6 @@
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable sonarjs/cognitive-complexity */
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import { CheckCircleFilled } from '@ant-design/icons';
import {
  autocompletion,

@@ -34,7 +33,6 @@ import { useIsDarkMode } from 'hooks/useDarkMode';
import useDebounce from 'hooks/useDebounce';
import { debounce, isNull } from 'lodash-es';
import { Info, TriangleAlert } from 'lucide-react';
import { AppState } from 'store/reducers';
import {
  IDetailedError,
  IQueryContext,

@@ -43,7 +41,6 @@ import {
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
import { QueryKeyDataSuggestionsProps } from 'types/api/querySuggestions/types';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import {
  getCurrentValueIndexAtCursor,
  getQueryContextAtCursor,

@@ -89,6 +86,7 @@ interface QuerySearchProps {
  signalSource?: string;
  hardcodedAttributeKeys?: QueryKeyDataSuggestionsProps[];
  onRun?: (query: string) => void;
  showFilterSuggestionsWithoutMetric?: boolean;
}

function QuerySearch({

@@ -99,6 +97,7 @@ function QuerySearch({
  onRun,
  signalSource,
  hardcodedAttributeKeys,
  showFilterSuggestionsWithoutMetric,
}: QuerySearchProps): JSX.Element {
  const isDarkMode = useIsDarkMode();
  const [valueSuggestions, setValueSuggestions] = useState<any[]>([]);

@@ -115,20 +114,6 @@ function QuerySearch({
  const [isFocused, setIsFocused] = useState(false);
  const editorRef = useRef<EditorView | null>(null);

  // Get time range from Redux global state
  const globalTime = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );

  // Convert nanoseconds to milliseconds - use useMemo with both deps to avoid multiple updates
  const timeRange = useMemo(
    () => ({
      startUnixMilli: Math.floor(globalTime.minTime / 1000000),
      endUnixMilli: Math.floor(globalTime.maxTime / 1000000),
    }),
    [globalTime.minTime, globalTime.maxTime],
  );

  const handleQueryValidation = useCallback((newExpression: string): void => {
    try {
      const validationResponse = validateQuery(newExpression);

@@ -269,7 +254,8 @@ function QuerySearch({
    async (searchText?: string): Promise<void> => {
      if (
        dataSource === DataSource.METRICS &&
        !queryData.aggregateAttribute?.key
        !queryData.aggregateAttribute?.key &&
        !showFilterSuggestionsWithoutMetric
      ) {
        setKeySuggestions([]);
        return;

@@ -288,6 +274,7 @@ function QuerySearch({
        metricName: debouncedMetricName ?? undefined,
        signalSource: signalSource as 'meter' | '',
      });

      if (response.data.data) {
        const { keys } = response.data.data;
        const options = generateOptions(keys);

@@ -317,6 +304,7 @@ function QuerySearch({
      queryData.aggregateAttribute?.key,
      signalSource,
      hardcodedAttributeKeys,
      showFilterSuggestionsWithoutMetric,
    ],
  );

@@ -329,12 +317,7 @@ function QuerySearch({
    setKeySuggestions([]);
    debouncedFetchKeySuggestions();
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [
    dataSource,
    debouncedMetricName,
    timeRange.startUnixMilli,
    timeRange.endUnixMilli,
  ]);
  }, [dataSource, debouncedMetricName]);

  // Add a state for tracking editing mode
  const [editingMode, setEditingMode] = useState<

@@ -452,7 +435,6 @@ function QuerySearch({
      }

      const sanitizedSearchText = searchText ? searchText?.trim() : '';
      const existingQuery = queryData.filter?.expression || '';

      try {
        const response = await getValueSuggestions({

@@ -461,10 +443,9 @@ function QuerySearch({
          signal: dataSource,
          signalSource: signalSource as 'meter' | '',
          metricName: debouncedMetricName ?? undefined,
          startUnixMilli: timeRange.startUnixMilli,
          endUnixMilli: timeRange.endUnixMilli,
          existingQuery,
        }); // Skip updates if component unmounted or key changed
        });

        // Skip updates if component unmounted or key changed
        if (
          !isMountedRef.current ||
          lastKeyRef.current !== key ||

@@ -476,10 +457,10 @@ function QuerySearch({
        // Process the response data
        const responseData = response.data as any;
        const values = responseData.data?.values || {};
        const relatedValues = values.relatedValues || [];
        const stringValues =
          relatedValues.length > 0 ? relatedValues : values.stringValues || [];
        const numberValues = values.numberValues || []; // Generate options from string values - explicitly handle empty strings
        const stringValues = values.stringValues || [];
        const numberValues = values.numberValues || [];

        // Generate options from string values - explicitly handle empty strings
        const stringOptions = stringValues
          // Strict filtering for empty string - we'll handle it as a special case if needed
          .filter(

@@ -551,14 +532,11 @@ function QuerySearch({
    },
    [
      activeKey,
      isLoadingSuggestions,
      queryData.filter?.expression,
      toggleSuggestions,
      dataSource,
      signalSource,
      isLoadingSuggestions,
      debouncedMetricName,
      timeRange.startUnixMilli,
      timeRange.endUnixMilli,
      signalSource,
      toggleSuggestions,
    ],
  );

@@ -1275,17 +1253,19 @@ function QuerySearch({
    if (!queryContext) {
      return;
    }
    // Only trigger suggestions and fetch if editor is focused (i.e., user is interacting)
    if (isFocused && editorRef.current) {
      // Trigger suggestions based on context
    if (editorRef.current) {
      toggleSuggestions(10);
      // Handle value suggestions for value context
      if (queryContext.isInValue) {
        const { keyToken, currentToken } = queryContext;
        const key = keyToken || currentToken;
        // Only fetch if needed and if we have a valid key
        if (key && key !== activeKey && !isLoadingSuggestions) {
          fetchValueSuggestions({ key });
        }
      }

    // Handle value suggestions for value context
    if (queryContext.isInValue) {
      const { keyToken, currentToken } = queryContext;
      const key = keyToken || currentToken;

      // Only fetch if needed and if we have a valid key
      if (key && key !== activeKey && !isLoadingSuggestions) {
        fetchValueSuggestions({ key });
      }
    }
  }, [

@@ -1294,7 +1274,6 @@ function QuerySearch({
    isLoadingSuggestions,
    activeKey,
    fetchValueSuggestions,
    isFocused,
  ]);

  const getTooltipContent = (): JSX.Element => (

@@ -1587,6 +1566,7 @@ QuerySearch.defaultProps = {
  hardcodedAttributeKeys: undefined,
  placeholder:
    "Enter your filter query (e.g., http.status_code >= 500 AND service.name = 'frontend')",
  showFilterSuggestionsWithoutMetric: false,
};

export default QuerySearch;
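The same pattern (read `globalTime` from Redux, then `useMemo` the nanosecond-to-millisecond conversion) appears in QuerySearch, ListViewOrderBy, and CheckboxFilter in this change set. A sketch of factoring it into one shared hook; `useGlobalTimeRangeMs` is a hypothetical name, not an existing export:

```typescript
import { useMemo } from 'react';
import { useSelector } from 'react-redux';
import { AppState } from 'store/reducers';
import { GlobalReducer } from 'types/reducer/globalTime';

// Hypothetical shared hook: reads the global time range (stored in
// nanoseconds) once and memoizes the millisecond conversion, so consumers
// don't each repeat the Math.floor(ns / 1_000_000) dance.
export function useGlobalTimeRangeMs(): {
  startUnixMilli: number;
  endUnixMilli: number;
} {
  const { minTime, maxTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );
  return useMemo(
    () => ({
      startUnixMilli: Math.floor(minTime / 1_000_000),
      endUnixMilli: Math.floor(maxTime / 1_000_000),
    }),
    [minTime, maxTime],
  );
}
```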
@@ -138,93 +138,6 @@
    cursor: pointer;
  }
}

.search-prompt {
  display: flex;
  align-items: center;
  justify-content: center;
  gap: 8px;
  padding: 10px 12px;
  margin-top: 4px;
  border: 1px dashed var(--bg-amber-500);
  border-radius: 10px;
  color: var(--bg-amber-200);
  background: linear-gradient(
    90deg,
    var(--bg-ink-500) 0%,
    var(--bg-ink-400) 100%
  );
  cursor: pointer;
  transition: all 0.16s ease, transform 0.12s ease;
  text-align: left;
  box-shadow: 0 2px 12px rgba(0, 0, 0, 0.35);

  &:hover {
    background: linear-gradient(
      90deg,
      var(--bg-ink-400) 0%,
      var(--bg-ink-300) 100%
    );
    box-shadow: 0 4px 18px rgba(0, 0, 0, 0.45);
  }

  &:active {
    transform: translateY(1px);
  }

  &__icon {
    color: var(--bg-amber-400);
    flex-shrink: 0;
  }

  &__text {
    display: flex;
    flex-direction: column;
    gap: 2px;
  }

  &__title {
    color: var(--bg-amber-200);
  }

  &__subtitle {
    color: var(--bg-amber-300);
    font-size: 12px;
  }
}

.lightMode & {
  .search-prompt {
    border: 1px dashed var(--bg-amber-500);
    color: var(--bg-amber-800);
    background: linear-gradient(
      90deg,
      var(--bg-vanilla-200) 0%,
      var(--bg-vanilla-100) 100%
    );
    box-shadow: 0 2px 12px rgba(184, 107, 0, 0.08);

    &:hover {
      background: linear-gradient(
        90deg,
        var(--bg-vanilla-100) 0%,
        var(--bg-vanilla-50) 100%
      );
      box-shadow: 0 4px 16px rgba(184, 107, 0, 0.15);
    }

    &__icon {
      color: var(--bg-amber-600);
    }

    &__title {
      color: var(--bg-amber-800);
    }

    &__subtitle {
      color: var(--bg-amber-800);
    }
  }
}

.go-to-docs {
  display: flex;
  flex-direction: column;
@@ -2,16 +2,8 @@
/* eslint-disable sonarjs/no-identical-functions */
/* eslint-disable jsx-a11y/no-static-element-interactions */
/* eslint-disable jsx-a11y/click-events-have-key-events */
import {
  Fragment,
  useCallback,
  useEffect,
  useMemo,
  useRef,
  useState,
} from 'react';
import { useSelector } from 'react-redux';
import { Button, Checkbox, Input, InputRef, Skeleton, Typography } from 'antd';
import { Fragment, useMemo, useState } from 'react';
import { Button, Checkbox, Input, Skeleton, Typography } from 'antd';
import cx from 'classnames';
import { removeKeysFromExpression } from 'components/QueryBuilderV2/utils';
import {

@@ -19,18 +11,21 @@ import {
  QuickFiltersSource,
} from 'components/QuickFilters/types';
import { OPERATORS } from 'constants/antlrQueryConstants';
import { PANEL_TYPES } from 'constants/queryBuilder';
import {
  DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY,
  PANEL_TYPES,
} from 'constants/queryBuilder';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { getOperatorValue } from 'container/QueryBuilder/filters/QueryBuilderSearch/utils';
import { useGetAggregateValues } from 'hooks/queryBuilder/useGetAggregateValues';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { cloneDeep, isArray, isEqual, isFunction } from 'lodash-es';
import { AlertTriangle, ChevronDown, ChevronRight } from 'lucide-react';
import { AppState } from 'store/reducers';
import { ChevronDown, ChevronRight } from 'lucide-react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { Query, TagFilterItem } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
import { GlobalReducer } from 'types/reducer/globalTime';
import { v4 as uuid } from 'uuid';

import LogsQuickFilterEmptyState from './LogsQuickFilterEmptyState';

@@ -73,15 +68,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
    panelType,
  } = useQueryBuilder();

  // Get time range from Redux global state
  const { maxTime, minTime } = useSelector<AppState, GlobalReducer>(
    (state) => state.globalTime,
  );

  // Convert nanoseconds to milliseconds
  const startUnixMilli = useMemo(() => Math.floor(minTime / 1000000), [minTime]);
  const endUnixMilli = useMemo(() => Math.floor(maxTime / 1000000), [maxTime]);

  // Determine if we're in ListView mode
  const isListView = panelType === PANEL_TYPES.LIST;
  // In ListView mode, use index 0 for most sources; for TRACES_EXPLORER, use lastUsedQuery

@@ -95,12 +81,6 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
    return lastUsedQuery || 0;
  }, [isListView, source, lastUsedQuery]);

  // Extract current filter expression for the active query
  const currentFilterExpression = useMemo(() => {
    const queryData = currentQuery.builder.queryData?.[activeQueryIndex];
    return queryData?.filter?.expression || '';
  }, [currentQuery.builder.queryData, activeQueryIndex]);

  // Check if this filter has active filters in the query
  const isSomeFilterPresentForCurrentAttribute = useMemo(
    () =>

@@ -132,128 +112,47 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
    filter.defaultOpen,
  ]);

  const { data, isLoading } = useGetAggregateValues(
    {
      aggregateOperator: filter.aggregateOperator || 'noop',
      dataSource: filter.dataSource || DataSource.LOGS,
      aggregateAttribute: filter.aggregateAttribute || '',
      attributeKey: filter.attributeKey.key,
      filterAttributeKeyDataType: filter.attributeKey.dataType || DataTypes.EMPTY,
      tagType: filter.attributeKey.type || '',
      searchText: searchText ?? '',
    },
    {
      enabled: isOpen && source !== QuickFiltersSource.METER_EXPLORER,
      keepPreviousData: true,
    },
  );

  const {
    data: keyValueSuggestions,
    isLoading: isLoadingKeyValueSuggestions,
    refetch: refetchKeyValueSuggestions,
  } = useGetQueryKeyValueSuggestions({
    key: filter.attributeKey.key,
    signal: filter.dataSource || DataSource.LOGS,
    signalSource: source === QuickFiltersSource.METER_EXPLORER ? 'meter' : '',
    startUnixMilli,
    endUnixMilli,
    searchText: searchText || '',
    existingQuery: currentFilterExpression,
    signalSource: 'meter',
    options: {
      enabled: isOpen,
      enabled: isOpen && source === QuickFiltersSource.METER_EXPLORER,
      keepPreviousData: true,
    },
  });

  const searchInputRef = useRef<InputRef | null>(null);
  const searchContainerRef = useRef<HTMLDivElement | null>(null);
  const previousFiltersItemsRef = useRef(
    currentQuery.builder.queryData?.[activeQueryIndex]?.filters?.items,
  );

  // Refetch when other filters change (not this filter)
  // Watch for when filters.items is different from previous value, indicating other filters changed
  useEffect(() => {
    const currentFiltersItems =
      currentQuery.builder.queryData?.[activeQueryIndex]?.filters?.items;

    const previousFiltersItems = previousFiltersItemsRef.current;

    // Check if filters items have changed (not the same)
    const filtersChanged = !isEqual(previousFiltersItems, currentFiltersItems);

    if (isOpen && filtersChanged) {
      // Check if OTHER filters (not this filter) have changed
      const currentOtherFilters = currentFiltersItems?.filter(
        (item) => !isEqual(item.key?.key, filter.attributeKey.key),
      );
      const previousOtherFilters = previousFiltersItems?.filter(
        (item) => !isEqual(item.key?.key, filter.attributeKey.key),
      );

      // Refetch if other filters changed (not just this filter's values)
      const otherFiltersChanged = !isEqual(
        currentOtherFilters,
        previousOtherFilters,
      );

      // Only update ref if we have valid API data or if filters actually changed
      // Don't update if search returned 0 results to preserve unchecked values
      const hasValidData = keyValueSuggestions && !isLoadingKeyValueSuggestions;
      if (otherFiltersChanged || hasValidData) {
        previousFiltersItemsRef.current = currentFiltersItems;
      }

      if (otherFiltersChanged) {
        refetchKeyValueSuggestions();
      }
    } else {
      previousFiltersItemsRef.current = currentFiltersItems;
    }
  }, [
    activeQueryIndex,
    isOpen,
    refetchKeyValueSuggestions,
    filter.attributeKey.key,
    currentQuery.builder.queryData,
    keyValueSuggestions,
    isLoadingKeyValueSuggestions,
  ]);

  const handleSearchPromptClick = useCallback((): void => {
    if (searchContainerRef.current) {
      searchContainerRef.current.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
    }
    if (searchInputRef.current) {
      setTimeout(() => searchInputRef.current?.focus({ cursor: 'end' }), 120);
    }
  }, []);

  const isDataComplete = useMemo(() => {
    if (keyValueSuggestions) {
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
      const responseData = keyValueSuggestions?.data as any;
      return responseData.data?.complete || false;
    }
    return false;
  }, [keyValueSuggestions]);

  const previousUncheckedValuesRef = useRef<string[]>([]);

  const attributeValues: string[] = useMemo(() => {
    // const dataType = filter.attributeKey.dataType || DataTypes.String;
    const dataType = filter.attributeKey.dataType || DataTypes.String;

    if (keyValueSuggestions) {
      // Process the response data for all pages
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    if (source === QuickFiltersSource.METER_EXPLORER && keyValueSuggestions) {
      // Process the response data
      const responseData = keyValueSuggestions?.data as any;
      const values = responseData.data?.values || {};
      const relatedValues = values.relatedValues || [];
      const stringValues = values.stringValues || [];
      const numberValues = values.numberValues || [];

      // Combine relatedValues first, then unique stringValues
      const valuesToUse = [
        ...relatedValues,
        ...stringValues.filter(
          (value: string | null | undefined) =>
            value !== null &&
            value !== undefined &&
            value !== '' &&
            !relatedValues.includes(value),
        ),
      ];

      // Generate options from string values - explicitly handle empty strings
      const stringOptions = valuesToUse
      const stringOptions = stringValues
        // Strict filtering for empty string - we'll handle it as a special case if needed
        .filter(
          (value: string | null | undefined): value is string =>

@@ -268,15 +167,15 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
        )
        .map((value: number) => value.toString());

      const baseValues = [...stringOptions, ...numberOptions];
      const previousUnchecked = previousUncheckedValuesRef.current || [];
      const preservedUnchecked = previousUnchecked.filter(
        (value) => !baseValues.includes(value),
      );
      return [...baseValues, ...preservedUnchecked];
      // Combine all options and make sure we don't have duplicate labels
      return [...stringOptions, ...numberOptions];
    }
    return [];
  }, [keyValueSuggestions]);

    const key = DATA_TYPE_VS_ATTRIBUTE_VALUES_KEY[dataType];
    return (data?.payload?.[key] || []).filter(
      (val) => val !== undefined && val !== null,
    );
  }, [data?.payload, filter.attributeKey.dataType, keyValueSuggestions, source]);

  const setSearchTextDebounced = useDebouncedFn((...args) => {
    setSearchText(args[0] as string);

@@ -350,30 +249,22 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
  const isMultipleValuesTrueForTheKey =
    Object.values(currentFilterState).filter((val) => val).length > 1;

  // Sort checked items to the top; always show unchecked items beneath, regardless of pagination
  const {
    visibleCheckedValues,
    uncheckedValues,
    visibleCheckedCount,
    hasMoreChecked,
  } = useMemo(() => {
  // Sort checked items to the top, then unchecked items
  const currentAttributeKeys = useMemo(() => {
    const checkedValues = attributeValues.filter(
      (val) => currentFilterState[val],
    );
    const unchecked = attributeValues.filter((val) => !currentFilterState[val]);
    const visibleChecked = checkedValues.slice(0, visibleItemsCount);

    return {
      visibleCheckedValues: visibleChecked,
      uncheckedValues: unchecked,
      visibleCheckedCount: visibleChecked.length,
      hasMoreChecked: checkedValues.length > visibleChecked.length,
    };
    const uncheckedValues = attributeValues.filter(
      (val) => !currentFilterState[val],
    );
    return [...checkedValues, ...uncheckedValues].slice(0, visibleItemsCount);
  }, [attributeValues, currentFilterState, visibleItemsCount]);

  useEffect(() => {
    previousUncheckedValuesRef.current = uncheckedValues;
  }, [uncheckedValues]);
  // Count of checked values in the currently visible items
  const checkedValuesCount = useMemo(
    () => currentAttributeKeys.filter((val) => currentFilterState[val]).length,
    [currentAttributeKeys, currentFilterState],
  );

  const handleClearFilterAttribute = (): void => {
    const preparedQuery: Query = {

@@ -702,90 +593,35 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
        )}
      </section>
    </section>
    {isOpen && isLoadingKeyValueSuggestions && !attributeValues.length && (
      <section className="loading">
        <Skeleton paragraph={{ rows: 4 }} />
      </section>
    )}
    {isOpen && !isLoadingKeyValueSuggestions && (
    {isOpen &&
      (isLoading || isLoadingKeyValueSuggestions) &&
      !attributeValues.length && (
        <section className="loading">
          <Skeleton paragraph={{ rows: 4 }} />
        </section>
      )}
    {isOpen && !isLoading && !isLoadingKeyValueSuggestions && (
      <>
        {!isEmptyStateWithDocsEnabled && (
          <section className="search" ref={searchContainerRef}>
          <section className="search">
            <Input
              placeholder="Search values"
              placeholder="Filter values"
              onChange={(e): void => setSearchTextDebounced(e.target.value)}
              disabled={isFilterDisabled}
              ref={searchInputRef}
            />
          </section>
        )}
        {attributeValues.length > 0 ? (
          <section className="values">
            {visibleCheckedValues.map((value: string) => (
            {currentAttributeKeys.map((value: string, index: number) => (
              <Fragment key={value}>
                <div className="value">
                  <Checkbox
                    onChange={(e): void => onChange(value, e.target.checked, false)}
                    checked={currentFilterState[value]}
                    disabled={isFilterDisabled}
                    rootClassName="check-box"
                  />

                {index === checkedValuesCount && checkedValuesCount > 0 && (
                  <div
                    className={cx(
                      'checkbox-value-section',
                      isFilterDisabled ? 'filter-disabled' : '',
                    )}
                    onClick={(): void => {
                      if (isFilterDisabled) {
                        return;
                      }
                      onChange(value, currentFilterState[value], true);
                    }}
                  >
                    <div className={`${filter.title} label-${value}`} />
                    {filter.customRendererForValue ? (
                      filter.customRendererForValue(value)
                    ) : (
                      <Typography.Text
                        className="value-string"
                        ellipsis={{ tooltip: { placement: 'top' } }}
                      >
                        {String(value)}
                      </Typography.Text>
                    )}
                    <Button type="text" className="only-btn">
                      {isSomeFilterPresentForCurrentAttribute
                        ? currentFilterState[value] && !isMultipleValuesTrueForTheKey
                          ? 'All'
                          : 'Only'
                        : 'Only'}
                    </Button>
                    <Button type="text" className="toggle-btn">
                      Toggle
                    </Button>
                  </div>
                </div>
              </Fragment>
            ))}

            {hasMoreChecked && (
              <section className="show-more">
                <Typography.Text
                  className="show-more-text"
                  onClick={(): void => setVisibleItemsCount((prev) => prev + 10)}
                >
                  Show More...
                </Typography.Text>
              </section>
            )}

            {visibleCheckedCount > 0 && uncheckedValues.length > 0 && (
              <div className="filter-separator" data-testid="filter-separator" />
            )}

            {uncheckedValues.map((value: string) => (
              <Fragment key={value}>
                    key="separator"
                    className="filter-separator"
                    data-testid="filter-separator"
                  />
                )}
                <div className="value">
                  <Checkbox
                    onChange={(e): void => onChange(value, e.target.checked, false)}

@@ -845,18 +681,16 @@ export default function CheckboxFilter(props: ICheckboxProps): JSX.Element {
            <Typography.Text>No values found</Typography.Text>{' '}
          </section>
        )}
        {visibleItemsCount >= attributeValues?.length &&
          attributeValues?.length > 0 &&
          !isDataComplete && (
            <section className="search-prompt" onClick={handleSearchPromptClick}>
              <AlertTriangle size={16} className="search-prompt__icon" />
              <span className="search-prompt__text">
                <Typography.Text className="search-prompt__subtitle">
                  Tap to search and load more suggestions.
                </Typography.Text>
              </span>
            </section>
          )}
        {visibleItemsCount < attributeValues?.length && (
          <section className="show-more">
            <Typography.Text
              className="show-more-text"
              onClick={(): void => setVisibleItemsCount((prev) => prev + 10)}
            >
              Show More...
            </Typography.Text>
          </section>
        )}
      </>
    )}
  </div>
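The replaced `useMemo` above partitions values into checked-first order before slicing to the visible count. The partition itself is a pure function that can be unit-tested in isolation; a sketch under a hypothetical name:

```typescript
// Illustrative pure helper mirroring the partition in the useMemo above:
// checked values first (capped at visibleItemsCount), unchecked beneath.
function partitionCheckedValues(
  values: string[],
  checkedState: Record<string, boolean>,
  visibleItemsCount: number,
): { visibleChecked: string[]; unchecked: string[] } {
  const checked = values.filter((v) => checkedState[v]);
  const unchecked = values.filter((v) => !checkedState[v]);
  return {
    visibleChecked: checked.slice(0, visibleItemsCount),
    unchecked,
  };
}
```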
@@ -127,34 +127,6 @@
  align-items: center;
  padding: 8px;
}

.filters-info {
  display: flex;
  align-items: center;
  padding: 6px 10px 0 10px;
  color: var(--bg-vanilla-400);
  gap: 6px;
  flex-wrap: wrap;

  .filters-info-toggle {
    display: inline-flex;
    align-items: center;
    gap: 6px;
    padding: 0;
    height: auto;
    color: var(--bg-vanilla-400);

    &:hover {
      color: var(--bg-robin-500);
    }
  }

  .filters-info-text {
    color: var(--bg-vanilla-400);
    font-size: 13px;
    line-height: 16px;
  }
}
}

.perilin-bg {

@@ -208,21 +180,5 @@
      }
    }
  }

  .filters-info {
    color: var(--bg-ink-400);

    .filters-info-toggle {
      color: var(--bg-ink-400);

      &:hover {
        color: var(--bg-ink-300);
      }
    }

    .filters-info-text {
      color: var(--bg-ink-400);
    }
  }
}
}
@@ -12,7 +12,7 @@ import {
  ComboboxList,
  ComboboxTrigger,
} from '@signozhq/combobox';
import { Button, Skeleton, Switch, Tooltip, Typography } from 'antd';
import { Skeleton, Switch, Tooltip, Typography } from 'antd';
import getLocalStorageKey from 'api/browser/localstorage/get';
import setLocalStorageKey from 'api/browser/localstorage/set';
import logEvent from 'api/common/logEvent';

@@ -23,7 +23,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
import { useApiMonitoringParams } from 'container/ApiMonitoring/queryParams';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { isFunction, isNull } from 'lodash-es';
import { Frown, Lightbulb, Settings2 as SettingsIcon } from 'lucide-react';
import { Frown, Settings2 as SettingsIcon } from 'lucide-react';
import { useAppContext } from 'providers/App/App';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
import { USER_ROLES } from 'types/roles';

@@ -51,7 +51,6 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
  } = props;
  const { user } = useAppContext();
  const [isSettingsOpen, setIsSettingsOpen] = useState(false);
  const [isFiltersInfoOpen, setIsFiltersInfoOpen] = useState(false);
  const isAdmin = user.role === USER_ROLES.ADMIN;
  const [params, setParams] = useApiMonitoringParams();
  const showIP = params.showIP ?? true;

@@ -292,21 +291,6 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
        />
      </div>
    )}
    <section className="filters-info">
      <Button
        type="link"
        className="filters-info-toggle"
        onClick={(): void => setIsFiltersInfoOpen((prev) => !prev)}
      >
        <Lightbulb size={15} />
        {isFiltersInfoOpen ? 'Hide info' : 'Adaptive filters'}
      </Button>
      {isFiltersInfoOpen && (
        <Typography.Text className="filters-info-text">
          Filter values update automatically based on other selected filters
        </Typography.Text>
      )}
    </section>
    <section className="filters">
      {filterConfig.map((filter) => {
        switch (filter.type) {
@@ -4,7 +4,6 @@ import {
  useApiMonitoringParams,
} from 'container/ApiMonitoring/queryParams';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import { useGetQueryKeyValueSuggestions } from 'hooks/querySuggestions/useGetQueryKeyValueSuggestions';
import {
  otherFiltersResponse,
  quickFiltersAttributeValuesResponse,

@@ -25,8 +24,6 @@ jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
}));
jest.mock('container/ApiMonitoring/queryParams');

jest.mock('hooks/querySuggestions/useGetQueryKeyValueSuggestions');

const handleFilterVisibilityChange = jest.fn();
const redirectWithQueryBuilderData = jest.fn();
const putHandler = jest.fn();

@@ -35,15 +32,13 @@ const mockSetApiMonitoringParams = jest.fn() as jest.MockedFunction<
>;
const mockUseApiMonitoringParams = jest.mocked(useApiMonitoringParams);

const mockUseGetQueryKeyValueSuggestions = jest.mocked(
  useGetQueryKeyValueSuggestions,
);

const BASE_URL = ENVIRONMENT.baseURL;
const SIGNAL = SignalType.LOGS;
const quickFiltersListURL = `${BASE_URL}/api/v1/orgs/me/filters/${SIGNAL}`;
const saveQuickFiltersURL = `${BASE_URL}/api/v1/orgs/me/filters`;
const quickFiltersSuggestionsURL = `${BASE_URL}/api/v3/filter_suggestions`;
const quickFiltersAttributeValuesURL = `${BASE_URL}/api/v3/autocomplete/attribute_values`;
const fieldsValuesURL = `${BASE_URL}/api/v1/fields/values`;

const FILTER_OS_DESCRIPTION = 'os.description';
const FILTER_K8S_DEPLOYMENT_NAME = 'k8s.deployment.name';

@@ -67,7 +62,10 @@ const setupServer = (): void => {
    putHandler(await req.json());
    return res(ctx.status(200), ctx.json({}));
  }),
  rest.get('*/api/v1/fields/values*', (_req, res, ctx) =>
  rest.get(quickFiltersAttributeValuesURL, (_req, res, ctx) =>
    res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
  ),
  rest.get(fieldsValuesURL, (_req, res, ctx) =>
    res(ctx.status(200), ctx.json(quickFiltersAttributeValuesResponse)),
  ),
);

@@ -137,24 +135,14 @@ beforeEach(() => {
      queryData: [
        {
          queryName: QUERY_NAME,
          filters: { items: [], op: 'AND' },
          filter: { expression: '' },
          filters: { items: [{ key: 'test', value: 'value' }] },
        },
      ],
    },
  },
  lastUsedQuery: 0,
  panelType: 'logs',
  redirectWithQueryBuilderData,
});

// Mock the hook to return data with mq-kafka
mockUseGetQueryKeyValueSuggestions.mockReturnValue({
  data: quickFiltersAttributeValuesResponse,
  isLoading: false,
  refetch: jest.fn(),
} as any);

mockUseApiMonitoringParams.mockReturnValue([
  { showIP: true } as ApiMonitoringParams,
  mockSetApiMonitoringParams,

@@ -271,9 +259,8 @@ describe('Quick Filters', () => {

  render(<TestQuickFilters />);

  // Wait for the filter to load with data
  const target = await screen.findByText('mq-kafka', {}, { timeout: 5000 });

  // Prefer role if possible; if label text isn't wired to input, clicking the label text is OK
  const target = await screen.findByText('mq-kafka');
  await user.click(target);

  await waitFor(() => {
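The test change swaps a wildcard MSW handler (`'*/api/v1/fields/values*'`) for exact URLs built from the environment base URL (`quickFiltersAttributeValuesURL`, `fieldsValuesURL`). A sketch of the exact-URL style and why it tends to be safer; the base URL and response shape below are illustrative only:

```typescript
import { rest } from 'msw';

// Exact-URL handler: only matches the path the component is expected to call,
// so a regression in URL construction surfaces as an unhandled request
// instead of being silently absorbed by a wildcard pattern.
const fieldsValuesURL = 'http://localhost/api/v1/fields/values'; // illustrative base URL
export const fieldsValuesHandler = rest.get(fieldsValuesURL, (_req, res, ctx) =>
  res(ctx.status(200), ctx.json({ data: { values: { stringValues: [] } } })),
);
```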
@@ -9,74 +9,6 @@
  padding: 0px;
}

.dashboard-header {
  border-bottom: 1px solid var(--bg-slate-400);
  display: flex;
  justify-content: space-between;
  gap: 16px;
  align-items: center;
  padding: 0 8px;
  box-sizing: border-box;

  .dashboard-breadcrumbs {
    width: 100%;
    height: 48px;
    display: flex;
    gap: 6px;
    align-items: center;
    max-width: 80%;

    .dashboard-btn {
      display: flex;
      align-items: center;
      color: var(--bg-vanilla-400);
      font-family: Inter;
      font-size: 14px;
      font-style: normal;
      font-weight: 400;
      line-height: 20px; /* 142.857% */
      letter-spacing: -0.07px;
      padding: 0px;
      height: 20px;
    }

    .dashboard-btn:hover {
      background-color: unset;
    }

    .id-btn {
      display: flex;
      align-items: center;
      padding: 0px 2px;
      border-radius: 2px;
      background: rgba(113, 144, 249, 0.1);
      color: var(--bg-robin-400);
      font-family: Inter;
      font-size: 14px;
      font-style: normal;
      font-weight: 400;
      line-height: 20px; /* 142.857% */
      height: 20px;

      max-width: calc(100% - 120px);

      span {
        white-space: nowrap;
        overflow: hidden;
        text-overflow: ellipsis;
      }

      .ant-btn-icon {
        margin-inline-end: 4px;
      }
    }
    .id-btn:hover {
      background: rgba(113, 144, 249, 0.1);
      color: var(--bg-robin-300);
    }
  }
}

.dashboard-details {
  display: flex;
  justify-content: space-between;

@@ -535,15 +467,6 @@
.dashboard-description-container {
  color: var(--bg-ink-400);

  .dashboard-header {
    border-bottom: 1px solid var(--bg-vanilla-300);
    .dashboard-breadcrumbs {
      .dashboard-btn {
        color: var(--bg-ink-400);
      }
    }
  }

  .dashboard-details {
    .left-section {
      .dashboard-title {
@@ -16,9 +16,7 @@ import {
} from 'antd';
import logEvent from 'api/common/logEvent';
import ConfigureIcon from 'assets/Integrations/ConfigureIcon';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';
import { PANEL_GROUP_TYPES, PANEL_TYPES } from 'constants/queryBuilder';
import ROUTES from 'constants/routes';
import { DeleteButton } from 'container/ListOfDashboard/TableComponents/DeleteButton';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';

@@ -27,7 +25,6 @@ import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import useComponentPermission from 'hooks/useComponentPermission';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { useNotifications } from 'hooks/useNotifications';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { isEmpty } from 'lodash-es';
import {
  Check,

@@ -37,7 +34,6 @@ import {
  FolderKanban,
  Fullscreen,
  Globe,
  LayoutGrid,
  LockKeyhole,
  PenLine,
  X,

@@ -51,6 +47,7 @@ import { ROLES, USER_ROLES } from 'types/roles';
import { ComponentTypes } from 'utils/permission';
import { v4 as uuid } from 'uuid';

import DashboardHeader from '../components/DashboardHeader/DashboardHeader';
import DashboardGraphSlider from '../ComponentsSlider';
import DashboardSettings from '../DashboardSettings';
import { Base64Icons } from '../DashboardSettings/General/utils';

@@ -71,7 +68,6 @@ interface DashboardDescriptionProps {

// eslint-disable-next-line sonarjs/cognitive-complexity
function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const { handle } = props;
  const {
    selectedDashboard,

@@ -80,7 +76,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
    layouts,
    setLayouts,
    isDashboardLocked,
    listSortOrder,
    setSelectedDashboard,
    handleToggleDashboardSlider,
    setSelectedRowWidgetId,

@@ -292,17 +287,6 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {
    });
  }

  function goToListPage(): void {
    const urlParams = new URLSearchParams();
    urlParams.set('columnKey', listSortOrder.columnKey as string);
    urlParams.set('order', listSortOrder.order as string);
    urlParams.set('page', listSortOrder.pagination as string);
    urlParams.set('search', listSortOrder.search as string);

    const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
    safeNavigate(generatedUrl);
  }

  const {
    data: publicDashboardResponse,
    isLoading: isLoadingPublicDashboardData,

@@ -351,32 +335,7 @@ function DashboardDescription(props: DashboardDescriptionProps): JSX.Element {

  return (
    <Card className="dashboard-description-container">
      <div className="dashboard-header">
        <section className="dashboard-breadcrumbs">
          <Button
            type="text"
            icon={<LayoutGrid size={14} />}
            className="dashboard-btn"
            onClick={(): void => goToListPage()}
          >
            Dashboard /
          </Button>
          <Button type="text" className="id-btn dashboard-name-btn">
            <img
              src={image}
              alt="dashboard-icon"
              style={{ height: '14px', width: '14px' }}
            />
            {title}
          </Button>
        </section>

        <HeaderRightSection
          enableAnnouncements={false}
          enableShare
          enableFeedback
        />
      </div>
      <DashboardHeader />
      <section className="dashboard-details">
        <div className="left-section">
          <img src={image} alt="dashboard-img" className="dashboard-img" />
@@ -0,0 +1,71 @@
.dashboard-breadcrumbs {
  width: 100%;
  height: 48px;
  display: flex;
  gap: 6px;
  align-items: center;
  max-width: 80%;

  .dashboard-btn {
    display: flex;
    align-items: center;
    color: var(--bg-vanilla-400);
    font-family: Inter;
    font-size: 14px;
    font-style: normal;
    font-weight: 400;
    line-height: 20px; /* 142.857% */
    letter-spacing: -0.07px;
    padding: 0px;
    height: 20px;
  }

  .dashboard-btn:hover {
    background-color: unset;
  }

  .id-btn {
    display: flex;
    align-items: center;
    gap: 4px;
    padding: 0px 2px;
    border-radius: 2px;
    background: rgba(113, 144, 249, 0.1);
    color: var(--bg-robin-400);
    font-family: Inter;
    font-size: 14px;
    font-style: normal;
    font-weight: 400;
    line-height: 20px; /* 142.857% */
    height: 20px;

    max-width: calc(100% - 120px);

    span {
      white-space: nowrap;
      overflow: hidden;
      text-overflow: ellipsis;
    }

    .ant-btn-icon {
      margin-inline-end: 4px;
    }
  }
  .id-btn:hover {
    background: rgba(113, 144, 249, 0.1);
    color: var(--bg-robin-300);
  }

  .dashboard-icon-image {
    height: 14px;
    width: 14px;
  }
}

.lightMode {
  .dashboard-breadcrumbs {
    .dashboard-btn {
      color: var(--bg-ink-400);
    }
  }
}
@@ -0,0 +1,55 @@
import { useCallback } from 'react';
import { Button } from 'antd';
import ROUTES from 'constants/routes';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
import { LayoutGrid } from 'lucide-react';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { DashboardData } from 'types/api/dashboard/getAll';

import { Base64Icons } from '../../DashboardSettings/General/utils';

import './DashboardBreadcrumbs.styles.scss';

function DashboardBreadcrumbs(): JSX.Element {
  const { safeNavigate } = useSafeNavigate();
  const { selectedDashboard, listSortOrder } = useDashboard();

  const selectedData = selectedDashboard
    ? {
        ...selectedDashboard.data,
        uuid: selectedDashboard.id,
      }
    : ({} as DashboardData);

  const { title = '', image = Base64Icons[0] } = selectedData || {};

  const goToListPage = useCallback(() => {
    const urlParams = new URLSearchParams();
    urlParams.set('columnKey', listSortOrder.columnKey as string);
    urlParams.set('order', listSortOrder.order as string);
    urlParams.set('page', listSortOrder.pagination as string);
    urlParams.set('search', listSortOrder.search as string);

    const generatedUrl = `${ROUTES.ALL_DASHBOARD}?${urlParams.toString()}`;
    safeNavigate(generatedUrl);
  }, [listSortOrder, safeNavigate]);

  return (
    <div className="dashboard-breadcrumbs">
      <Button
        type="text"
        icon={<LayoutGrid size={14} />}
        className="dashboard-btn"
        onClick={goToListPage}
      >
        Dashboard /
      </Button>
      <Button type="text" className="id-btn dashboard-name-btn">
        <img src={image} alt="dashboard-icon" className="dashboard-icon-image" />
        {title}
      </Button>
    </div>
  );
}

export default DashboardBreadcrumbs;
@@ -0,0 +1,15 @@
.dashboard-header {
  border-bottom: 1px solid var(--bg-slate-400);
  display: flex;
  justify-content: space-between;
  gap: 16px;
  align-items: center;
  padding: 0 8px;
  box-sizing: border-box;
}

.lightMode {
  .dashboard-header {
    border-bottom: 1px solid var(--bg-vanilla-300);
  }
}
@@ -0,0 +1,17 @@
import { memo } from 'react';
import HeaderRightSection from 'components/HeaderRightSection/HeaderRightSection';

import DashboardBreadcrumbs from './DashboardBreadcrumbs';

import './DashboardHeader.styles.scss';

function DashboardHeader(): JSX.Element {
  return (
    <div className="dashboard-header">
      <DashboardBreadcrumbs />
      <HeaderRightSection enableAnnouncements={false} enableShare enableFeedback />
    </div>
  );
}

export default memo(DashboardHeader);
@@ -23,6 +23,7 @@ export default function ChartWrapper({
  width: containerWidth,
  height: containerHeight,
  showTooltip = true,
  showLegend = true,
  canPinTooltip = false,
  syncMode,
  syncKey,

@@ -36,6 +37,9 @@ export default function ChartWrapper({

  const legendComponent = useCallback(
    (averageLegendWidth: number): React.ReactNode => {
      if (!showLegend) {
        return null;
      }
      return (
        <Legend
          config={config}

@@ -44,7 +48,7 @@ export default function ChartWrapper({
        />
      );
    },
    [config, legendConfig.position],
    [config, legendConfig.position, showLegend],
  );

  const renderTooltipCallback = useCallback(

@@ -60,6 +64,7 @@ export default function ChartWrapper({
  return (
    <PlotContextProvider>
      <ChartLayout
        showLegend={showLegend}
        config={config}
        containerWidth={containerWidth}
        containerHeight={containerHeight}
@@ -0,0 +1,55 @@
import { useCallback } from 'react';
import ChartWrapper from 'container/DashboardContainer/visualization/charts/ChartWrapper/ChartWrapper';
import HistogramTooltip from 'lib/uPlotV2/components/Tooltip/HistogramTooltip';
import { buildTooltipContent } from 'lib/uPlotV2/components/Tooltip/utils';
import {
  HistogramTooltipProps,
  TooltipRenderArgs,
} from 'lib/uPlotV2/components/types';

import { HistogramChartProps } from '../types';

export default function Histogram(props: HistogramChartProps): JSX.Element {
  const {
    children,
    renderTooltip: customRenderTooltip,
    isQueriesMerged,
    ...rest
  } = props;

  const renderTooltip = useCallback(
    (props: TooltipRenderArgs): React.ReactNode => {
      if (customRenderTooltip) {
        return customRenderTooltip(props);
      }
      const content = buildTooltipContent({
        data: props.uPlotInstance.data,
        series: props.uPlotInstance.series,
        dataIndexes: props.dataIndexes,
        activeSeriesIndex: props.seriesIndex,
        uPlotInstance: props.uPlotInstance,
        yAxisUnit: rest.yAxisUnit ?? '',
        decimalPrecision: rest.decimalPrecision,
      });
      const tooltipProps: HistogramTooltipProps = {
        ...props,
        timezone: rest.timezone,
        yAxisUnit: rest.yAxisUnit,
        decimalPrecision: rest.decimalPrecision,
        content,
      };
      return <HistogramTooltip {...tooltipProps} />;
    },
    [customRenderTooltip, rest.timezone, rest.yAxisUnit, rest.decimalPrecision],
  );

  return (
    <ChartWrapper
      showLegend={!isQueriesMerged}
      {...rest}
      renderTooltip={renderTooltip}
    >
      {children}
    </ChartWrapper>
  );
}
@@ -7,6 +7,7 @@ interface BaseChartProps {
  width: number;
  height: number;
  showTooltip?: boolean;
  showLegend?: boolean;
  timezone: string;
  canPinTooltip?: boolean;
  yAxisUnit?: string;
@@ -17,6 +18,7 @@ interface BaseChartProps {
interface UPlotBasedChartProps {
  config: UPlotConfigBuilder;
  data: uPlot.AlignedData;
  legendConfig: LegendConfig;
  syncMode?: DashboardCursorSync;
  syncKey?: string;
  plotRef?: (plot: uPlot | null) => void;
@@ -26,14 +28,20 @@ interface UPlotBasedChartProps {
}

export interface TimeSeriesChartProps
  extends BaseChartProps,
    UPlotBasedChartProps {}

export interface HistogramChartProps
  extends BaseChartProps,
    UPlotBasedChartProps {
  legendConfig: LegendConfig;
  isQueriesMerged?: boolean;
}

export interface BarChartProps extends BaseChartProps, UPlotBasedChartProps {
  legendConfig: LegendConfig;
  isStackedBarChart?: boolean;
}

export type ChartProps = TimeSeriesChartProps | BarChartProps;
export type ChartProps =
  | TimeSeriesChartProps
  | BarChartProps
  | HistogramChartProps;

@@ -0,0 +1,69 @@
import { renderHook } from '@testing-library/react';
import { useDashboard } from 'providers/Dashboard/Dashboard';

import { useScrollWidgetIntoView } from '../useScrollWidgetIntoView';

jest.mock('providers/Dashboard/Dashboard');

type MockHTMLElement = {
  scrollIntoView: jest.Mock;
  focus: jest.Mock;
};

function createMockElement(): MockHTMLElement {
  return {
    scrollIntoView: jest.fn(),
    focus: jest.fn(),
  };
}

describe('useScrollWidgetIntoView', () => {
  const mockedUseDashboard = useDashboard as jest.MockedFunction<
    typeof useDashboard
  >;

  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('scrolls into view and focuses when toScrollWidgetId matches widget id', () => {
    const setToScrollWidgetId = jest.fn();
    const mockElement = createMockElement();
    const ref = ({
      current: mockElement,
    } as unknown) as React.RefObject<HTMLDivElement>;

    mockedUseDashboard.mockReturnValue(({
      toScrollWidgetId: 'widget-id',
      setToScrollWidgetId,
    } as unknown) as ReturnType<typeof useDashboard>);

    renderHook(() => useScrollWidgetIntoView('widget-id', ref));

    expect(mockElement.scrollIntoView).toHaveBeenCalledWith({
      behavior: 'smooth',
      block: 'center',
    });
    expect(mockElement.focus).toHaveBeenCalled();
    expect(setToScrollWidgetId).toHaveBeenCalledWith('');
  });

  it('does nothing when toScrollWidgetId does not match widget id', () => {
    const setToScrollWidgetId = jest.fn();
    const mockElement = createMockElement();
    const ref = ({
      current: mockElement,
    } as unknown) as React.RefObject<HTMLDivElement>;

    mockedUseDashboard.mockReturnValue(({
      toScrollWidgetId: 'other-widget',
      setToScrollWidgetId,
    } as unknown) as ReturnType<typeof useDashboard>);

    renderHook(() => useScrollWidgetIntoView('widget-id', ref));

    expect(mockElement.scrollIntoView).not.toHaveBeenCalled();
    expect(mockElement.focus).not.toHaveBeenCalled();
    expect(setToScrollWidgetId).not.toHaveBeenCalled();
  });
});
@@ -0,0 +1,26 @@
import { RefObject, useEffect } from 'react';
import { useDashboard } from 'providers/Dashboard/Dashboard';

/**
 * Scrolls the given widget container into view when the dashboard
 * requests it via `toScrollWidgetId`.
 *
 * Intended for use in panel components that render a single widget.
 */
export function useScrollWidgetIntoView<T extends HTMLElement>(
  widgetId: string,
  widgetContainerRef: RefObject<T>,
): void {
  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();

  useEffect(() => {
    if (toScrollWidgetId === widgetId) {
      widgetContainerRef.current?.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
      widgetContainerRef.current?.focus();
      setToScrollWidgetId('');
    }
  }, [toScrollWidgetId, setToScrollWidgetId, widgetId, widgetContainerRef]);
}
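
For context, a minimal usage sketch of the hook inside a panel component (the `ExamplePanel` name and its prop are illustrative, not part of this change):

// Hypothetical consumer: the dashboard broadcasts a widget id via
// toScrollWidgetId; the hook scrolls and focuses this container once, then resets it.
import { useRef } from 'react';

import { useScrollWidgetIntoView } from './useScrollWidgetIntoView';

function ExamplePanel({ widgetId }: { widgetId: string }): JSX.Element {
  const containerRef = useRef<HTMLDivElement>(null);
  useScrollWidgetIntoView(widgetId, containerRef);
  // tabIndex makes the div focusable so the hook's focus() call has an effect
  return <div ref={containerRef} tabIndex={-1} />;
}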

@@ -1,12 +1,14 @@
import { useMemo } from 'react';
import cx from 'classnames';
import { calculateChartDimensions } from 'container/DashboardContainer/visualization/charts/utils';
import { MAX_LEGEND_WIDTH } from 'lib/uPlotV2/components/Legend/Legend';
import { LegendConfig, LegendPosition } from 'lib/uPlotV2/components/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';

import './ChartLayout.styles.scss';

export interface ChartLayoutProps {
  showLegend?: boolean;
  legendComponent: (legendPerSet: number) => React.ReactNode;
  children: (props: {
    chartWidth: number;
@@ -20,6 +22,7 @@ export interface ChartLayoutProps {
  config: UPlotConfigBuilder;
}
export default function ChartLayout({
  showLegend = true,
  legendComponent,
  children,
  layoutChildren,
@@ -30,6 +33,15 @@ export default function ChartLayout({
}: ChartLayoutProps): JSX.Element {
  const chartDimensions = useMemo(
    () => {
      if (!showLegend) {
        return {
          width: containerWidth,
          height: containerHeight,
          legendWidth: 0,
          legendHeight: 0,
          averageLegendWidth: MAX_LEGEND_WIDTH,
        };
      }
      const legendItemsMap = config.getLegendItems();
      const seriesLabels = Object.values(legendItemsMap)
        .map((item) => item.label)
@@ -42,7 +54,7 @@ export default function ChartLayout({
      });
    },
    // eslint-disable-next-line react-hooks/exhaustive-deps
    [containerWidth, containerHeight, legendConfig],
    [containerWidth, containerHeight, legendConfig, showLegend],
  );

  return (
@@ -60,15 +72,17 @@ export default function ChartLayout({
          averageLegendWidth: chartDimensions.averageLegendWidth,
        })}
      </div>
      <div
        className="chart-layout__legend-wrapper"
        style={{
          height: chartDimensions.legendHeight,
          width: chartDimensions.legendWidth,
        }}
      >
        {legendComponent(chartDimensions.averageLegendWidth)}
      </div>
      {showLegend && (
        <div
          className="chart-layout__legend-wrapper"
          style={{
            height: chartDimensions.legendHeight,
            width: chartDimensions.legendWidth,
          }}
        >
          {legendComponent(chartDimensions.averageLegendWidth)}
        </div>
      )}
    </div>
    {layoutChildren}
  </div>

@@ -1,10 +1,10 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import ContextMenu from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
@@ -27,7 +27,6 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
    onToggleModelHandler,
  } = props;
  const uPlotRef = useRef<uPlot | null>(null);
  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
  const graphRef = useRef<HTMLDivElement>(null);
  const [minTimeScale, setMinTimeScale] = useState<number>();
  const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -36,16 +35,7 @@ function BarPanel(props: PanelWrapperProps): JSX.Element {
  const isDarkMode = useIsDarkMode();
  const { timezone } = useTimezone();

  useEffect(() => {
    if (toScrollWidgetId === widget.id) {
      graphRef.current?.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
      graphRef.current?.focus();
      setToScrollWidgetId('');
    }
  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
  useScrollWidgetIntoView(widget.id, graphRef);

  useEffect((): void => {
    const { startTime, endTime } = getTimeRange(queryResponse);

@@ -0,0 +1,114 @@
import { useMemo, useRef } from 'react';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { DashboardCursorSync } from 'lib/uPlotV2/plugins/TooltipPlugin/types';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';

import Histogram from '../../charts/Histogram/Histogram';
import ChartManager from '../../components/ChartManager/ChartManager';
import {
  prepareHistogramPanelConfig,
  prepareHistogramPanelData,
} from './utils';

import '../Panel.styles.scss';

function HistogramPanel(props: PanelWrapperProps): JSX.Element {
  const {
    panelMode,
    queryResponse,
    widget,
    isFullViewMode,
    onToggleModelHandler,
  } = props;
  const uPlotRef = useRef<uPlot | null>(null);
  const graphRef = useRef<HTMLDivElement>(null);
  const containerDimensions = useResizeObserver(graphRef);

  const isDarkMode = useIsDarkMode();
  const { timezone } = useTimezone();

  useScrollWidgetIntoView(widget.id, graphRef);

  const config = useMemo(() => {
    return prepareHistogramPanelConfig({
      widget,
      isDarkMode,
      apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
      panelMode,
    });
  }, [widget, isDarkMode, queryResponse?.data?.payload, panelMode]);

  const chartData = useMemo(() => {
    if (!queryResponse?.data?.payload) {
      return [];
    }
    return prepareHistogramPanelData({
      apiResponse: queryResponse?.data?.payload as MetricRangePayloadProps,
      bucketWidth: widget?.bucketWidth,
      bucketCount: widget?.bucketCount,
      mergeAllActiveQueries: widget?.mergeAllActiveQueries,
    });
  }, [
    queryResponse?.data?.payload,
    widget?.bucketWidth,
    widget?.bucketCount,
    widget?.mergeAllActiveQueries,
  ]);

  const layoutChildren = useMemo(() => {
    if (!isFullViewMode || widget.mergeAllActiveQueries) {
      return null;
    }
    return (
      <ChartManager
        config={config}
        alignedData={chartData}
        yAxisUnit={widget.yAxisUnit}
        onCancel={onToggleModelHandler}
      />
    );
  }, [
    isFullViewMode,
    config,
    chartData,
    widget.yAxisUnit,
    onToggleModelHandler,
    widget.mergeAllActiveQueries,
  ]);

  return (
    <div className="panel-container" ref={graphRef}>
      {containerDimensions.width > 0 && containerDimensions.height > 0 && (
        <Histogram
          config={config}
          legendConfig={{
            position: widget?.legendPosition ?? LegendPosition.BOTTOM,
          }}
          plotRef={(plot: uPlot | null): void => {
            uPlotRef.current = plot;
          }}
          onDestroy={(): void => {
            uPlotRef.current = null;
          }}
          isQueriesMerged={widget.mergeAllActiveQueries}
          yAxisUnit={widget.yAxisUnit}
          decimalPrecision={widget.decimalPrecision}
          syncMode={DashboardCursorSync.Crosshair}
          timezone={timezone.value}
          data={chartData as uPlot.AlignedData}
          width={containerDimensions.width}
          height={containerDimensions.height}
          layoutChildren={layoutChildren}
        />
      )}
    </div>
  );
}

export default HistogramPanel;
@@ -0,0 +1,223 @@
/* eslint-disable simple-import-sort/imports */
import type { UseQueryResult } from 'react-query';
import { render, screen } from 'tests/test-utils';

import { PanelMode } from 'container/DashboardContainer/visualization/panels/types';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { Widgets } from 'types/api/dashboard/getAll';
import {
  MetricQueryRangeSuccessResponse,
  MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';

import HistogramPanel from '../HistogramPanel';
import { HistogramChartProps } from 'container/DashboardContainer/visualization/charts/types';

jest.mock('hooks/useDimensions', () => ({
  useResizeObserver: jest.fn().mockReturnValue({ width: 800, height: 400 }),
}));

jest.mock('hooks/useDarkMode', () => ({
  useIsDarkMode: jest.fn().mockReturnValue(false),
}));

jest.mock('providers/Timezone', () => ({
  __esModule: true,
  // Provide a no-op provider component so AllTheProviders can render
  default: ({ children }: { children: React.ReactNode }): JSX.Element => (
    <>{children}</>
  ),
  // And mock the hook used by HistogramPanel
  useTimezone: jest.fn().mockReturnValue({
    timezone: { value: 'UTC' },
  }),
}));

jest.mock(
  'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView',
  () => ({
    useScrollWidgetIntoView: jest.fn(),
  }),
);

jest.mock(
  'container/DashboardContainer/visualization/charts/Histogram/Histogram',
  () => ({
    __esModule: true,
    default: (props: HistogramChartProps): JSX.Element => (
      <div data-testid="histogram-chart">
        <div data-testid="histogram-props">
          {JSON.stringify({
            legendPosition: props.legendConfig?.position,
            isQueriesMerged: props.isQueriesMerged,
            yAxisUnit: props.yAxisUnit,
            decimalPrecision: props.decimalPrecision,
          })}
        </div>
        {props.layoutChildren}
      </div>
    ),
  }),
);

jest.mock(
  'container/DashboardContainer/visualization/components/ChartManager/ChartManager',
  () => ({
    __esModule: true,
    default: (): JSX.Element => (
      <div data-testid="chart-manager">ChartManager</div>
    ),
  }),
);

function createQueryResponse(
  payloadOverrides: Partial<MetricRangePayloadProps> = {},
): { data: { payload: MetricRangePayloadProps } } {
  const basePayload: MetricRangePayloadProps = {
    data: {
      result: [
        {
          metric: {},
          queryName: 'A',
          legend: 'Series A',
          values: [
            [1, '10'],
            [2, '20'],
          ],
        },
      ],
      resultType: 'matrix',
      newResult: {
        data: {
          result: [],
          resultType: 'matrix',
        },
      },
    },
  };

  return {
    data: {
      payload: {
        ...basePayload,
        ...payloadOverrides,
      },
    },
  };
}

type WidgetLike = {
  id: string;
  yAxisUnit: string;
  decimalPrecision: number;
  legendPosition: LegendPosition;
  mergeAllActiveQueries: boolean;
};

function createWidget(overrides: Partial<WidgetLike> = {}): WidgetLike {
  return {
    id: 'widget-id',
    yAxisUnit: 'ms',
    decimalPrecision: 2,
    legendPosition: LegendPosition.BOTTOM,
    mergeAllActiveQueries: false,
    ...overrides,
  };
}

describe('HistogramPanel', () => {
  it('renders Histogram when container has dimensions', () => {
    const widget = (createWidget() as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode={false}
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    expect(screen.getByTestId('histogram-chart')).toBeInTheDocument();
  });

  it('passes legend position and other props to Histogram', () => {
    const widget = (createWidget({
      legendPosition: LegendPosition.RIGHT,
    }) as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode={false}
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    const propsJson = screen.getByTestId('histogram-props').textContent || '{}';
    const parsed = JSON.parse(propsJson);

    expect(parsed.legendPosition).toBe(LegendPosition.RIGHT);
    expect(parsed.yAxisUnit).toBe('ms');
    expect(parsed.decimalPrecision).toBe(2);
  });

  it('renders ChartManager in full view when queries are not merged', () => {
    const widget = (createWidget({
      mergeAllActiveQueries: false,
    }) as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    expect(screen.getByTestId('chart-manager')).toBeInTheDocument();
  });

  it('does not render ChartManager when queries are merged', () => {
    const widget = (createWidget({
      mergeAllActiveQueries: true,
    }) as unknown) as Widgets;
    const queryResponse = (createQueryResponse() as unknown) as UseQueryResult<
      MetricQueryRangeSuccessResponse,
      Error
    >;

    render(
      <HistogramPanel
        panelMode={PanelMode.DASHBOARD_VIEW}
        widget={widget}
        queryResponse={queryResponse}
        isFullViewMode
        onToggleModelHandler={jest.fn()}
        onDragSelect={jest.fn()}
      />,
    );

    expect(screen.queryByTestId('chart-manager')).not.toBeInTheDocument();
  });
});
@@ -0,0 +1,231 @@
import { histogramBucketSizes } from '@grafana/data';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { DEFAULT_BUCKET_COUNT } from 'container/PanelWrapper/constants';
import { getLegend } from 'lib/dashboard/getQueryResults';
import getLabelName from 'lib/getLabelName';
import { DrawStyle } from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { AlignedData } from 'uplot';
import { incrRoundDn, roundDecimals } from 'utils/round';

import { PanelMode } from '../types';
import { buildBaseConfig } from '../utils/baseConfigBuilder';
import {
  buildHistogramBuckets,
  mergeAlignedDataTables,
  prependNullBinToFirstHistogramSeries,
  replaceUndefinedWithNullInAlignedData,
} from '../utils/histogram';

export interface PrepareHistogramPanelDataParams {
  apiResponse: MetricRangePayloadProps;
  bucketWidth?: number;
  bucketCount?: number;
  mergeAllActiveQueries?: boolean;
}

const BUCKET_OFFSET = 0;
const HIST_SORT = (a: number, b: number): number => a - b;

function extractNumericValues(
  result: MetricRangePayloadProps['data']['result'],
): number[] {
  const values: number[] = [];
  for (const item of result) {
    for (const [, valueStr] of item.values) {
      values.push(Number.parseFloat(valueStr) || 0);
    }
  }
  return values;
}

function computeSmallestDelta(sortedValues: number[]): number {
  if (sortedValues.length <= 1) {
    return 0;
  }
  let smallest = Infinity;
  for (let i = 1; i < sortedValues.length; i++) {
    const delta = sortedValues[i] - sortedValues[i - 1];
    if (delta > 0) {
      smallest = Math.min(smallest, delta);
    }
  }
  return smallest === Infinity ? 0 : smallest;
}

function selectBucketSize({
  range,
  bucketCount,
  smallestDelta,
  bucketWidthOverride,
}: {
  range: number;
  bucketCount: number;
  smallestDelta: number;
  bucketWidthOverride?: number;
}): number {
  if (bucketWidthOverride != null && bucketWidthOverride > 0) {
    return bucketWidthOverride;
  }
  const targetSize = range / bucketCount;
  for (const candidate of histogramBucketSizes) {
    if (targetSize < candidate && candidate >= smallestDelta) {
      return candidate;
    }
  }
  return 0;
}

function buildFrames(
  result: MetricRangePayloadProps['data']['result'],
  mergeAllActiveQueries: boolean,
): number[][] {
  const frames: number[][] = result.map((item) =>
    item.values.map(([, valueStr]) => Number.parseFloat(valueStr) || 0),
  );
  if (mergeAllActiveQueries && frames.length > 1) {
    const first = frames[0];
    for (let i = 1; i < frames.length; i++) {
      first.push(...frames[i]);
      frames[i] = [];
    }
  }
  return frames;
}

export function prepareHistogramPanelData({
  apiResponse,
  bucketWidth,
  bucketCount: bucketCountProp = DEFAULT_BUCKET_COUNT,
  mergeAllActiveQueries = false,
}: PrepareHistogramPanelDataParams): AlignedData {
  const bucketCount = bucketCountProp ?? DEFAULT_BUCKET_COUNT;
  const result = apiResponse.data.result;

  const seriesValues = extractNumericValues(result);
  if (seriesValues.length === 0) {
    return [[]];
  }

  const sorted = [...seriesValues].sort((a, b) => a - b);
  const min = sorted[0];
  const max = sorted[sorted.length - 1];
  const range = max - min;
  const smallestDelta = computeSmallestDelta(sorted);
  let bucketSize = selectBucketSize({
    range,
    bucketCount,
    smallestDelta,
    bucketWidthOverride: bucketWidth,
  });
  if (bucketSize <= 0) {
    bucketSize = range > 0 ? range / bucketCount : 1;
  }

  const getBucket = (v: number): number =>
    roundDecimals(incrRoundDn(v - BUCKET_OFFSET, bucketSize) + BUCKET_OFFSET, 9);

  const frames = buildFrames(result, mergeAllActiveQueries);
  const histogramsPerSeries: AlignedData[] = frames
    .filter((frame) => frame.length > 0)
    .map((frame) => buildHistogramBuckets(frame, getBucket, HIST_SORT));

  if (histogramsPerSeries.length === 0) {
    return [[]];
  }

  const mergedHistogramData = mergeAlignedDataTables(histogramsPerSeries);
  replaceUndefinedWithNullInAlignedData(mergedHistogramData);
  prependNullBinToFirstHistogramSeries(mergedHistogramData, bucketSize);
  return mergedHistogramData;
}
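
To make the bucket assignment above concrete, here is a small worked sketch. It assumes `incrRoundDn(v, size)` rounds `v` down to the nearest multiple of `size`, which is what its use above implies; the stand-in below is illustrative, not the real `utils/round` helper:

// With bucketSize = 0.5 and BUCKET_OFFSET = 0, each value maps onto the lower
// edge of its bucket: 0.3 -> 0, 0.7 -> 0.5, 0.9 -> 0.5, 2.0 -> 2.
const bucketSize = 0.5;
const incrRoundDnSketch = (v: number, size: number): number =>
  Math.floor(v / size) * size;
const getBucketSketch = (v: number): number => incrRoundDnSketch(v, bucketSize);
console.log([0.3, 0.7, 0.9, 2.0].map(getBucketSketch)); // [0, 0.5, 0.5, 2]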

export function prepareHistogramPanelConfig({
  widget,
  apiResponse,
  panelMode,
  isDarkMode,
}: {
  widget: Widgets;
  apiResponse: MetricRangePayloadProps;
  panelMode: PanelMode;
  isDarkMode: boolean;
}): UPlotConfigBuilder {
  const builder = buildBaseConfig({
    widget,
    isDarkMode,
    apiResponse,
    panelMode,
    panelType: PANEL_TYPES.HISTOGRAM,
  });
  builder.setCursor({
    drag: {
      x: false,
      y: false,
      setScale: true,
    },
    focus: {
      prox: 1e3,
    },
  });

  builder.addScale({
    scaleKey: 'x',
    time: false,
    auto: true,
  });
  builder.addScale({
    scaleKey: 'y',
    time: false,
    auto: true,
    min: 0,
  });

  const currentQuery = widget.query;
  const mergeAllActiveQueries = widget?.mergeAllActiveQueries ?? false;

  // When merged, data has only one y column; add one series to match. Otherwise add one per result.
  if (mergeAllActiveQueries) {
    builder.addSeries({
      label: '',
      scaleKey: 'y',
      drawStyle: DrawStyle.Bar,
      panelType: PANEL_TYPES.HISTOGRAM,
      colorMapping: widget.customLegendColors ?? {},
      spanGaps: false,
      barWidthFactor: 1,
      pointSize: 5,
      lineColor: '#3f5ecc',
      fillColor: '#4E74F8',
      isDarkMode,
    });
  } else {
    apiResponse.data.result.forEach((series) => {
      const baseLabelName = getLabelName(
        series.metric,
        series.queryName || '', // query
        series.legend || '',
      );

      const label = currentQuery
        ? getLegend(series, currentQuery, baseLabelName)
        : baseLabelName;

      builder.addSeries({
        label,
        scaleKey: 'y',
        drawStyle: DrawStyle.Bar,
        panelType: PANEL_TYPES.HISTOGRAM,
        colorMapping: widget.customLegendColors ?? {},
        spanGaps: false,
        barWidthFactor: 1,
        pointSize: 5,
        isDarkMode,
      });
    });
  }

  return builder;
}

@@ -2,12 +2,12 @@ import { useEffect, useMemo, useRef, useState } from 'react';
import TimeSeries from 'container/DashboardContainer/visualization/charts/TimeSeries/TimeSeries';
import ChartManager from 'container/DashboardContainer/visualization/components/ChartManager/ChartManager';
import { usePanelContextMenu } from 'container/DashboardContainer/visualization/hooks/usePanelContextMenu';
import { useScrollWidgetIntoView } from 'container/DashboardContainer/visualization/hooks/useScrollWidgetIntoView';
import { PanelWrapperProps } from 'container/PanelWrapper/panelWrapper.types';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useResizeObserver } from 'hooks/useDimensions';
import { LegendPosition } from 'lib/uPlotV2/components/types';
import { ContextMenu } from 'periscope/components/ContextMenu';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { useTimezone } from 'providers/Timezone';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import uPlot from 'uplot';
@@ -26,7 +26,6 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
    isFullViewMode,
    onToggleModelHandler,
  } = props;
  const { toScrollWidgetId, setToScrollWidgetId } = useDashboard();
  const graphRef = useRef<HTMLDivElement>(null);
  const [minTimeScale, setMinTimeScale] = useState<number>();
  const [maxTimeScale, setMaxTimeScale] = useState<number>();
@@ -35,16 +34,7 @@ function TimeSeriesPanel(props: PanelWrapperProps): JSX.Element {
  const isDarkMode = useIsDarkMode();
  const { timezone } = useTimezone();

  useEffect(() => {
    if (toScrollWidgetId === widget.id) {
      graphRef.current?.scrollIntoView({
        behavior: 'smooth',
        block: 'center',
      });
      graphRef.current?.focus();
      setToScrollWidgetId('');
    }
  }, [toScrollWidgetId, setToScrollWidgetId, widget.id]);
  useScrollWidgetIntoView(widget.id, graphRef);

  useEffect((): void => {
    const { startTime, endTime } = getTimeRange(queryResponse);

@@ -19,9 +19,9 @@ export interface BaseConfigBuilderProps {
  widget: Widgets;
  apiResponse: MetricRangePayloadProps;
  isDarkMode: boolean;
  onClick: OnClickPluginOpts['onClick'];
  onDragSelect: (startTime: number, endTime: number) => void;
  timezone: Timezone;
  onClick?: OnClickPluginOpts['onClick'];
  onDragSelect?: (startTime: number, endTime: number) => void;
  timezone?: Timezone;
  panelMode: PanelMode;
  panelType: PANEL_TYPES;
  minTimeScale?: number;
@@ -40,8 +40,10 @@ export function buildBaseConfig({
  minTimeScale,
  maxTimeScale,
}: BaseConfigBuilderProps): UPlotConfigBuilder {
  const tzDate = (timestamp: number): Date =>
    uPlot.tzDate(new Date(timestamp * 1e3), timezone.value);
  const tzDate = timezone
    ? (timestamp: number): Date =>
        uPlot.tzDate(new Date(timestamp * 1e3), timezone.value)
    : undefined;

  const builder = new UPlotConfigBuilder({
    onDragSelect,

@@ -0,0 +1,225 @@
import {
  NULL_EXPAND,
  NULL_REMOVE,
  NULL_RETAIN,
} from 'container/PanelWrapper/constants';
import { AlignedData } from 'uplot';

/**
 * Expands contiguous runs of `null` values to the left and right of their
 * original positions so that visual gaps in the series are continuous.
 *
 * This is used when `NULL_EXPAND` mode is selected while joining series.
 */
function propagateNullsAcrossNeighbors(
  seriesValues: Array<number | null>,
  nullIndices: number[],
  alignedLength: number,
): void {
  for (
    let i = 0, currentIndex, lastExpandedNullIndex = -1;
    i < nullIndices.length;
    i++
  ) {
    const nullIndex = nullIndices[i];

    if (nullIndex > lastExpandedNullIndex) {
      // expand left until we hit a non-null value
      currentIndex = nullIndex - 1;
      while (currentIndex >= 0 && seriesValues[currentIndex] == null) {
        seriesValues[currentIndex--] = null;
      }

      // expand right until we hit a non-null value
      currentIndex = nullIndex + 1;
      while (currentIndex < alignedLength && seriesValues[currentIndex] == null) {
        seriesValues[(lastExpandedNullIndex = currentIndex++)] = null;
      }
    }
  }
}
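
A quick illustration of the expansion. Hedged: in practice the caller passes arrays whose unfilled slots are `undefined`, which the loose `== null` checks above treat like `null`; the array below is illustrative:

// One explicit null at index 2; the undefined neighbours on either side are
// converted into explicit nulls so the rendered gap is contiguous.
const values = [1, undefined, null, undefined, 2] as Array<number | null>;
propagateNullsAcrossNeighbors(values, [2], values.length);
// values is now [1, null, null, null, 2]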

/**
 * Merges multiple uPlot `AlignedData` tables into a single aligned table.
 *
 * - Merges and sorts all distinct x-values from each table.
 * - Re-aligns every series onto the merged x-axis.
 * - Applies per-series null handling (`NULL_REMOVE`, `NULL_RETAIN`, `NULL_EXPAND`).
 */
/* eslint-disable sonarjs/cognitive-complexity */
export function mergeAlignedDataTables(
  alignedTables: AlignedData[],
  nullModes?: number[][],
): AlignedData {
  const mergedXValues = new Set<number>();

  // Collect all unique x-values from every table.
  for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
    const table = alignedTables[tableIndex];
    const xValues = table[0];
    const xLength = xValues.length;

    for (let i = 0; i < xLength; i++) {
      mergedXValues.add(xValues[i]);
    }
  }

  // Sorted, merged x-axis used by the final result.
  const alignedData: (number | null | undefined)[][] = [
    Array.from(mergedXValues).sort((a, b) => a - b),
  ];

  const alignedLength = alignedData[0].length;

  // Map from x-value to its index in the merged x-axis.
  const xValueToIndexMap = new Map<number, number>();

  for (let i = 0; i < alignedLength; i++) {
    xValueToIndexMap.set(alignedData[0][i] as number, i);
  }

  // Re-align all series from all tables onto the merged x-axis.
  for (let tableIndex = 0; tableIndex < alignedTables.length; tableIndex++) {
    const table = alignedTables[tableIndex];
    const xValues = table[0];

    for (let seriesIndex = 1; seriesIndex < table.length; seriesIndex++) {
      const seriesValues = table[seriesIndex];

      const alignedSeriesValues = Array(alignedLength).fill(undefined);

      const nullHandlingMode = nullModes
        ? nullModes[tableIndex][seriesIndex]
        : NULL_RETAIN;

      const nullIndices: number[] = [];

      for (let i = 0; i < seriesValues.length; i++) {
        const valueAtPoint = seriesValues[i];
        const alignedIndex = xValueToIndexMap.get(xValues[i]);

        if (alignedIndex == null) {
          continue;
        }

        if (valueAtPoint === null) {
          if (nullHandlingMode !== NULL_REMOVE) {
            alignedSeriesValues[alignedIndex] = valueAtPoint;

            if (nullHandlingMode === NULL_EXPAND) {
              nullIndices.push(alignedIndex);
            }
          }
        } else {
          alignedSeriesValues[alignedIndex] = valueAtPoint;
        }
      }

      // Optionally expand nulls to visually preserve gaps.
      propagateNullsAcrossNeighbors(
        alignedSeriesValues,
        nullIndices,
        alignedLength,
      );

      alignedData.push(alignedSeriesValues);
    }
  }

  return alignedData as AlignedData;
}
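
A small sketch of the merge behaviour described in the doc comment, using two plain number tables and the default `NULL_RETAIN` handling (output shown before `replaceUndefinedWithNullInAlignedData` normalizes the gaps):

// Table A: x = [1, 2], y = [10, 20]; table B: x = [2, 3], y = [200, 300].
// The merged x-axis is [1, 2, 3]; points a series never had stay undefined.
const merged = mergeAlignedDataTables([
  [[1, 2], [10, 20]],
  [[2, 3], [200, 300]],
]);
// merged ≈ [[1, 2, 3], [10, 20, undefined], [undefined, 200, 300]]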

/**
 * Builds histogram buckets from raw values.
 *
 * - Each value is mapped into a bucket via `getBucketForValue`.
 * - Counts how many values fall into each bucket.
 * - Optionally sorts buckets using the provided comparator.
 */
export function buildHistogramBuckets(
  values: number[],
  getBucketForValue: (value: number) => number,
  sortBuckets?: ((a: number, b: number) => number) | null,
): AlignedData {
  const bucketMap = new Map<number, { value: number; count: number }>();

  for (let i = 0; i < values.length; i++) {
    let value = values[i];

    if (value != null) {
      value = getBucketForValue(value);
    }

    const bucket = bucketMap.get(value);

    if (bucket) {
      bucket.count++;
    } else {
      bucketMap.set(value, { value, count: 1 });
    }
  }

  const buckets = [...bucketMap.values()];

  if (sortBuckets) {
    buckets.sort((a, b) => sortBuckets(a.value, b.value));
  }

  const bucketValues = Array(buckets.length);
  const bucketCounts = Array(buckets.length);

  for (let i = 0; i < buckets.length; i++) {
    bucketValues[i] = buckets[i].value;
    bucketCounts[i] = buckets[i].count;
  }

  return [bucketValues, bucketCounts];
}
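
For example, counting six values into integer-wide buckets (the floor-based bucketing function here is illustrative, not the panel's real `getBucket`):

const [bucketValues, bucketCounts] = buildHistogramBuckets(
  [0.2, 0.9, 1.5, 2.1, 2.2, 2.9],
  (v) => Math.floor(v), // map each value to its bucket's lower edge
  (a, b) => a - b, // ascending bucket order
);
// bucketValues = [0, 1, 2], bucketCounts = [2, 1, 3]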

/**
 * Mutates an `AlignedData` instance, replacing all `undefined` entries
 * with explicit `null` values so uPlot treats them as gaps.
 */
export function replaceUndefinedWithNullInAlignedData(
  data: AlignedData,
): AlignedData {
  const seriesList = data as (number | null | undefined)[][];
  for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex++) {
    for (
      let pointIndex = 0;
      pointIndex < seriesList[seriesIndex].length;
      pointIndex++
    ) {
      if (seriesList[seriesIndex][pointIndex] === undefined) {
        seriesList[seriesIndex][pointIndex] = null;
      }
    }
  }
  return data;
}

/**
 * Ensures the first histogram series has a leading "empty" bin so that
 * all series line up visually when rendered as bars.
 *
 * - Prepends a new x-value (first x - `bucketSize`) to the first series.
 * - Prepends `null` to all subsequent series at the same index.
 */
export function prependNullBinToFirstHistogramSeries(
  alignedData: AlignedData,
  bucketSize: number,
): void {
  const seriesList = alignedData as (number | null)[][];
  if (
    seriesList.length > 0 &&
    seriesList[0].length > 0 &&
    seriesList[0][0] !== null
  ) {
    seriesList[0].unshift(seriesList[0][0] - bucketSize);
    for (let seriesIndex = 1; seriesIndex < seriesList.length; seriesIndex++) {
      seriesList[seriesIndex].unshift(null);
    }
  }
}
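
A quick before/after of the mutation (values are illustrative):

// x-buckets start at 5 with bucketSize 1; a leading 4-bucket is prepended to
// the x series and a null placeholder to each count series.
const data = ([[5, 6], [3, 1]] as unknown) as AlignedData;
prependNullBinToFirstHistogramSeries(data, 1);
// data is now [[4, 5, 6], [null, 3, 1]]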

@@ -1,5 +1,4 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import {
  Button,
  Empty,
@@ -10,22 +9,24 @@ import {
  Popover,
  Spin,
} from 'antd';
import { Filter } from 'api/v5/v5';
import {
  convertExpressionToFilters,
  convertFiltersToExpression,
} from 'components/QueryBuilderV2/utils';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useGetMetricsListFilterValues } from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import useDebouncedFn from 'hooks/useDebouncedFunction';
import { Search } from 'lucide-react';
import { DataTypes } from 'types/api/queryBuilder/queryAutocompleteResponse';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { SUMMARY_FILTERS_KEY } from './constants';

function MetricNameSearch({
  queryFilters,
  queryFilterExpression,
  onFilterChange,
}: {
  queryFilters: TagFilter;
  queryFilterExpression: Filter;
  onFilterChange: (value: string) => void;
}): JSX.Element {
  const [searchParams, setSearchParams] = useSearchParams();

  const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);
  const [searchString, setSearchString] = useState<string>('');
  const [debouncedSearchString, setDebouncedSearchString] = useState<string>('');
@@ -67,9 +68,12 @@ function MetricNameSearch({

  const handleSelect = useCallback(
    (selectedMetricName: string): void => {
      const queryFilters = convertExpressionToFilters(
        queryFilterExpression?.expression,
      );
      const newFilters = {
        items: [
          ...queryFilters.items,
          ...queryFilters,
          {
            id: 'metric_name',
            op: 'CONTAINS',
@@ -83,13 +87,11 @@ function MetricNameSearch({
        ],
        op: 'and',
      };
      setSearchParams({
        ...Object.fromEntries(searchParams.entries()),
        [SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
      });
      const newFilterExpression = convertFiltersToExpression(newFilters);
      onFilterChange(newFilterExpression.expression);
      setIsPopoverOpen(false);
    },
    [queryFilters.items, setSearchParams, searchParams],
    [queryFilterExpression, onFilterChange],
  );

  const metricNameFilterValues = useMemo(

@@ -1,23 +1,19 @@
import { useCallback, useMemo, useState } from 'react';
import { useSearchParams } from 'react-router-dom-v5-compat';
import { Button, Menu, Popover, Tooltip } from 'antd';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { convertFiltersToExpression } from 'components/QueryBuilderV2/utils';
import { Search } from 'lucide-react';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import {
  METRIC_TYPE_LABEL_MAP,
  METRIC_TYPE_VALUES_MAP,
  SUMMARY_FILTERS_KEY,
} from './constants';
import { METRIC_TYPE_LABEL_MAP_V2 } from './constants';

function MetricTypeSearch({
  queryFilters,
  onFilterChange,
}: {
  queryFilters: TagFilter;
  onFilterChange: (expression: string) => void;
}): JSX.Element {
  const [searchParams, setSearchParams] = useSearchParams();

  const [isPopoverOpen, setIsPopoverOpen] = useState<boolean>(false);

  const menuItems = useMemo(
@@ -26,9 +22,9 @@ function MetricTypeSearch({
      key: 'all',
      value: 'All',
    },
    ...Object.keys(METRIC_TYPE_LABEL_MAP).map((key) => ({
      key: METRIC_TYPE_VALUES_MAP[key as MetricType],
      value: METRIC_TYPE_LABEL_MAP[key as MetricType],
    ...Object.keys(METRIC_TYPE_LABEL_MAP_V2).map((key) => ({
      key: METRIC_TYPE_LABEL_MAP_V2[key as MetrictypesTypeDTO],
      value: METRIC_TYPE_LABEL_MAP_V2[key as MetrictypesTypeDTO],
    })),
  ],
  [],
@@ -36,16 +32,17 @@ function MetricTypeSearch({

  const handleSelect = useCallback(
    (selectedMetricType: string): void => {
      let newFilters;
      if (selectedMetricType !== 'all') {
        const newFilters = {
        newFilters = {
          items: [
            ...queryFilters.items,
            {
              id: 'metric_type',
              id: 'type',
              op: '=',
              key: {
                id: 'metric_type',
                key: 'metric_type',
                id: 'type',
                key: 'type',
                type: 'tag',
              },
              value: selectedMetricType,
@@ -53,23 +50,17 @@ function MetricTypeSearch({
          ],
          op: 'AND',
        };
        setSearchParams({
          ...Object.fromEntries(searchParams.entries()),
          [SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
        });
      } else {
        const newFilters = {
          items: queryFilters.items.filter((item) => item.id !== 'metric_type'),
        newFilters = {
          items: queryFilters.items.filter((item) => item.id !== 'type'),
          op: 'AND',
        };
        setSearchParams({
          ...Object.fromEntries(searchParams.entries()),
          [SUMMARY_FILTERS_KEY]: JSON.stringify(newFilters),
        });
      }
      const newFilterExpression = convertFiltersToExpression(newFilters);
      onFilterChange(newFilterExpression.expression);
      setIsPopoverOpen(false);
    },
    [queryFilters.items, setSearchParams, searchParams],
    [queryFilters.items, onFilterChange],
  );

  const menu = (

@@ -1,27 +1,59 @@
import { Tooltip } from 'antd';
import QueryBuilderSearch from 'container/QueryBuilder/filters/QueryBuilderSearch';
import { useEffect, useState } from 'react';
import { Button, Tooltip } from 'antd';
import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
import DateTimeSelectionV2 from 'container/TopNav/DateTimeSelectionV2';
import { HardHat, Info } from 'lucide-react';
import { Info, Play } from 'lucide-react';
import { DataSource } from 'types/common/queryBuilder';

import { MetricsSearchProps } from './types';

function MetricsSearch({ query, onChange }: MetricsSearchProps): JSX.Element {
  const [currentExpression, setCurrentExpression] = useState<string>(
    query?.filter?.expression || '',
  );

  useEffect(() => {
    const expression = query?.filter?.expression || '';
    setCurrentExpression(expression);
  }, [query?.filter?.expression]);

  const handleOnChange = (expression: string): void => {
    setCurrentExpression(expression);
  };

  const handleStageAndRunQuery = (): void => onChange(currentExpression);

  return (
    <div className="metrics-search-container">
      <div className="qb-search-container">
      <div data-testid="qb-search-container" className="qb-search-container">
        <Tooltip
          title="Use filters to refine metrics based on attributes. Example: service_name=api - Shows all metrics associated with the API service"
          placement="right"
        >
          <Info size={16} />
        </Tooltip>
        <QueryBuilderSearch
          query={query}
          onChange={onChange}
          suffixIcon={<HardHat size={16} />}
          isMetricsExplorer
        <QuerySearch
          onChange={handleOnChange}
          dataSource={DataSource.METRICS}
          queryData={{
            ...query,
            filter: {
              ...query?.filter,
              expression: currentExpression,
            },
          }}
          onRun={handleOnChange}
          showFilterSuggestionsWithoutMetric
        />
      </div>
      <Button
        type="primary"
        onClick={handleStageAndRunQuery}
        className="stage-run-query"
        icon={<Play size={14} />}
      >
        Stage & Run Query
      </Button>
      <div className="metrics-search-options">
        <DateTimeSelectionV2
          showAutoRefresh={false}

@@ -24,7 +24,8 @@ function MetricsTable({
  setOrderBy,
  totalCount,
  openMetricDetails,
  queryFilters,
  queryFilterExpression,
  onFilterChange,
}: MetricsTableProps): JSX.Element {
  const handleTableChange: TableProps<MetricsListItemRowData>['onChange'] = useCallback(
    (
@@ -36,13 +37,17 @@ function MetricsTable({
    ): void => {
      if ('field' in sorter && sorter.order) {
        setOrderBy({
          columnName: sorter.field as string,
          order: sorter.order === 'ascend' ? 'asc' : 'desc',
          key: {
            name: sorter.field as string,
          },
          direction: sorter.order === 'ascend' ? 'asc' : 'desc',
        });
      } else {
        setOrderBy({
          columnName: 'samples',
          order: 'desc',
          key: {
            name: 'samples',
          },
          direction: 'desc',
        });
      }
    },
@@ -51,19 +56,17 @@ function MetricsTable({

  return (
    <div className="metrics-table-container">
      {!isError && !isLoading && (
        <div className="metrics-table-title" data-testid="metrics-table-title">
          <Typography.Title level={4} className="metrics-table-title">
            List View
          </Typography.Title>
          <Tooltip
            title="The table displays all metrics in the selected time range. Each row represents a unique metric, and its metric name, and metadata like description, type, unit, and samples/timeseries cardinality observed in the selected time range."
            placement="right"
          >
            <Info size={16} />
          </Tooltip>
        </div>
      )}
      <div className="metrics-table-title" data-testid="metrics-table-title">
        <Typography.Title level={4} className="metrics-table-title">
          List View
        </Typography.Title>
        <Tooltip
          title="The table displays all metrics in the selected time range. Each row represents a unique metric, and its metric name, and metadata like description, type, unit, and samples/timeseries cardinality observed in the selected time range."
          placement="right"
        >
          <Info size={16} />
        </Tooltip>
      </div>
      <Table
        loading={{
          spinning: isLoading,
@@ -75,7 +78,7 @@ function MetricsTable({
        ),
      }}
      dataSource={data}
      columns={getMetricsTableColumns(queryFilters)}
      columns={getMetricsTableColumns(queryFilterExpression, onFilterChange)}
      locale={{
        emptyText: isLoading ? null : (
          <div

@@ -3,6 +3,7 @@ import { useWindowSize } from 'react-use';
import { Group } from '@visx/group';
import { Treemap } from '@visx/hierarchy';
import { Empty, Select, Skeleton, Tooltip, Typography } from 'antd';
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
import { stratify, treemapBinary } from 'd3-hierarchy';
import { Info } from 'lucide-react';

@@ -12,7 +13,7 @@ import {
  TREEMAP_SQUARE_PADDING,
  TREEMAP_VIEW_OPTIONS,
} from './constants';
import { MetricsTreemapProps, TreemapTile, TreemapViewType } from './types';
import { MetricsTreemapProps, TreemapTile } from './types';
import {
  getTreemapTileStyle,
  getTreemapTileTextStyle,
@@ -40,9 +41,9 @@ function MetricsTreemap({

  const treemapData = useMemo(() => {
    const extracedTreemapData =
      (viewType === TreemapViewType.TIMESERIES
        ? data?.data?.[TreemapViewType.TIMESERIES]
        : data?.data?.[TreemapViewType.SAMPLES]) || [];
      (viewType === MetricsexplorertypesTreemapModeDTO.timeseries
        ? data?.timeseries
        : data?.samples) || [];
    return transformTreemapData(extracedTreemapData, viewType);
  }, [data, viewType]);

@@ -54,62 +55,36 @@ function MetricsTreemap({
  const xMax = treemapWidth - TREEMAP_MARGINS.LEFT - TREEMAP_MARGINS.RIGHT;
  const yMax = TREEMAP_HEIGHT - TREEMAP_MARGINS.TOP - TREEMAP_MARGINS.BOTTOM;

  if (isLoading) {
    return (
      <div data-testid="metrics-treemap-loading-state">
        <Skeleton
          style={{ width: treemapWidth, height: TREEMAP_HEIGHT + 55 }}
          active
        />
      </div>
    );
  }

  if (
    !data ||
    !data.data ||
    (data?.status === 'success' && !data?.data?.[viewType])
  ) {
    return (
      <Empty
        description="No metrics found"
        data-testid="metrics-treemap-empty-state"
        style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
      />
    );
  }

  if (data?.status === 'error' || isError) {
    return (
      <Empty
        description="Error fetching metrics. If the problem persists, please contact support."
        data-testid="metrics-treemap-error-state"
        style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
      />
    );
  }

  return (
    <div
      className="metrics-treemap-container"
      data-testid="metrics-treemap-container"
    >
      <div className="metrics-treemap-title">
        <div className="metrics-treemap-title-left">
          <Typography.Title level={4}>Proportion View</Typography.Title>
          <Tooltip
            title="The treemap displays the proportion of samples/timeseries in the selected time range. Each tile represents a unique metric, and its size indicates the percentage of samples/timeseries it contributes to the total."
            placement="right"
          >
            <Info size={16} />
          </Tooltip>
  const treemapContent = useMemo(() => {
    if (isLoading) {
      return (
        <div data-testid="metrics-treemap-loading-state">
          <Skeleton style={{ width: treemapWidth, height: TREEMAP_HEIGHT }} active />
        </div>
        <Select
          options={TREEMAP_VIEW_OPTIONS}
          value={viewType}
          onChange={setHeatmapView}
      );
    }

    if (isError) {
      return (
        <Empty
          description="Error fetching metrics. If the problem persists, please contact support."
          data-testid="metrics-treemap-error-state"
          style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
        />
      </div>
      );
    }

    if (!data || !data?.[viewType]?.length) {
      return (
        <Empty
          description="No metrics found"
          data-testid="metrics-treemap-empty-state"
          style={{ width: treemapWidth, height: TREEMAP_HEIGHT, paddingTop: 30 }}
        />
      );
    }

    return (
      <svg
        width={treemapWidth}
        height={TREEMAP_HEIGHT}
@@ -174,6 +149,42 @@ function MetricsTreemap({
        )}
      </Treemap>
    </svg>
    );
  }, [
    data,
    isError,
    isLoading,
    openMetricDetails,
    transformedTreemapData,
    treemapWidth,
    viewType,
    xMax,
    yMax,
  ]);

  return (
    <div
      className="metrics-treemap-container"
      data-testid="metrics-treemap-container"
    >
      <div className="metrics-treemap-title">
        <div className="metrics-treemap-title-left">
          <Typography.Title level={4}>Proportion View</Typography.Title>
          <Tooltip
            title="The treemap displays the proportion of samples/timeseries in the selected time range. Each tile represents a unique metric, and its size indicates the percentage of samples/timeseries it contributes to the total."
            placement="right"
          >
            <Info size={16} />
          </Tooltip>
        </div>
        <Select
          options={TREEMAP_VIEW_OPTIONS}
          value={viewType}
          onChange={setHeatmapView}
          disabled={isLoading}
        />
      </div>
      {treemapContent}
    </div>
  );
}

@@ -38,6 +38,7 @@
.metrics-search-container {
  display: flex;
  gap: 16px;
  align-items: center;

  .metrics-search-options {
    display: flex;

@@ -4,11 +4,23 @@ import { useSelector } from 'react-redux';
import { useSearchParams } from 'react-router-dom-v5-compat';
import * as Sentry from '@sentry/react';
import logEvent from 'api/common/logEvent';
import { initialQueriesMap } from 'constants/queryBuilder';
import {
useGetMetricsStats,
useGetMetricsTreemap,
} from 'api/generated/services/metrics';
import {
MetricsexplorertypesStatsRequestDTO,
MetricsexplorertypesTreemapModeDTO,
MetricsexplorertypesTreemapRequestDTO,
Querybuildertypesv5OrderByDTO,
} from 'api/generated/services/sigNoz.schemas';
import {
convertExpressionToFilters,
convertFiltersToExpression,
} from 'components/QueryBuilderV2/utils';
import { usePageSize } from 'container/InfraMonitoringK8s/utils';
import NoLogs from 'container/NoLogs/NoLogs';
import { useGetMetricsList } from 'hooks/metricsExplorer/useGetMetricsList';
import { useGetMetricsTreeMap } from 'hooks/metricsExplorer/useGetMetricsTreeMap';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import ErrorBoundaryFallback from 'pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import { AppState } from 'store/reducers';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';
@@ -23,32 +35,38 @@ import {
IS_INSPECT_MODAL_OPEN_KEY,
IS_METRIC_DETAILS_OPEN_KEY,
SELECTED_METRIC_NAME_KEY,
SUMMARY_FILTERS_KEY,
} from './constants';
import MetricsSearch from './MetricsSearch';
import MetricsTable from './MetricsTable';
import MetricsTreemap from './MetricsTreemap';
import { OrderByPayload, TreemapViewType } from './types';
import {
convertNanoToMilliseconds,
formatDataForMetricsTable,
getMetricsListQuery,
} from './utils';
import { convertNanoToMilliseconds, formatDataForMetricsTable } from './utils';

import './Summary.styles.scss';

const DEFAULT_ORDER_BY: OrderByPayload = {
columnName: 'samples',
order: 'desc',
const DEFAULT_ORDER_BY: Querybuildertypesv5OrderByDTO = {
key: {
name: 'samples',
},
direction: 'desc',
};

function Summary(): JSX.Element {
const { pageSize, setPageSize } = usePageSize('metricsExplorer');
const [currentPage, setCurrentPage] = useState(1);
const [orderBy, setOrderBy] = useState<OrderByPayload>(DEFAULT_ORDER_BY);
const [heatmapView, setHeatmapView] = useState<TreemapViewType>(
TreemapViewType.TIMESERIES,
const [orderBy, setOrderBy] = useState<Querybuildertypesv5OrderByDTO>(
DEFAULT_ORDER_BY,
);
const [
heatmapView,
setHeatmapView,
] = useState<MetricsexplorertypesTreemapModeDTO>(
MetricsexplorertypesTreemapModeDTO.timeseries,
);

const { currentQuery, redirectWithQueryBuilderData } = useQueryBuilder();
const query = useMemo(() => currentQuery?.builder?.queryData[0], [
currentQuery,
]);

const [searchParams, setSearchParams] = useSearchParams();
const [isMetricDetailsOpen, setIsMetricDetailsOpen] = useState(
@@ -65,17 +83,6 @@ function Summary(): JSX.Element {
(state) => state.globalTime,
);

const queryFilters: TagFilter = useMemo(() => {
const encodedFilters = searchParams.get(SUMMARY_FILTERS_KEY);
if (encodedFilters) {
return JSON.parse(encodedFilters);
}
return {
items: [],
op: 'AND',
};
}, [searchParams]);

useEffect(() => {
logEvent(MetricsExplorerEvents.TabChanged, {
[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -87,105 +94,112 @@ function Summary(): JSX.Element {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);

// This is used to avoid the filters from being serialized with the id
const queryFiltersWithoutId = useMemo(() => {
const filtersWithoutId = {
...queryFilters,
items: queryFilters.items.map(({ id: _id, ...rest }) => rest),
};
return JSON.stringify(filtersWithoutId);
}, [queryFilters]);
const queryFilterExpression = useMemo(() => {
const filters = query?.filters || { items: [], op: 'AND' };
return convertFiltersToExpression(filters);
}, [query?.filters]);

const metricsListQuery = useMemo(() => {
const baseQuery = getMetricsListQuery();
const metricsListQuery: MetricsexplorertypesStatsRequestDTO = useMemo(() => {
return {
...baseQuery,
limit: pageSize,
offset: (currentPage - 1) * pageSize,
filters: queryFilters,
start: convertNanoToMilliseconds(minTime),
end: convertNanoToMilliseconds(maxTime),
limit: pageSize,
offset: (currentPage - 1) * pageSize,
orderBy,
filter: {
expression: queryFilterExpression.expression,
},
};
}, [queryFilters, minTime, maxTime, orderBy, pageSize, currentPage]);
}, [
minTime,
maxTime,
orderBy,
pageSize,
currentPage,
queryFilterExpression.expression,
]);

const metricsTreemapQuery = useMemo(
const metricsTreemapQuery: MetricsexplorertypesTreemapRequestDTO = useMemo(
() => ({
limit: 100,
filters: queryFilters,
treemap: heatmapView,
start: convertNanoToMilliseconds(minTime),
end: convertNanoToMilliseconds(maxTime),
mode: heatmapView,
filter: {
expression: queryFilterExpression.expression,
},
}),
[queryFilters, heatmapView, minTime, maxTime],
[heatmapView, minTime, maxTime, queryFilterExpression.expression],
);

const {
data: metricsData,
isLoading: isMetricsLoading,
isFetching: isMetricsFetching,
isError: isMetricsError,
} = useGetMetricsList(metricsListQuery, {
enabled: !!metricsListQuery && !isInspectModalOpen,
queryKey: [
'metricsList',
queryFiltersWithoutId,
orderBy,
pageSize,
currentPage,
minTime,
maxTime,
],
});
mutate: getMetricsStats,
isLoading: isGetMetricsStatsLoading,
isError: isGetMetricsStatsError,
} = useGetMetricsStats();

const isListViewError = useMemo(
() => isMetricsError || !!(metricsData && metricsData.statusCode !== 200),
[isMetricsError, metricsData],
() => isGetMetricsStatsError || metricsData?.status !== 200,
[isGetMetricsStatsError, metricsData],
);

const {
data: treeMapData,
isLoading: isTreeMapLoading,
isFetching: isTreeMapFetching,
isError: isTreeMapError,
} = useGetMetricsTreeMap(metricsTreemapQuery, {
enabled: !!metricsTreemapQuery && !isInspectModalOpen,
queryKey: [
'metricsTreemap',
queryFiltersWithoutId,
heatmapView,
minTime,
maxTime,
],
});
mutate: getMetricsTreemap,
isLoading: isGetMetricsTreemapLoading,
isError: isGetMetricsTreemapError,
} = useGetMetricsTreemap();

useEffect(() => {
getMetricsStats({
data: metricsListQuery,
});
getMetricsTreemap({
data: metricsTreemapQuery,
});
}, [
metricsTreemapQuery,
metricsListQuery,
getMetricsTreemap,
getMetricsStats,
]);

const isProportionViewError = useMemo(
() => isTreeMapError || treeMapData?.statusCode !== 200,
[isTreeMapError, treeMapData],
() => isGetMetricsTreemapError || treeMapData?.status !== 200,
[isGetMetricsTreemapError, treeMapData],
);

const handleFilterChange = useCallback(
(value: TagFilter) => {
setSearchParams({
...Object.fromEntries(searchParams.entries()),
[SUMMARY_FILTERS_KEY]: JSON.stringify(value),
(expression: string) => {
const newFilters: TagFilter = {
items: convertExpressionToFilters(expression),
op: 'AND',
};
redirectWithQueryBuilderData({
...currentQuery,
builder: {
...currentQuery.builder,
queryData: [
{
...currentQuery.builder.queryData[0],
filters: newFilters,
filter: {
expression,
},
},
],
},
});
setCurrentPage(1);
if (value.items.length > 0) {
if (expression) {
logEvent(MetricsExplorerEvents.FilterApplied, {
[MetricsExplorerEventKeys.Tab]: 'summary',
});
}
},
[setSearchParams, searchParams],
);

const searchQuery = useMemo(
() => ({
...initialQueriesMap.metrics.builder.queryData[0],
filters: queryFilters,
}),
[queryFilters],
[currentQuery, redirectWithQueryBuilderData],
);

const onPaginationChange = (page: number, pageSize: number): void => {
@@ -202,7 +216,7 @@ function Summary(): JSX.Element {
};

const formattedMetricsData = useMemo(
() => formatDataForMetricsTable(metricsData?.payload?.data?.metrics || []),
() => formatDataForMetricsTable(metricsData?.data?.data?.metrics || []),
[metricsData],
);

@@ -254,7 +268,9 @@ function Summary(): JSX.Element {
});
};

const handleSetHeatmapView = (view: TreemapViewType): void => {
const handleSetHeatmapView = (
view: MetricsexplorertypesTreemapModeDTO,
): void => {
setHeatmapView(view);
logEvent(MetricsExplorerEvents.TreemapViewChanged, {
[MetricsExplorerEventKeys.Tab]: 'summary',
@@ -262,63 +278,71 @@ function Summary(): JSX.Element {
});
};

const handleSetOrderBy = (orderBy: OrderByPayload): void => {
const handleSetOrderBy = (orderBy: Querybuildertypesv5OrderByDTO): void => {
setOrderBy(orderBy);
logEvent(MetricsExplorerEvents.OrderByApplied, {
[MetricsExplorerEventKeys.Tab]: 'summary',
[MetricsExplorerEventKeys.ColumnName]: orderBy.columnName,
[MetricsExplorerEventKeys.Order]: orderBy.order,
[MetricsExplorerEventKeys.ColumnName]: orderBy.key?.name,
[MetricsExplorerEventKeys.Order]: orderBy.direction,
});
};

const isMetricsListDataEmpty = useMemo(
() =>
formattedMetricsData.length === 0 && !isMetricsLoading && !isMetricsFetching,
[formattedMetricsData, isMetricsLoading, isMetricsFetching],
() => formattedMetricsData.length === 0 && !isGetMetricsStatsLoading,
[formattedMetricsData, isGetMetricsStatsLoading],
);

const isMetricsTreeMapDataEmpty = useMemo(
() =>
!treeMapData?.payload?.data[heatmapView]?.length &&
!isTreeMapLoading &&
!isTreeMapFetching,
!treeMapData?.data?.data?.[heatmapView]?.length &&
!isGetMetricsTreemapLoading,
[treeMapData?.data?.data, heatmapView, isGetMetricsTreemapLoading],
);

const showFullScreenLoading = useMemo(
() =>
(isGetMetricsStatsLoading || isGetMetricsTreemapLoading) &&
formattedMetricsData.length === 0 &&
!treeMapData?.data?.data?.[heatmapView]?.length,
[
treeMapData?.payload?.data,
isGetMetricsStatsLoading,
isGetMetricsTreemapLoading,
formattedMetricsData,
treeMapData,
heatmapView,
isTreeMapLoading,
isTreeMapFetching,
],
);

return (
<Sentry.ErrorBoundary fallback={<ErrorBoundaryFallback />}>
<div className="metrics-explorer-summary-tab">
<MetricsSearch query={searchQuery} onChange={handleFilterChange} />
{isMetricsLoading || isTreeMapLoading ? (
<MetricsSearch query={query} onChange={handleFilterChange} />
{showFullScreenLoading ? (
<MetricsLoading />
) : isMetricsListDataEmpty && isMetricsTreeMapDataEmpty ? (
<NoLogs dataSource={DataSource.METRICS} />
) : (
<>
<MetricsTreemap
data={treeMapData?.payload}
isLoading={isTreeMapLoading || isTreeMapFetching}
data={treeMapData?.data?.data}
isLoading={isGetMetricsTreemapLoading}
isError={isProportionViewError}
viewType={heatmapView}
openMetricDetails={openMetricDetails}
setHeatmapView={handleSetHeatmapView}
/>
<MetricsTable
isLoading={isMetricsLoading || isMetricsFetching}
isLoading={isGetMetricsStatsLoading}
isError={isListViewError}
data={formattedMetricsData}
pageSize={pageSize}
currentPage={currentPage}
onPaginationChange={onPaginationChange}
setOrderBy={handleSetOrderBy}
totalCount={metricsData?.payload?.data?.total || 0}
totalCount={metricsData?.data?.data?.total || 0}
openMetricDetails={openMetricDetails}
queryFilters={queryFilters}
queryFilterExpression={queryFilterExpression}
onFilterChange={handleFilterChange}
/>
</>
)}

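The Summary changes above migrate ordering state from the local OrderByPayload shape (columnName/order) to the generated Querybuildertypesv5OrderByDTO (key.name/direction). A minimal mapping sketch, assuming only the two shapes visible in this diff; the toOrderByDTO helper is hypothetical and not part of the patch:

import { Querybuildertypesv5OrderByDTO } from 'api/generated/services/sigNoz.schemas';

// Legacy shape used before this diff.
interface OrderByPayload {
  columnName: string;
  order: 'asc' | 'desc';
}

// Hypothetical helper: converts the legacy payload into the generated v5 DTO,
// mirroring how DEFAULT_ORDER_BY and the analytics fields are rewritten above.
function toOrderByDTO(orderBy: OrderByPayload): Querybuildertypesv5OrderByDTO {
  return {
    key: { name: orderBy.columnName },
    direction: orderBy.order,
  };
}

// toOrderByDTO({ columnName: 'samples', order: 'desc' })
// => { key: { name: 'samples' }, direction: 'desc' }
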
@@ -1,10 +1,10 @@
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { fireEvent, render, screen } from '@testing-library/react';
import { Filter } from 'api/v5/v5';
import * as useGetMetricsListFilterValues from 'hooks/metricsExplorer/useGetMetricsListFilterValues';
import * as useQueryBuilderOperationsHooks from 'hooks/queryBuilder/useQueryBuilderOperations';
import store from 'store';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import MetricsTable from '../MetricsTable';
import { MetricsListItemRowData } from '../types';
@@ -30,9 +30,8 @@ const mockData: MetricsListItemRowData[] = [
},
];

const mockQueryFilters: TagFilter = {
items: [],
op: 'AND',
const mockQueryFilterExpression: Filter = {
expression: '',
};

jest.mock('react-router-dom-v5-compat', () => {
@@ -82,7 +81,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -106,8 +106,9 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
isLoading
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -130,7 +131,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -158,7 +160,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -187,7 +190,8 @@ describe('MetricsTable', () => {
setOrderBy={jest.fn()}
totalCount={2}
openMetricDetails={mockOpenMetricDetails}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -212,7 +216,8 @@ describe('MetricsTable', () => {
setOrderBy={mockSetOrderBy}
totalCount={2}
openMetricDetails={jest.fn()}
queryFilters={mockQueryFilters}
queryFilterExpression={mockQueryFilterExpression}
onFilterChange={jest.fn()}
/>
</Provider>
</MemoryRouter>,
@@ -222,8 +227,10 @@ describe('MetricsTable', () => {
fireEvent.click(samplesHeader);

expect(mockSetOrderBy).toHaveBeenCalledWith({
columnName: 'samples',
order: 'asc',
key: {
name: 'samples',
},
direction: 'asc',
});
});
});

@@ -1,10 +1,10 @@
import { Provider } from 'react-redux';
import { MemoryRouter } from 'react-router-dom';
import { render, screen } from '@testing-library/react';
import { MetricsexplorertypesTreemapModeDTO } from 'api/generated/services/sigNoz.schemas';
import store from 'store';

import MetricsTreemap from '../MetricsTreemap';
import { TreemapViewType } from '../types';

jest.mock('d3-hierarchy', () => ({
stratify: jest.fn().mockReturnValue({
@@ -27,14 +27,14 @@ jest.mock('react-use', () => ({

const mockData = [
{
metric_name: 'Metric 1',
metricName: 'Metric 1',
percentage: 0.5,
total_value: 15,
totalValue: 15,
},
{
metric_name: 'Metric 2',
metricName: 'Metric 2',
percentage: 0.6,
total_value: 10,
totalValue: 10,
},
];

@@ -47,14 +47,11 @@ describe('MetricsTreemap', () => {
isLoading={false}
isError={false}
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -72,14 +69,11 @@ describe('MetricsTreemap', () => {
isLoading
isError={false}
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -99,14 +93,11 @@ describe('MetricsTreemap', () => {
isLoading={false}
isError
data={{
status: 'success',
data: {
timeseries: [mockData[0]],
samples: [mockData[1]],
},
timeseries: [mockData[0]],
samples: [mockData[1]],
}}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>
@@ -130,7 +121,7 @@ describe('MetricsTreemap', () => {
isError={false}
data={null}
openMetricDetails={jest.fn()}
viewType={TreemapViewType.SAMPLES}
viewType={MetricsexplorertypesTreemapModeDTO.samples}
setHeatmapView={jest.fn()}
/>
</Provider>

@@ -1,8 +1,9 @@
import { Color } from '@signozhq/design-tokens';
import { render } from '@testing-library/react';
import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { MetricType } from 'api/metricsExplorer/getMetricsList';
import { Filter } from 'api/v5/v5';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { TreemapViewType } from '../types';
import {
@@ -11,52 +12,76 @@ import {
MetricTypeRenderer,
} from '../utils';

describe('metricsTableColumns', () => {
const mockQueryFilters: TagFilter = {
items: [],
op: 'AND',
};
const mockQueryExpression: Filter = {
expression: '',
};
const mockOnChange = jest.fn();

describe('metricsTableColumns', () => {
it('should have correct column definitions', () => {
expect(getMetricsTableColumns(mockQueryFilters)).toHaveLength(6);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange),
).toHaveLength(6);

// Metric Name column
expect(getMetricsTableColumns(mockQueryFilters)[0].dataIndex).toBe(
'metric_name',
);
expect(getMetricsTableColumns(mockQueryFilters)[0].width).toBe(400);
expect(getMetricsTableColumns(mockQueryFilters)[0].sorter).toBe(false);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].dataIndex,
).toBe('metric_name');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].width,
).toBe(400);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[0].sorter,
).toBe(false);

// Description column
expect(getMetricsTableColumns(mockQueryFilters)[1].dataIndex).toBe(
'description',
);
expect(getMetricsTableColumns(mockQueryFilters)[1].width).toBe(400);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[1].dataIndex,
).toBe('description');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[1].width,
).toBe(400);

// Type column
expect(getMetricsTableColumns(mockQueryFilters)[2].dataIndex).toBe(
'metric_type',
);
expect(getMetricsTableColumns(mockQueryFilters)[2].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[2].sorter).toBe(false);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].dataIndex,
).toBe('metric_type');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[2].sorter,
).toBe(false);

// Unit column
expect(getMetricsTableColumns(mockQueryFilters)[3].dataIndex).toBe('unit');
expect(getMetricsTableColumns(mockQueryFilters)[3].width).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[3].dataIndex,
).toBe('unit');
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[3].width,
).toBe(150);

// Samples column
expect(getMetricsTableColumns(mockQueryFilters)[4].dataIndex).toBe(
TreemapViewType.SAMPLES,
);
expect(getMetricsTableColumns(mockQueryFilters)[4].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[4].sorter).toBe(true);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].dataIndex,
).toBe(TreemapViewType.SAMPLES);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[4].sorter,
).toBe(true);

// Time Series column
expect(getMetricsTableColumns(mockQueryFilters)[5].dataIndex).toBe(
TreemapViewType.TIMESERIES,
);
expect(getMetricsTableColumns(mockQueryFilters)[5].width).toBe(150);
expect(getMetricsTableColumns(mockQueryFilters)[5].sorter).toBe(true);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].dataIndex,
).toBe(TreemapViewType.TIMESERIES);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].width,
).toBe(150);
expect(
getMetricsTableColumns(mockQueryExpression, mockOnChange)[5].sorter,
).toBe(true);
});

describe('MetricTypeRenderer', () => {
@@ -111,12 +136,12 @@ describe('formatDataForMetricsTable', () => {
it('should format metrics data correctly', () => {
const mockData = [
{
metric_name: 'test_metric',
metricName: 'test_metric',
description: 'Test description',
type: MetricType.GAUGE,
type: MetrictypesTypeDTO.gauge,
unit: 'bytes',
[TreemapViewType.SAMPLES]: 1000,
[TreemapViewType.TIMESERIES]: 2000,
samples: 1000,
timeseries: 2000,
lastReceived: '2023-01-01T00:00:00Z',
},
];
@@ -163,12 +188,12 @@ describe('formatDataForMetricsTable', () => {
it('should handle empty/null values', () => {
const mockData = [
{
metric_name: '',
metricName: '',
description: '',
type: MetricType.GAUGE,
type: MetrictypesTypeDTO.gauge,
unit: '',
[TreemapViewType.SAMPLES]: 0,
[TreemapViewType.TIMESERIES]: 0,
samples: 0,
timeseries: 0,
lastReceived: '2023-01-01T00:00:00Z',
},
];

@@ -1,15 +1,17 @@
import {
MetricsexplorertypesTreemapModeDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import { MetricType } from 'api/metricsExplorer/getMetricsList';

import { TreemapViewType } from './types';

export const METRICS_TABLE_PAGE_SIZE = 10;

export const TREEMAP_VIEW_OPTIONS: {
value: TreemapViewType;
value: MetricsexplorertypesTreemapModeDTO;
label: string;
}[] = [
{ value: TreemapViewType.TIMESERIES, label: 'Time Series' },
{ value: TreemapViewType.SAMPLES, label: 'Samples' },
{ value: MetricsexplorertypesTreemapModeDTO.timeseries, label: 'Time Series' },
{ value: MetricsexplorertypesTreemapModeDTO.samples, label: 'Samples' },
];

export const TREEMAP_HEIGHT = 200;
@@ -17,6 +19,7 @@ export const TREEMAP_SQUARE_PADDING = 5;

export const TREEMAP_MARGINS = { TOP: 10, LEFT: 10, RIGHT: 10, BOTTOM: 10 };

// TODO: Remove this once API migration is complete
export const METRIC_TYPE_LABEL_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
@@ -25,6 +28,14 @@ export const METRIC_TYPE_LABEL_MAP = {
[MetricType.EXPONENTIAL_HISTOGRAM]: 'Exp. Histogram',
};

export const METRIC_TYPE_LABEL_MAP_V2 = {
[MetrictypesTypeDTO.sum]: 'Sum',
[MetrictypesTypeDTO.gauge]: 'Gauge',
[MetrictypesTypeDTO.histogram]: 'Histogram',
[MetrictypesTypeDTO.summary]: 'Summary',
[MetrictypesTypeDTO.exponentialhistogram]: 'Exp. Histogram',
};

export const METRIC_TYPE_VALUES_MAP = {
[MetricType.SUM]: 'Sum',
[MetricType.GAUGE]: 'Gauge',
@@ -36,4 +47,3 @@ export const METRIC_TYPE_VALUES_MAP = {
export const IS_METRIC_DETAILS_OPEN_KEY = 'isMetricDetailsOpen';
export const IS_INSPECT_MODAL_OPEN_KEY = 'isInspectModalOpen';
export const SELECTED_METRIC_NAME_KEY = 'selectedMetricName';
export const SUMMARY_FILTERS_KEY = 'summaryFilters';

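METRIC_TYPE_LABEL_MAP_V2 above keys display labels by the generated MetrictypesTypeDTO enum instead of the legacy MetricType. A small lookup sketch, assuming the map exactly as added in this diff; labelForMetricType is a hypothetical helper, not part of the patch:

import { MetrictypesTypeDTO } from 'api/generated/services/sigNoz.schemas';
import { METRIC_TYPE_LABEL_MAP_V2 } from './constants';

// Hypothetical helper: resolves a display label, falling back to the raw enum
// value for any type the map does not cover.
function labelForMetricType(type: MetrictypesTypeDTO): string {
  return METRIC_TYPE_LABEL_MAP_V2[type] ?? String(type);
}

// labelForMetricType(MetrictypesTypeDTO.exponentialhistogram) => 'Exp. Histogram'
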
@@ -1,9 +1,11 @@
import React from 'react';
import { MetricsTreeMapResponse } from 'api/metricsExplorer/getMetricsTreeMap';
import {
IBuilderQuery,
TagFilter,
} from 'types/api/queryBuilder/queryBuilderData';
MetricsexplorertypesTreemapModeDTO,
MetricsexplorertypesTreemapResponseDTO,
Querybuildertypesv5OrderByDTO,
} from 'api/generated/services/sigNoz.schemas';
import { Filter } from 'api/v5/v5';
import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';

export interface MetricsTableProps {
isLoading: boolean;
@@ -12,24 +14,25 @@ export interface MetricsTableProps {
pageSize: number;
currentPage: number;
onPaginationChange: (page: number, pageSize: number) => void;
setOrderBy: (orderBy: OrderByPayload) => void;
setOrderBy: (orderBy: Querybuildertypesv5OrderByDTO) => void;
totalCount: number;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
queryFilters: TagFilter;
queryFilterExpression: Filter;
onFilterChange: (expression: string) => void;
}

export interface MetricsSearchProps {
query: IBuilderQuery;
onChange: (value: TagFilter) => void;
onChange: (expression: string) => void;
}

export interface MetricsTreemapProps {
data: MetricsTreeMapResponse | null | undefined;
data: MetricsexplorertypesTreemapResponseDTO | null | undefined;
isLoading: boolean;
isError: boolean;
viewType: TreemapViewType;
viewType: MetricsexplorertypesTreemapModeDTO;
openMetricDetails: (metricName: string, view: 'list' | 'treemap') => void;
setHeatmapView: (value: TreemapViewType) => void;
setHeatmapView: (value: MetricsexplorertypesTreemapModeDTO) => void;
}

export interface OrderByPayload {

@@ -3,14 +3,16 @@ import { Color } from '@signozhq/design-tokens';
import { Tooltip, Typography } from 'antd';
import { ColumnType } from 'antd/es/table';
import {
MetricsListItemData,
MetricsexplorertypesStatDTO,
MetricsexplorertypesTreemapEntryDTO,
MetricsexplorertypesTreemapModeDTO,
MetrictypesTypeDTO,
} from 'api/generated/services/sigNoz.schemas';
import {
MetricsListPayload,
MetricType,
} from 'api/metricsExplorer/getMetricsList';
import {
SamplesData,
TimeseriesData,
} from 'api/metricsExplorer/getMetricsTreeMap';
import { Filter } from 'api/v5/v5';
import { getUniversalNameFromMetricUnit } from 'components/YAxisUnitSelector/utils';
import {
BarChart,
@@ -19,21 +21,23 @@ import {
Diff,
Gauge,
} from 'lucide-react';
import { TagFilter } from 'types/api/queryBuilder/queryBuilderData';

import { METRIC_TYPE_LABEL_MAP } from './constants';
import { METRIC_TYPE_LABEL_MAP, METRIC_TYPE_LABEL_MAP_V2 } from './constants';
import MetricNameSearch from './MetricNameSearch';
import MetricTypeSearch from './MetricTypeSearch';
import { MetricsListItemRowData, TreemapTile, TreemapViewType } from './types';
import { MetricsListItemRowData, TreemapTile } from './types';

export const getMetricsTableColumns = (
queryFilters: TagFilter,
queryFilterExpression: Filter,
onFilterChange: (expression: string) => void,
): ColumnType<MetricsListItemRowData>[] => [
{
title: (
<div className="metric-name-column-header">
<span className="metric-name-column-header-text">METRIC</span>
<MetricNameSearch queryFilters={queryFilters} />
<MetricNameSearch
queryFilterExpression={queryFilterExpression}
onFilterChange={onFilterChange}
/>
</div>
),
dataIndex: 'metric_name',
@@ -55,7 +59,11 @@ export const getMetricsTableColumns = (
title: (
<div className="metric-type-column-header">
<span className="metric-type-column-header-text">TYPE</span>
<MetricTypeSearch queryFilters={queryFilters} />
{/* TODO: @amlannandy: Re-enable once API supports metric type filtering */}
{/* <MetricTypeSearch
queryFilters={queryFilters}
onFilterChange={onFilterChange}
/> */}
</div>
),
dataIndex: 'metric_type',
@@ -69,13 +77,13 @@ export const getMetricsTableColumns = (
},
{
title: 'SAMPLES',
dataIndex: TreemapViewType.SAMPLES,
dataIndex: MetricsexplorertypesTreemapModeDTO.samples,
width: 150,
sorter: true,
},
{
title: 'TIME SERIES',
dataIndex: TreemapViewType.TIMESERIES,
dataIndex: MetricsexplorertypesTreemapModeDTO.timeseries,
width: 150,
sorter: true,
},
@@ -143,6 +151,60 @@ export function MetricTypeRenderer({
);
}

export function MetricTypeRendererV2({
type,
}: {
type: MetrictypesTypeDTO;
}): JSX.Element {
const [icon, color] = useMemo(() => {
switch (type) {
case MetrictypesTypeDTO.sum:
return [
<Diff key={type} size={12} color={Color.BG_ROBIN_500} />,
Color.BG_ROBIN_500,
];
case MetrictypesTypeDTO.gauge:
return [
<Gauge key={type} size={12} color={Color.BG_SAKURA_500} />,
Color.BG_SAKURA_500,
];
case MetrictypesTypeDTO.histogram:
return [
<BarChart2 key={type} size={12} color={Color.BG_SIENNA_500} />,
Color.BG_SIENNA_500,
];
case MetrictypesTypeDTO.summary:
return [
<BarChartHorizontal key={type} size={12} color={Color.BG_FOREST_500} />,
Color.BG_FOREST_500,
];
case MetrictypesTypeDTO.exponentialhistogram:
return [
<BarChart key={type} size={12} color={Color.BG_AQUA_500} />,
Color.BG_AQUA_500,
];
default:
return [null, ''];
}
}, [type]);

return (
<div
className="metric-type-renderer"
style={{
backgroundColor: `${color}33`,
border: `1px solid ${color}`,
color,
}}
>
{icon}
<Typography.Text style={{ color, fontSize: 12 }}>
{METRIC_TYPE_LABEL_MAP_V2[type]}
</Typography.Text>
</div>
);
}

function ValidateRowValueWrapper({
value,
children,
@@ -182,13 +244,13 @@ export const formatNumberIntoHumanReadableFormat = (
};

export const formatDataForMetricsTable = (
data: MetricsListItemData[],
data: MetricsexplorertypesStatDTO[],
): MetricsListItemRowData[] =>
data.map((metric) => ({
key: metric.metric_name,
key: metric.metricName,
metric_name: (
<ValidateRowValueWrapper value={metric.metric_name}>
<Tooltip title={metric.metric_name}>{metric.metric_name}</Tooltip>
<ValidateRowValueWrapper value={metric.metricName}>
<Tooltip title={metric.metricName}>{metric.metricName}</Tooltip>
</ValidateRowValueWrapper>
),
description: (
@@ -198,39 +260,54 @@ export const formatDataForMetricsTable = (
</Tooltip>
</ValidateRowValueWrapper>
),
metric_type: <MetricTypeRenderer type={metric.type} />,
metric_type: <MetricTypeRendererV2 type={metric.type} />,
unit: (
<ValidateRowValueWrapper value={getUniversalNameFromMetricUnit(metric.unit)}>
{getUniversalNameFromMetricUnit(metric.unit)}
</ValidateRowValueWrapper>
),
[TreemapViewType.SAMPLES]: (
<ValidateRowValueWrapper value={metric[TreemapViewType.SAMPLES]}>
<Tooltip title={metric[TreemapViewType.SAMPLES].toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric[TreemapViewType.SAMPLES])}
[MetricsexplorertypesTreemapModeDTO.samples]: (
<ValidateRowValueWrapper
value={metric[MetricsexplorertypesTreemapModeDTO.samples]}
>
<Tooltip
title={metric[MetricsexplorertypesTreemapModeDTO.samples].toLocaleString()}
>
{formatNumberIntoHumanReadableFormat(
metric[MetricsexplorertypesTreemapModeDTO.samples],
)}
</Tooltip>
</ValidateRowValueWrapper>
),
[TreemapViewType.TIMESERIES]: (
<ValidateRowValueWrapper value={metric[TreemapViewType.TIMESERIES]}>
<Tooltip title={metric[TreemapViewType.TIMESERIES].toLocaleString()}>
{formatNumberIntoHumanReadableFormat(metric[TreemapViewType.TIMESERIES])}
[MetricsexplorertypesTreemapModeDTO.timeseries]: (
<ValidateRowValueWrapper
value={metric[MetricsexplorertypesTreemapModeDTO.timeseries]}
>
<Tooltip
title={metric[
MetricsexplorertypesTreemapModeDTO.timeseries
].toLocaleString()}
>
{formatNumberIntoHumanReadableFormat(
metric[MetricsexplorertypesTreemapModeDTO.timeseries],
)}
</Tooltip>
</ValidateRowValueWrapper>
),
}));

export const transformTreemapData = (
data: TimeseriesData[] | SamplesData[],
viewType: TreemapViewType,
data: MetricsexplorertypesTreemapEntryDTO[],
viewType: MetricsexplorertypesTreemapModeDTO,
): TreemapTile[] => {
const totalSize = (data as (TimeseriesData | SamplesData)[]).reduce(
(acc: number, item: TimeseriesData | SamplesData) => acc + item.percentage,
const totalSize = data.reduce(
(acc: number, item: MetricsexplorertypesTreemapEntryDTO) =>
acc + item.percentage,
0,
);

const children = data.map((item) => ({
id: item.metric_name,
id: item.metricName,
size: totalSize > 0 ? Number((item.percentage / totalSize).toFixed(2)) : 0,
displayValue: Number(item.percentage).toFixed(2),
parent: viewType,

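transformTreemapData above normalizes each entry's percentage against the sum over all entries, so tile sizes are relative shares that add up to roughly 1 even when the raw percentages do not. A standalone sketch of that normalization, assuming only the metricName/percentage entry shape from this diff:

interface TreemapEntry {
  metricName: string;
  percentage: number;
}

// Mirrors the size computation in transformTreemapData: each tile's size is its
// share of the total (rounded to two decimals); displayValue keeps the raw percentage.
function computeTileSizes(
  data: TreemapEntry[],
): { id: string; size: number; displayValue: string }[] {
  const totalSize = data.reduce((acc, item) => acc + item.percentage, 0);
  return data.map((item) => ({
    id: item.metricName,
    size: totalSize > 0 ? Number((item.percentage / totalSize).toFixed(2)) : 0,
    displayValue: Number(item.percentage).toFixed(2),
  }));
}

// computeTileSizes([
//   { metricName: 'a', percentage: 0.5 },
//   { metricName: 'b', percentage: 0.6 },
// ]) => sizes 0.45 and 0.55 (0.5 / 1.1 and 0.6 / 1.1, rounded)
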
@@ -1,8 +1,8 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import BarPanel from 'container/DashboardContainer/visualization/panels/BarPanel/BarPanel';
import HistogramPanel from 'container/DashboardContainer/visualization/panels/HistogramPanel/HistogramPanel';

import TimeSeriesPanel from '../DashboardContainer/visualization/panels/TimeSeriesPanel/TimeSeriesPanel';
import HistogramPanelWrapper from './HistogramPanelWrapper';
import ListPanelWrapper from './ListPanelWrapper';
import PiePanelWrapper from './PiePanelWrapper';
import TablePanelWrapper from './TablePanelWrapper';
@@ -17,7 +17,7 @@ export const PanelTypeVsPanelWrapper = {
[PANEL_TYPES.EMPTY_WIDGET]: null,
[PANEL_TYPES.PIE]: PiePanelWrapper,
[PANEL_TYPES.BAR]: BarPanel,
[PANEL_TYPES.HISTOGRAM]: HistogramPanelWrapper,
[PANEL_TYPES.HISTOGRAM]: HistogramPanel,
};

export const DEFAULT_BUCKET_COUNT = 30;

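PanelTypeVsPanelWrapper above maps each PANEL_TYPES value to the component that renders it, which is why swapping HistogramPanelWrapper for HistogramPanel is a one-entry change. A minimal dispatch sketch under that assumption; the renderPanel helper, its props type, and the import paths are illustrative, not part of the patch:

import { PANEL_TYPES } from 'constants/queryBuilder';

import { PanelTypeVsPanelWrapper } from './constants';

// Hypothetical renderer: looks the panel component up by type and renders it,
// returning null for panel types without a wrapper (e.g. EMPTY_WIDGET).
function renderPanel(
  panelType: PANEL_TYPES,
  props: Record<string, unknown>,
): JSX.Element | null {
  const Panel = PanelTypeVsPanelWrapper[panelType];
  return Panel ? <Panel {...props} /> : null;
}
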
@@ -9,9 +9,6 @@ export const useGetQueryKeyValueSuggestions = ({
searchText,
signalSource,
metricName,
startUnixMilli,
endUnixMilli,
existingQuery,
options,
}: {
key: string;
@@ -23,9 +20,6 @@ export const useGetQueryKeyValueSuggestions = ({
AxiosError
>;
metricName?: string;
startUnixMilli?: number;
endUnixMilli?: number;
existingQuery?: string;
}): UseQueryResult<
AxiosResponse<QueryKeyValueSuggestionsResponseProps>,
AxiosError
@@ -46,9 +40,6 @@ export const useGetQueryKeyValueSuggestions = ({
searchText: searchText || '',
signalSource: signalSource as 'meter' | '',
metricName: metricName || '',
startUnixMilli,
endUnixMilli,
existingQuery,
}),
...options,
});

@@ -0,0 +1,8 @@
import { HistogramTooltipProps } from '../types';
import Tooltip from './Tooltip';

export default function HistogramTooltip(
props: HistogramTooltipProps,
): JSX.Element {
return <Tooltip {...props} showTooltipHeader={false} />;
}
@@ -16,12 +16,16 @@ export default function Tooltip({
uPlotInstance,
timezone,
content,
showTooltipHeader = true,
}: TooltipProps): JSX.Element {
const isDarkMode = useIsDarkMode();

const tooltipContent = content ?? [];

const headerTitle = useMemo(() => {
if (!showTooltipHeader) {
return null;
}
const data = uPlotInstance.data;
const cursorIdx = uPlotInstance.cursor.idx;
if (cursorIdx == null) {
@@ -30,7 +34,12 @@ export default function Tooltip({
return dayjs(data[0][cursorIdx] * 1000)
.tz(timezone)
.format(DATE_TIME_FORMATS.MONTH_DATETIME_SECONDS);
}, [timezone, uPlotInstance.data, uPlotInstance.cursor.idx]);
}, [
timezone,
uPlotInstance.data,
uPlotInstance.cursor.idx,
showTooltipHeader,
]);

return (
<div
@@ -39,9 +48,11 @@ export default function Tooltip({
isDarkMode ? 'darkMode' : 'lightMode',
)}
>
<div className="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
{showTooltipHeader && (
<div className="uplot-tooltip-header">
<span>{headerTitle}</span>
</div>
)}
<div
style={{
height: Math.min(

@@ -60,6 +60,7 @@ export interface TooltipRenderArgs {
}

export interface BaseTooltipProps {
showTooltipHeader?: boolean;
timezone: string;
yAxisUnit?: string;
decimalPrecision?: PrecisionOption;
@@ -74,7 +75,14 @@ export interface BarTooltipProps extends BaseTooltipProps, TooltipRenderArgs {
isStackedBarChart?: boolean;
}

export type TooltipProps = TimeSeriesTooltipProps | BarTooltipProps;
export interface HistogramTooltipProps
extends BaseTooltipProps,
TooltipRenderArgs {}

export type TooltipProps =
| TimeSeriesTooltipProps
| BarTooltipProps
| HistogramTooltipProps;

export enum LegendPosition {
BOTTOM = 'bottom',

@@ -387,7 +387,7 @@ export class UPlotConfigBuilder extends ConfigBuilder<
} = this.getVisibilityResolutionState();

config.series = [
{ value: (): string => '' }, // Base series for timestamp
{ value: (): string => '', label: 'Timestamp' }, // Base series for timestamp
...this.series.map((s) => {
const series = s.getConfig();
// Stored visibility[0] is x-axis/time; data series start at visibility[1]

@@ -49,7 +49,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
}: {
resolvedLineColor: string;
}): Partial<Series> {
const { lineWidth, lineStyle, lineCap } = this.props;
const { lineWidth, lineStyle, lineCap, fillColor } = this.props;
const lineConfig: Partial<Series> = {
stroke: resolvedLineColor,
width: lineWidth ?? 2,
@@ -63,8 +63,12 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
lineConfig.cap = lineCap;
}

if (this.props.panelType === PANEL_TYPES.BAR) {
if (fillColor) {
lineConfig.fill = fillColor;
} else if (this.props.panelType === PANEL_TYPES.BAR) {
lineConfig.fill = resolvedLineColor;
} else if (this.props.panelType === PANEL_TYPES.HISTOGRAM) {
lineConfig.fill = `${resolvedLineColor}40`;
}

return lineConfig;
@@ -147,6 +151,8 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
pointsConfig.show = false;
} else if (showPoints === VisibilityMode.Always) {
pointsConfig.show = true;
} else {
pointsConfig.show = false; // default to hidden
}

return pointsConfig;

@@ -11,22 +11,6 @@ import { Threshold } from '../hooks/types';
import { findMinMaxThresholdValues } from './threshold';
import { LogScaleLimits, RangeFunctionParams } from './types';

/**
* Rounds a number down to the nearest multiple of incr.
* Used for linear scale min so the axis starts on a clean tick.
*/
export function incrRoundDn(num: number, incr: number): number {
return Math.floor(num / incr) * incr;
}

/**
* Rounds a number up to the nearest multiple of incr.
* Used for linear scale max so the axis ends on a clean tick.
*/
export function incrRoundUp(num: number, incr: number): number {
return Math.ceil(num / incr) * incr;
}

/**
* Snaps min/max/softMin/softMax to valid log-scale values (powers of logBase).
* Only applies when distribution is logarithmic; otherwise returns limits unchanged.
@@ -213,25 +197,6 @@ function getLogScaleRange(
);
}

/**
* Snaps linear scale min down and max up to whole numbers so axis bounds are clean.
*/
function roundLinearRange(minMax: Range.MinMax): Range.MinMax {
const [currentMin, currentMax] = minMax;
let roundedMin = currentMin;
let roundedMax = currentMax;

if (roundedMin != null) {
roundedMin = incrRoundDn(roundedMin, 1);
}

if (roundedMax != null) {
roundedMax = incrRoundUp(roundedMax, 1);
}

return [roundedMin, roundedMax];
}

/**
* Snaps log-scale [min, max] to exact powers of logBase (nearest magnitude below/above).
* If min and max would be equal after snapping, max is increased by one magnitude so the range is valid.
@@ -330,7 +295,6 @@ export function createRangeFunction(

if (scale.distr === 1) {
minMax = getLinearScaleRange(minMax, params, dataMin, dataMax);
minMax = roundLinearRange(minMax);
} else if (scale.distr === 3) {
minMax = getLogScaleRange(minMax, params, dataMin, dataMax, logBase);
const logFn = scale.log === 2 ? Math.log2 : Math.log10;

@@ -118,13 +118,13 @@ export const otherFiltersResponse = {
export const quickFiltersAttributeValuesResponse = {
status: 'success',
data: {
data: {
values: {
relatedValues: ['mq-kafka', 'otel-demo', 'otlp-python', 'sample-flask'],
stringValues: ['mq-kafka', 'otel-demo', 'otlp-python', 'sample-flask'],
numberValues: [],
},
complete: true,
},
stringAttributeValues: [
'mq-kafka',
'otel-demo',
'otlp-python',
'sample-flask',
],
numberAttributeValues: null,
boolAttributeValues: null,
},
};

@@ -583,39 +583,6 @@ body {
}
}

.ant-btn-text:hover,
.ant-btn-text:focus-visible {
background-color: var(--bg-vanilla-200) !important;
}

.ant-btn-link:hover,
.ant-btn-link:focus-visible {
background-color: transparent !important;
}

.ant-btn-default:hover,
.ant-btn-default:focus-visible,
.ant-btn-text:not(.ant-btn-primary):hover,
.ant-btn-text:not(.ant-btn-primary):focus-visible,
.ant-btn:not(.ant-btn-primary):not(.ant-btn-dangerous):hover,
.ant-btn:not(.ant-btn-primary):not(.ant-btn-dangerous):focus-visible {
background-color: var(--bg-vanilla-200) !important;
}

.ant-typography:hover,
.ant-typography:focus-visible {
background-color: transparent !important;
}

.ant-tooltip {
--antd-arrow-background-color: var(--bg-vanilla-300);

.ant-tooltip-inner {
background-color: var(--bg-vanilla-200);
color: var(--bg-ink-500);
}
}

// Enhanced legend light mode styles
.u-legend-enhanced {
// Light mode scrollbar styling

@@ -47,9 +47,6 @@ export interface QueryKeyValueRequestProps {
searchText: string;
signalSource?: 'meter' | '';
metricName?: string;
startUnixMilli?: number;
endUnixMilli?: number;
existingQuery?: string;
}

export type SignalType = 'traces' | 'logs' | 'metrics';

@@ -3,3 +3,33 @@
* Example: 1.5 → 2, 1.49 → 1
*/
export const roundHalfUp = (value: number): number => Math.floor(value + 0.5);

/**
* Rounds a number down to the nearest multiple of incr.
* Used for linear scale min so the axis starts on a clean tick.
*/
export function incrRoundDn(num: number, incr: number): number {
return Math.floor(num / incr) * incr;
}

/**
* Rounds a number up to the nearest multiple of incr.
* Used for linear scale max so the axis ends on a clean tick.
*/
export function incrRoundUp(num: number, incr: number): number {
return Math.ceil(num / incr) * incr;
}

/**
* Rounds a number to the nearest multiple of 10^dec.
* Used for decimal precision.
*/
export function roundDecimals(val: number, dec = 0): number {
if (Number.isInteger(val)) {
return val;
}

const p = 10 ** dec;
const n = val * p * (1 + Number.EPSILON);
return Math.round(n) / p;
}

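The rounding helpers consolidated above are pure functions, so their behavior pins down with a few concrete values. A short usage sketch, assuming only the four helpers in this file; the import path is illustrative:

import { incrRoundDn, incrRoundUp, roundDecimals, roundHalfUp } from 'utils/round';

// Snap 7.3 to a tick grid with increment 2: floor to 6, ceil to 8.
incrRoundDn(7.3, 2); // 6
incrRoundUp(7.3, 2); // 8

// roundHalfUp always rounds .5 upward, including for negatives.
roundHalfUp(1.5); // 2
roundHalfUp(-1.5); // -1, since Math.floor(-1.5 + 0.5) === Math.floor(-1)

// roundDecimals returns integers untouched and otherwise rounds to dec decimal
// places; the (1 + Number.EPSILON) factor guards against values that sit just
// below the rounding boundary in binary floating point.
roundDecimals(42); // 42
roundDecimals(3.14159, 2); // 3.14
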
@@ -3,9 +3,8 @@ package flagger
|
||||
import "github.com/SigNoz/signoz/pkg/types/featuretypes"
|
||||
|
||||
var (
|
||||
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
|
||||
FeatureInterpolationEnabled = featuretypes.MustNewName("interpolation_enabled")
|
||||
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
|
||||
FeatureUseSpanMetrics = featuretypes.MustNewName("use_span_metrics")
|
||||
FeatureKafkaSpanEval = featuretypes.MustNewName("kafka_span_eval")
|
||||
)
|
||||
|
||||
func MustNewRegistry() featuretypes.Registry {
|
||||
@@ -18,14 +17,6 @@ func MustNewRegistry() featuretypes.Registry {
|
||||
DefaultVariant: featuretypes.MustNewName("disabled"),
|
||||
Variants: featuretypes.NewBooleanVariants(),
|
||||
},
|
||||
&featuretypes.Feature{
|
||||
Name: FeatureInterpolationEnabled,
|
||||
Kind: featuretypes.KindBoolean,
|
||||
Stage: featuretypes.StageExperimental,
|
||||
Description: "Controls whether to enable interpolation",
|
||||
DefaultVariant: featuretypes.MustNewName("disabled"),
|
||||
Variants: featuretypes.NewBooleanVariants(),
|
||||
},
|
||||
&featuretypes.Feature{
|
||||
Name: FeatureKafkaSpanEval,
|
||||
Kind: featuretypes.KindBoolean,
|
||||
|
||||
@@ -30,3 +30,7 @@ func (module *getter) ListByOwnedKeyRange(ctx context.Context) ([]*types.Organiz
|
||||
|
||||
return module.store.ListByKeyRange(ctx, start, end)
|
||||
}
|
||||
|
||||
func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
|
||||
return module.store.GetByName(ctx, name)
|
||||
}
|
||||
|
||||
@@ -47,6 +47,22 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
|
||||
return organization, nil
|
||||
}
|
||||
|
||||
func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
|
||||
organization := new(types.Organization)
|
||||
err := store.
|
||||
sqlstore.
|
||||
BunDB().
|
||||
NewSelect().
|
||||
Model(organization).
|
||||
Where("name = ?", name).
|
||||
Scan(ctx)
|
||||
if err != nil {
|
||||
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
|
||||
}
|
||||
|
||||
return organization, nil
|
||||
}
|
||||
|
||||
func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
|
||||
organizations := make([]*types.Organization, 0)
|
||||
err := store.
|
||||
|
||||
@@ -14,6 +14,9 @@ type Getter interface {
|
||||
|
||||
// ListByOwnedKeyRange gets all the organizations owned by the instance
|
||||
ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)
|
||||
|
||||
// Gets the organization by name
|
||||
GetByName(context.Context, string) (*types.Organization, error)
|
||||
}
|
||||
|
||||
type Setter interface {
|
||||
|
||||
@@ -151,6 +151,10 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
|
||||
return "", err
|
||||
}
|
||||
|
||||
if err := user.ErrIfRoot(); err != nil {
|
||||
return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
|
||||
}
|
||||
|
||||
token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
|
||||
if err != nil {
|
||||
return "", err
|
||||
|
||||
@@ -5,11 +5,26 @@ import (
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/factory"
|
||||
"github.com/SigNoz/signoz/pkg/types"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Password PasswordConfig `mapstructure:"password"`
|
||||
Root RootConfig `mapstructure:"root"`
|
||||
}
|
||||
|
||||
type RootConfig struct {
|
||||
Enabled bool `mapstructure:"enabled"`
|
||||
Email valuer.Email `mapstructure:"email"`
|
||||
Password string `mapstructure:"password"`
|
||||
Org OrgConfig `mapstructure:"org"`
|
||||
}
|
||||
|
||||
type OrgConfig struct {
|
||||
Name string `mapstructure:"name"`
|
||||
}
|
||||
|
||||
type PasswordConfig struct {
|
||||
Reset ResetConfig `mapstructure:"reset"`
|
||||
}
|
||||
@@ -31,6 +46,12 @@ func newConfig() factory.Config {
|
||||
MaxTokenLifetime: 6 * time.Hour,
|
||||
},
|
||||
},
|
||||
Root: RootConfig{
|
||||
Enabled: false,
|
||||
Org: OrgConfig{
|
||||
Name: "default",
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -39,5 +60,17 @@ func (c Config) Validate() error {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
|
||||
}
|
||||
|
||||
if c.Root.Enabled {
|
||||
if c.Root.Email.IsZero() {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
|
||||
}
|
||||
if c.Root.Password == "" {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
|
||||
}
|
||||
if !types.IsPasswordValid(c.Root.Password) {
|
||||
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -16,6 +16,10 @@ func NewGetter(store types.UserStore) user.Getter {
|
||||
return &getter{store: store}
|
||||
}
|
||||
|
||||
func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
|
||||
return module.store.GetRootUserByOrgID(ctx, orgID)
|
||||
}
|
||||
|
||||
func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
|
||||
users, err := module.store.ListUsersByOrgID(ctx, orgID)
|
||||
if err != nil {
|
||||
|
||||
@@ -103,6 +103,12 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if existingUser != nil {
|
||||
if err := existingUser.ErrIfRoot(); err != nil {
|
||||
return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
|
||||
}
|
||||
}
|
||||
|
||||
if existingUser != nil {
|
||||
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
|
||||
}
|
||||
@@ -202,27 +208,21 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := existingUser.ErrIfRoot(); err != nil {
|
||||
return nil, errors.WithAdditionalf(err, "cannot update root user")
|
||||
}
|
||||
|
||||
requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// only displayName, role can be updated
|
||||
if user.DisplayName == "" {
|
||||
user.DisplayName = existingUser.DisplayName
|
||||
}
|
||||
|
||||
if user.Role == "" {
|
||||
user.Role = existingUser.Role
|
||||
}
|
||||
|
||||
if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
|
||||
if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
|
||||
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
|
||||
}
|
||||
|
||||
// Make sure that th e request is not demoting the last admin user.
|
||||
// also an admin user can only change role of their own or other user
|
||||
if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
|
||||
// Make sure that the request is not demoting the last admin user.
|
||||
if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
|
||||
adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -233,7 +233,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
|
||||
}
|
||||
}
|
||||
|
||||
if user.Role != existingUser.Role {
|
||||
if user.Role != "" && user.Role != existingUser.Role {
|
||||
err = m.authz.ModifyGrant(ctx,
|
||||
orgID,
|
||||
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
|
||||
@@ -245,23 +245,28 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
        }
    }

-   user.UpdatedAt = time.Now()
-   updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
-   if err != nil {
+   existingUser.Update(user.DisplayName, user.Role)
+   if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
        return nil, err
    }

-   traits := types.NewTraitsFromUser(updatedUser)
-   m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
+   return existingUser, nil
}

-   traits["updated_by"] = updatedBy
-   m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
-
-   if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
-       return nil, err
+func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
+   if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
+       return err
    }

-   return updatedUser, nil
+   traits := types.NewTraitsFromUser(user)
+   module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
+   module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
+
+   if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
+       return err
+   }
+
+   return nil
}
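`UpdateUser` now funnels its write through `UpdateAnyUser`, which owns persistence, analytics traits, and identity invalidation in one place. A hedged usage sketch (variable names are illustrative, not from the repo):

```go
// Load, mutate, persist: UpdateAnyUser refreshes analytics traits and
// deletes the cached identity so the next request re-reads the user.
existingUser, err := store.GetUser(ctx, valuer.MustNewUUID(id))
if err != nil {
    return err
}
existingUser.Update(displayName, role) // setter shown in the hunk above
if err := module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
    return err
}
```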
@@ -270,6 +275,10 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
        return err
    }

+   if err := user.ErrIfRoot(); err != nil {
+       return errors.WithAdditionalf(err, "cannot delete root user")
+   }
+
    if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
        return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
    }
@@ -302,6 +311,15 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
 }

func (module *Module) GetOrCreateResetPasswordToken(ctx context.Context, userID valuer.UUID) (*types.ResetPasswordToken, error) {
+   user, err := module.store.GetUser(ctx, userID)
+   if err != nil {
+       return nil, err
+   }
+
+   if err := user.ErrIfRoot(); err != nil {
+       return nil, errors.WithAdditionalf(err, "cannot reset password for root user")
+   }
+
    password, err := module.store.GetPasswordByUserID(ctx, userID)
    if err != nil {
        if !errors.Ast(err, errors.TypeNotFound) {
@@ -364,6 +382,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
        return err
    }

+   if err := user.ErrIfRoot(); err != nil {
+       return errors.WithAdditionalf(err, "cannot reset password for root user")
+   }
+
    token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
    if err != nil {
        module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
@@ -407,6 +429,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
        return err
    }

+   user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
+   if err != nil {
+       return err
+   }
+
+   if err := user.ErrIfRoot(); err != nil {
+       return errors.WithAdditionalf(err, "cannot reset password for root user")
+   }
+
    if err := password.Update(passwd); err != nil {
        return err
    }
@@ -415,6 +446,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
 }

func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
+   user, err := module.store.GetUser(ctx, userID)
+   if err != nil {
+       return err
+   }
+
+   if err := user.ErrIfRoot(); err != nil {
+       return errors.WithAdditionalf(err, "cannot change password for root user")
+   }
+
    password, err := module.store.GetPasswordByUserID(ctx, userID)
    if err != nil {
        return err
@@ -476,7 +516,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
 }

func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
-   user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
+   user, err := types.NewRootUser(name, email, organization.ID)
    if err != nil {
        return nil, err
    }
pkg/modules/user/impluser/service.go (new file, 187 lines)
@@ -0,0 +1,187 @@
package impluser

import (
    "context"
    "time"

    "github.com/SigNoz/signoz/pkg/authz"
    "github.com/SigNoz/signoz/pkg/errors"
    "github.com/SigNoz/signoz/pkg/factory"
    "github.com/SigNoz/signoz/pkg/modules/organization"
    "github.com/SigNoz/signoz/pkg/modules/user"
    "github.com/SigNoz/signoz/pkg/types"
    "github.com/SigNoz/signoz/pkg/types/authtypes"
    "github.com/SigNoz/signoz/pkg/types/roletypes"
    "github.com/SigNoz/signoz/pkg/valuer"
)

type service struct {
    settings  factory.ScopedProviderSettings
    store     types.UserStore
    module    user.Module
    orgGetter organization.Getter
    authz     authz.AuthZ
    config    user.RootConfig
    stopC     chan struct{}
}

func NewService(
    providerSettings factory.ProviderSettings,
    store types.UserStore,
    module user.Module,
    orgGetter organization.Getter,
    authz authz.AuthZ,
    config user.RootConfig,
) user.Service {
    return &service{
        settings:  factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
        store:     store,
        module:    module,
        orgGetter: orgGetter,
        authz:     authz,
        config:    config,
        stopC:     make(chan struct{}),
    }
}

func (s *service) Start(ctx context.Context) error {
    if !s.config.Enabled {
        <-s.stopC
        return nil
    }

    ticker := time.NewTicker(10 * time.Second)
    defer ticker.Stop()

    for {
        err := s.reconcile(ctx)
        if err == nil {
            s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
            <-s.stopC
            return nil
        }

        s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)

        select {
        case <-s.stopC:
            return nil
        case <-ticker.C:
            continue
        }
    }
}

func (s *service) Stop(ctx context.Context) error {
    close(s.stopC)
    return nil
}

func (s *service) reconcile(ctx context.Context) error {
    org, err := s.orgGetter.GetByName(ctx, s.config.Org.Name)
    if err != nil {
        if errors.Ast(err, errors.TypeNotFound) {
            newOrg := types.NewOrganization(s.config.Org.Name, s.config.Org.Name)
            _, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
            return err
        }

        return err
    }

    return s.reconcileRootUser(ctx, org.ID)
}

func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
    existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
    if err != nil && !errors.Ast(err, errors.TypeNotFound) {
        return err
    }

    if existingRoot == nil {
        return s.createOrPromoteRootUser(ctx, orgID)
    }

    return s.updateExistingRootUser(ctx, orgID, existingRoot)
}

func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
    existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
    if err != nil && !errors.Ast(err, errors.TypeNotFound) {
        return err
    }

    if existingUser != nil {
        oldRole := existingUser.Role

        existingUser.PromoteToRoot()
        if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
            return err
        }

        if oldRole != types.RoleAdmin {
            if err := s.authz.ModifyGrant(ctx,
                orgID,
                roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
                roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
                authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
            ); err != nil {
                return err
            }
        }

        return s.setPassword(ctx, existingUser.ID)
    }

    // Create new root user
    newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
    if err != nil {
        return err
    }

    factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
    if err != nil {
        return err
    }

    return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
}

func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
    existingRoot.PromoteToRoot()

    if existingRoot.Email != s.config.Email {
        existingRoot.UpdateEmail(s.config.Email)
        if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
            return err
        }
    }

    return s.setPassword(ctx, existingRoot.ID)
}

func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
    password, err := s.store.GetPasswordByUserID(ctx, userID)
    if err != nil {
        if !errors.Ast(err, errors.TypeNotFound) {
            return err
        }

        factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
        if err != nil {
            return err
        }

        return s.store.CreatePassword(ctx, factorPassword)
    }

    if !password.Equals(s.config.Password) {
        if err := password.Update(s.config.Password); err != nil {
            return err
        }

        return s.store.UpdatePassword(ctx, password)
    }

    return nil
}
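The new `service` reconciles a configured root user at startup: `Start` blocks, retrying every 10 seconds until one reconcile pass succeeds, then parks on `stopC` until `Stop` closes it. A sketch of how it might be wired, assuming `user.RootConfig` exposes the fields the reconciler reads (`Enabled`, `Email`, `Password`, `Org.Name`); everything beyond the `NewService` signature above is illustrative:

```go
// Illustrative wiring only; the concrete shape of user.RootConfig and the
// valuer.Email constructor are assumptions inferred from this diff.
cfg := user.RootConfig{Enabled: true, Password: "change-me"}
cfg.Org.Name = "default" // org to create or adopt
// cfg.Email is a valuer.Email; its constructor is not shown in this diff.

svc := NewService(providerSettings, store, module, orgGetter, authzProvider, cfg)
go func() { _ = svc.Start(context.Background()) }()
// ... on shutdown:
_ = svc.Stop(context.Background())
```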
@@ -210,20 +210,24 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
    return users, nil
}

-func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
-   user.UpdatedAt = time.Now()
-   _, err := store.sqlstore.BunDB().NewUpdate().
+func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
+   _, err := store.
+       sqlstore.
+       BunDBCtx(ctx).
+       NewUpdate().
        Model(user).
        Column("display_name").
+       Column("email").
        Column("role").
+       Column("is_root").
        Column("updated_at").
-       Where("id = ?", id).
        Where("org_id = ?", orgID).
+       Where("id = ?", user.ID).
        Exec(ctx)
    if err != nil {
-       return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
+       return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
    }
-   return user, nil
+   return nil
}

func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
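The store method now resolves its connection through `BunDBCtx(ctx)` rather than `BunDB()`, so an update issued inside `RunInTx` (extended just below) joins the surrounding transaction. A minimal sketch, with error handling elided:

```go
// Both writes commit or roll back together because BunDBCtx picks up the
// transaction carried in txCtx.
err := store.RunInTx(ctx, func(txCtx context.Context) error {
    user.DisplayName = "New Name"
    return store.UpdateUser(txCtx, orgID, user)
})
```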
@@ -602,6 +606,22 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
    })
}

+func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
+   user := new(types.User)
+   err := store.
+       sqlstore.
+       BunDBCtx(ctx).
+       NewSelect().
+       Model(user).
+       Where("org_id = ?", orgID).
+       Where("is_root = ?", true).
+       Scan(ctx)
+   if err != nil {
+       return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
+   }
+   return user, nil
+}
+
func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
    users := []*types.User{}
    err := store.
pkg/modules/user/service.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package user

import "github.com/SigNoz/signoz/pkg/factory"

type Service interface {
    factory.Service
}
@@ -34,6 +34,9 @@ type Module interface {
    ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error

    UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)
+
+   // UpdateAnyUser updates a user and persists the changes to the database along with the analytics and identity deletion.
+   UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
    DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error

    // invite
@@ -54,6 +57,9 @@ type Module interface {
 }

type Getter interface {
+   // Get root user by org id.
+   GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)
+
    // Get gets the users based on the given id
    ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
@@ -183,7 +183,7 @@ type APIHandlerOpts struct {
 }

// NewAPIHandler returns an APIHandler
-func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
+func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
    querierOpts := querier.QuerierOptions{
        Reader: opts.Reader,
        Cache:  opts.Signoz.Cache,
@@ -270,6 +270,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
        }
    }

+   // If the root user is enabled, the setup is complete
+   if config.User.Root.Enabled {
+       aH.SetupCompleted = true
+   }
+
    aH.Upgrader = &websocket.Upgrader{
        CheckOrigin: func(r *http.Request) bool {
            return true
@@ -2045,7 +2050,7 @@ func (aH *APIHandler) registerUser(w http.ResponseWriter, r *http.Request) {
        return
    }

-   organization := types.NewOrganization(req.OrgDisplayName)
+   organization := types.NewOrganization(req.OrgDisplayName, req.OrgName)
    user, errv2 := aH.Signoz.Modules.User.CreateFirstUser(r.Context(), organization, req.Name, req.Email, req.Password)
    if errv2 != nil {
        render.Error(w, errv2)
@@ -135,7 +135,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
        Signoz:         signoz,
        QuerierAPI:     querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
        QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
-   })
+   }, config)
    if err != nil {
        return nil, err
    }
@@ -43,11 +43,11 @@ var (
 // FromUnit returns a converter for the given unit
func FromUnit(u Unit) Converter {
    switch u {
-   case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
+   case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
        return DurationConverter
-   case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
+   case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "ZBy", "YBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "ZiBy", "YiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Zbit", "Ybit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
        return DataConverter
-   case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
+   case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "ZBy/s", "YBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "Zbit/s", "Ybit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "ZiBy/s", "YiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s", "Zibit/s", "Yibit/s":
        return DataRateConverter
    case "percent", "percentunit", "%":
        return PercentConverter
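With the widened switch, UCUM spellings resolve to the same converters as the legacy Grafana-style aliases. A quick sketch of the lookup in use:

```go
// "KiBy/s" (UCUM) and "KiBs" (legacy alias) both select DataRateConverter.
c := FromUnit("KiBy/s")
v := c.Convert(Value{F: 2048, U: "By/s"}, "KiBy/s")
// v == Value{F: 2, U: "KiBy/s"}: 2048 B/s is 2 KiB/s.
```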
@@ -58,36 +58,80 @@ func (*dataConverter) Name() string {
    return "data"
}

+// Notation followed by UCUM:
+// https://ucum.org/ucum
+// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
+// kilo = k, mega = M, giga = G, tera = T, peta = P
+// exa = E, zetta = Z, yotta = Y
+// byte = By, bit = bit
func FromDataUnit(u Unit) float64 {
    switch u {
    case "bytes", "By": // base 2
        return Byte
    case "decbytes": // base 10
        return Byte
-   case "bits": // base 2
+   case "bits", "bit": // base 2
        return Bit
    case "decbits": // base 10
        return Bit
-   case "kbytes", "kBy": // base 2
+   case "kbytes", "KiBy": // base 2
        return Kibibyte
-   case "decKbytes", "deckbytes": // base 10
+   case "decKbytes", "deckbytes", "kBy": // base 10
        return Kilobyte
-   case "mbytes", "MBy": // base 2
+   case "mbytes", "MiBy": // base 2
        return Mebibyte
-   case "decMbytes", "decmbytes": // base 10
+   case "decMbytes", "decmbytes", "MBy": // base 10
        return Megabyte
-   case "gbytes", "GBy": // base 2
+   case "gbytes", "GiBy": // base 2
        return Gibibyte
-   case "decGbytes", "decgbytes": // base 10
+   case "decGbytes", "decgbytes", "GBy": // base 10
        return Gigabyte
-   case "tbytes", "TBy": // base 2
+   case "tbytes", "TiBy": // base 2
        return Tebibyte
-   case "decTbytes", "dectbytes": // base 10
+   case "decTbytes", "dectbytes", "TBy": // base 10
        return Terabyte
-   case "pbytes", "PBy": // base 2
+   case "pbytes", "PiBy": // base 2
        return Pebibyte
-   case "decPbytes", "decpbytes": // base 10
+   case "decPbytes", "decpbytes", "PBy": // base 10
        return Petabyte
+   case "EBy": // base 10
+       return Exabyte
+   case "ZBy": // base 10
+       return Zettabyte
+   case "YBy": // base 10
+       return Yottabyte
+   case "Kibit": // base 2
+       return Kibibit
+   case "Mibit": // base 2
+       return Mebibit
+   case "Gibit": // base 2
+       return Gibibit
+   case "Tibit": // base 2
+       return Tebibit
+   case "Pibit": // base 2
+       return Pebibit
+   case "EiBy": // base 2
+       return Exbibyte
+   case "ZiBy": // base 2
+       return Zebibyte
+   case "YiBy": // base 2
+       return Yobibyte
+   case "kbit": // base 10
+       return Kilobit
+   case "Mbit": // base 10
+       return Megabit
+   case "Gbit": // base 10
+       return Gigabit
+   case "Tbit": // base 10
+       return Terabit
+   case "Pbit": // base 10
+       return Petabit
+   case "Ebit": // base 10
+       return Exabit
+   case "Zbit": // base 10
+       return Zettabit
+   case "Ybit": // base 10
+       return Yottabit
    default:
        return 1
    }
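The factor table makes the base-2/base-10 split explicit: `KiBy` is now the binary kibibyte while `kBy` moved to the decimal kilobyte. A worked conversion under that table:

```go
// 1 GiBy = 1024^3 bytes and 1 MBy = 10^6 bytes, so:
factor := FromDataUnit("GiBy") / FromDataUnit("MBy")
// factor == 1073.741824, i.e. 1 GiBy = 1073.741824 MBy.
_ = factor
```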
@@ -54,6 +54,12 @@ func (*dataRateConverter) Name() string {
    return "data_rate"
}

+// Notation followed by UCUM:
+// https://ucum.org/ucum
+// kibi = Ki, mebi = Mi, gibi = Gi, tebi = Ti, pibi = Pi
+// kilo = k, mega = M, giga = G, tera = T, peta = P
+// exa = E, zetta = Z, yotta = Y
+// byte = By, bit = bit
func FromDataRateUnit(u Unit) float64 {
    // See https://github.com/SigNoz/signoz/blob/5a81f5f90b34845f5b4b3bdd46acf29d04bf3987/frontend/src/container/NewWidget/RightContainer/dataFormatCategories.ts#L62-L85
    switch u {
@@ -65,46 +71,70 @@ func FromDataRateUnit(u Unit) float64 {
        return BitPerSecond
    case "bps", "bit/s": // bits/sec(SI)
        return BitPerSecond
-   case "KiBs": // kibibytes/sec
+   case "KiBs", "KiBy/s": // kibibytes/sec
        return KibibytePerSecond
-   case "Kibits": // kibibits/sec
+   case "Kibits", "Kibit/s": // kibibits/sec
        return KibibitPerSecond
    case "KBs", "kBy/s": // kilobytes/sec
        return KilobytePerSecond
    case "Kbits", "kbit/s": // kilobits/sec
        return KilobitPerSecond
-   case "MiBs": // mebibytes/sec
+   case "MiBs", "MiBy/s": // mebibytes/sec
        return MebibytePerSecond
-   case "Mibits": // mebibits/sec
+   case "Mibits", "Mibit/s": // mebibits/sec
        return MebibitPerSecond
    case "MBs", "MBy/s": // megabytes/sec
        return MegabytePerSecond
    case "Mbits", "Mbit/s": // megabits/sec
        return MegabitPerSecond
-   case "GiBs": // gibibytes/sec
+   case "GiBs", "GiBy/s": // gibibytes/sec
        return GibibytePerSecond
-   case "Gibits": // gibibits/sec
+   case "Gibits", "Gibit/s": // gibibits/sec
        return GibibitPerSecond
    case "GBs", "GBy/s": // gigabytes/sec
        return GigabytePerSecond
    case "Gbits", "Gbit/s": // gigabits/sec
        return GigabitPerSecond
-   case "TiBs": // tebibytes/sec
+   case "TiBs", "TiBy/s": // tebibytes/sec
        return TebibytePerSecond
-   case "Tibits": // tebibits/sec
+   case "Tibits", "Tibit/s": // tebibits/sec
        return TebibitPerSecond
    case "TBs", "TBy/s": // terabytes/sec
        return TerabytePerSecond
    case "Tbits", "Tbit/s": // terabits/sec
        return TerabitPerSecond
-   case "PiBs": // pebibytes/sec
+   case "PiBs", "PiBy/s": // pebibytes/sec
        return PebibytePerSecond
-   case "Pibits": // pebibits/sec
+   case "Pibits", "Pibit/s": // pebibits/sec
        return PebibitPerSecond
    case "PBs", "PBy/s": // petabytes/sec
        return PetabytePerSecond
    case "Pbits", "Pbit/s": // petabits/sec
        return PetabitPerSecond
+   case "EBy/s": // exabytes/sec
+       return ExabytePerSecond
+   case "Ebit/s": // exabits/sec
+       return ExabitPerSecond
+   case "EiBy/s": // exbibytes/sec
+       return ExbibytePerSecond
+   case "Eibit/s": // exbibits/sec
+       return ExbibitPerSecond
+   case "ZBy/s": // zettabytes/sec
+       return ZettabytePerSecond
+   case "Zbit/s": // zettabits/sec
+       return ZettabitPerSecond
+   case "ZiBy/s": // zebibytes/sec
+       return ZebibytePerSecond
+   case "Zibit/s": // zebibits/sec
+       return ZebibitPerSecond
+   case "YBy/s": // yottabytes/sec
+       return YottabytePerSecond
+   case "Ybit/s": // yottabits/sec
+       return YottabitPerSecond
+   case "YiBy/s": // yobibytes/sec
+       return YobibytePerSecond
+   case "Yibit/s": // yobibits/sec
+       return YobibitPerSecond
    default:
        return 1
    }
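The rate factors keep the byte and bit families exactly a factor of 8 apart at every prefix, which is what makes the cross-family test cases below exact:

```go
// 1 KiBy/s = 1024 By/s = 8192 bit/s = 8 Kibit/s.
ratio := FromDataRateUnit("KiBy/s") / FromDataRateUnit("Kibit/s")
// ratio == 8, and likewise for every matching binary prefix pair.
_ = ratio
```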
@@ -75,3 +75,83 @@ func TestDataRate(t *testing.T) {
    // 1024 * 1024 * 1024 bytes = 1 gbytes
    assert.Equal(t, Value{F: 1, U: "GiBs"}, dataRateConverter.Convert(Value{F: 1024 * 1024 * 1024, U: "binBps"}, "GiBs"))
}
+
+func TestDataRateConversionUCUMUnit(t *testing.T) {
+   dataRateConverter := NewDataRateConverter()
+
+   tests := []struct {
+       name     string
+       input    Value
+       toUnit   Unit
+       expected Value
+   }{
+       // Binary byte scaling
+       {name: "Binary byte scaling: 1024 By/s = 1 KiBy/s", input: Value{F: 1024, U: "By/s"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
+       {name: "Kibibyte to bytes: 1 KiBy/s = 1024 By/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "By/s", expected: Value{F: 1024, U: "By/s"}},
+       {name: "Binary byte scaling: 1024 KiBy/s = 1 MiBy/s", input: Value{F: 1024, U: "KiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1, U: "MiBy/s"}},
+       {name: "Gibibyte to bytes: 1 GiBy/s = 1073741824 By/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024, U: "By/s"}},
+       {name: "Binary byte scaling: 1024 MiBy/s = 1 GiBy/s", input: Value{F: 1024, U: "MiBy/s"}, toUnit: "GiBy/s", expected: Value{F: 1, U: "GiBy/s"}},
+       {name: "Gibibyte to mebibyte: 1 GiBy/s = 1024 MiBy/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "MiBy/s", expected: Value{F: 1024, U: "MiBy/s"}},
+       {name: "Binary byte scaling: 1024 GiBy/s = 1 TiBy/s", input: Value{F: 1024, U: "GiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1, U: "TiBy/s"}},
+       {name: "Tebibyte to bytes: 1 TiBy/s = 1099511627776 By/s", input: Value{F: 1, U: "TiBy/s"}, toUnit: "By/s", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By/s"}},
+       {name: "Binary byte scaling: 1024 TiBy/s = 1 PiBy/s", input: Value{F: 1024, U: "TiBy/s"}, toUnit: "PiBy/s", expected: Value{F: 1, U: "PiBy/s"}},
+       {name: "Pebibyte to tebibyte: 1 PiBy/s = 1024 TiBy/s", input: Value{F: 1, U: "PiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024, U: "TiBy/s"}},
+       // Binary bit scaling
+       {name: "Binary bit scaling: 1024 bit/s = 1 Kibit/s", input: Value{F: 1024, U: "bit/s"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
+       {name: "Kibibit to bits: 1 Kibit/s = 1024 bit/s", input: Value{F: 1, U: "Kibit/s"}, toUnit: "bit/s", expected: Value{F: 1024, U: "bit/s"}},
+       {name: "Binary bit scaling: 1024 Kibit/s = 1 Mibit/s", input: Value{F: 1024, U: "Kibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1, U: "Mibit/s"}},
+       {name: "Gibibit to bits: 1 Gibit/s = 1073741824 bit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "bit/s", expected: Value{F: 1024 * 1024 * 1024, U: "bit/s"}},
+       {name: "Binary bit scaling: 1024 Mibit/s = 1 Gibit/s", input: Value{F: 1024, U: "Mibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1, U: "Gibit/s"}},
+       {name: "Gibibit to mebibit: 1 Gibit/s = 1024 Mibit/s", input: Value{F: 1, U: "Gibit/s"}, toUnit: "Mibit/s", expected: Value{F: 1024, U: "Mibit/s"}},
+       {name: "Binary bit scaling: 1024 Gibit/s = 1 Tibit/s", input: Value{F: 1024, U: "Gibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1, U: "Tibit/s"}},
+       {name: "Tebibit to gibibit: 1 Tibit/s = 1024 Gibit/s", input: Value{F: 1, U: "Tibit/s"}, toUnit: "Gibit/s", expected: Value{F: 1024, U: "Gibit/s"}},
+       {name: "Binary bit scaling: 1024 Tibit/s = 1 Pibit/s", input: Value{F: 1024, U: "Tibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1, U: "Pibit/s"}},
+       {name: "Pebibit to tebibit: 1 Pibit/s = 1024 Tibit/s", input: Value{F: 1, U: "Pibit/s"}, toUnit: "Tibit/s", expected: Value{F: 1024, U: "Tibit/s"}},
+       // Bytes to bits
+       {name: "Bytes to bits: 1 KiBy/s = 8 Kibit/s", input: Value{F: 1, U: "KiBy/s"}, toUnit: "Kibit/s", expected: Value{F: 8, U: "Kibit/s"}},
+       {name: "Bytes to bits: 1 MiBy/s = 8 Mibit/s", input: Value{F: 1, U: "MiBy/s"}, toUnit: "Mibit/s", expected: Value{F: 8, U: "Mibit/s"}},
+       {name: "Bytes to bits: 1 GiBy/s = 8 Gibit/s", input: Value{F: 1, U: "GiBy/s"}, toUnit: "Gibit/s", expected: Value{F: 8, U: "Gibit/s"}},
+       // Unit alias
+       {name: "Unit alias: 1 KiBs = 1 KiBy/s", input: Value{F: 1, U: "KiBs"}, toUnit: "KiBy/s", expected: Value{F: 1, U: "KiBy/s"}},
+       {name: "Unit alias: 1 Kibits = 1 Kibit/s", input: Value{F: 1, U: "Kibits"}, toUnit: "Kibit/s", expected: Value{F: 1, U: "Kibit/s"}},
+       // SI byte scaling (Exa, Zetta, Yotta)
+       {name: "SI byte scaling: 1000 PBy/s = 1 EBy/s", input: Value{F: 1000, U: "PBy/s"}, toUnit: "EBy/s", expected: Value{F: 1, U: "EBy/s"}},
+       {name: "Exabyte to bytes: 1 EBy/s = 1e18 By/s", input: Value{F: 1, U: "EBy/s"}, toUnit: "By/s", expected: Value{F: 1e18, U: "By/s"}},
+       {name: "SI byte scaling: 1000 EBy/s = 1 ZBy/s", input: Value{F: 1000, U: "EBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1, U: "ZBy/s"}},
+       {name: "Zettabyte to petabytes: 1 ZBy/s = 1000000 PBy/s", input: Value{F: 1, U: "ZBy/s"}, toUnit: "PBy/s", expected: Value{F: 1e6, U: "PBy/s"}},
+       {name: "SI byte scaling: 1000 ZBy/s = 1 YBy/s", input: Value{F: 1000, U: "ZBy/s"}, toUnit: "YBy/s", expected: Value{F: 1, U: "YBy/s"}},
+       {name: "Yottabyte to zettabyte: 1 YBy/s = 1000 ZBy/s", input: Value{F: 1, U: "YBy/s"}, toUnit: "ZBy/s", expected: Value{F: 1000, U: "ZBy/s"}},
+       // Binary byte scaling (Exbi, Zebi, Yobi)
+       {name: "Binary byte scaling: 1024 PiBy/s = 1 EiBy/s", input: Value{F: 1024, U: "PiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1, U: "EiBy/s"}},
+       {name: "Exbibyte to tebibytes: 1 EiBy/s = 1048576 TiBy/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "TiBy/s", expected: Value{F: 1024 * 1024, U: "TiBy/s"}},
+       {name: "Binary byte scaling: 1024 EiBy/s = 1 ZiBy/s", input: Value{F: 1024, U: "EiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1, U: "ZiBy/s"}},
+       {name: "Zebibyte to exbibyte: 1 ZiBy/s = 1024 EiBy/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "EiBy/s", expected: Value{F: 1024, U: "EiBy/s"}},
+       {name: "Binary byte scaling: 1024 ZiBy/s = 1 YiBy/s", input: Value{F: 1024, U: "ZiBy/s"}, toUnit: "YiBy/s", expected: Value{F: 1, U: "YiBy/s"}},
+       {name: "Yobibyte to zebibyte: 1 YiBy/s = 1024 ZiBy/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "ZiBy/s", expected: Value{F: 1024, U: "ZiBy/s"}},
+       // SI bit scaling (Exa, Zetta, Yotta)
+       {name: "SI bit scaling: 1000 Pbit/s = 1 Ebit/s", input: Value{F: 1000, U: "Pbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1, U: "Ebit/s"}},
+       {name: "Exabit to gigabits: 1 Ebit/s = 1e9 Gbit/s", input: Value{F: 1, U: "Ebit/s"}, toUnit: "Gbit/s", expected: Value{F: 1e9, U: "Gbit/s"}},
+       {name: "SI bit scaling: 1000 Ebit/s = 1 Zbit/s", input: Value{F: 1000, U: "Ebit/s"}, toUnit: "Zbit/s", expected: Value{F: 1, U: "Zbit/s"}},
+       {name: "Zettabit to exabit: 1 Zbit/s = 1000 Ebit/s", input: Value{F: 1, U: "Zbit/s"}, toUnit: "Ebit/s", expected: Value{F: 1000, U: "Ebit/s"}},
+       {name: "SI bit scaling: 1000 Zbit/s = 1 Ybit/s", input: Value{F: 1000, U: "Zbit/s"}, toUnit: "Ybit/s", expected: Value{F: 1, U: "Ybit/s"}},
+       {name: "Yottabit to zettabit: 1 Ybit/s = 1000 Zbit/s", input: Value{F: 1, U: "Ybit/s"}, toUnit: "Zbit/s", expected: Value{F: 1000, U: "Zbit/s"}},
+       // Binary bit scaling (Exbi, Zebi, Yobi)
+       {name: "Binary bit scaling: 1024 Pibit/s = 1 Eibit/s", input: Value{F: 1024, U: "Pibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1, U: "Eibit/s"}},
+       {name: "Exbibit to pebibit: 1 Eibit/s = 1024 Pibit/s", input: Value{F: 1, U: "Eibit/s"}, toUnit: "Pibit/s", expected: Value{F: 1024, U: "Pibit/s"}},
+       {name: "Binary bit scaling: 1024 Eibit/s = 1 Zibit/s", input: Value{F: 1024, U: "Eibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1, U: "Zibit/s"}},
+       {name: "Zebibit to exbibit: 1 Zibit/s = 1024 Eibit/s", input: Value{F: 1, U: "Zibit/s"}, toUnit: "Eibit/s", expected: Value{F: 1024, U: "Eibit/s"}},
+       {name: "Binary bit scaling: 1024 Zibit/s = 1 Yibit/s", input: Value{F: 1024, U: "Zibit/s"}, toUnit: "Yibit/s", expected: Value{F: 1, U: "Yibit/s"}},
+       {name: "Yobibit to zebibit: 1 Yibit/s = 1024 Zibit/s", input: Value{F: 1, U: "Yibit/s"}, toUnit: "Zibit/s", expected: Value{F: 1024, U: "Zibit/s"}},
+       // Bytes to bits (Exbi, Zebi, Yobi)
+       {name: "Bytes to bits: 1 EiBy/s = 8 Eibit/s", input: Value{F: 1, U: "EiBy/s"}, toUnit: "Eibit/s", expected: Value{F: 8, U: "Eibit/s"}},
+       {name: "Bytes to bits: 1 ZiBy/s = 8 Zibit/s", input: Value{F: 1, U: "ZiBy/s"}, toUnit: "Zibit/s", expected: Value{F: 8, U: "Zibit/s"}},
+       {name: "Bytes to bits: 1 YiBy/s = 8 Yibit/s", input: Value{F: 1, U: "YiBy/s"}, toUnit: "Yibit/s", expected: Value{F: 8, U: "Yibit/s"}},
+   }
+
+   for _, tt := range tests {
+       t.Run(tt.name, func(t *testing.T) {
+           got := dataRateConverter.Convert(tt.input, tt.toUnit)
+           assert.Equal(t, tt.expected, got)
+       })
+   }
+}
@@ -13,7 +13,7 @@ func TestData(t *testing.T) {
    assert.Equal(t, Value{F: 1, U: "By"}, dataConverter.Convert(Value{F: 8, U: "bits"}, "By"))
    // 1024 bytes = 1 kbytes
    assert.Equal(t, Value{F: 1, U: "kbytes"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kbytes"))
-   assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1024, U: "bytes"}, "kBy"))
+   assert.Equal(t, Value{F: 1, U: "kBy"}, dataConverter.Convert(Value{F: 1000, U: "bytes"}, "kBy"))
    // 1 byte = 8 bits
    assert.Equal(t, Value{F: 8, U: "bits"}, dataConverter.Convert(Value{F: 1, U: "bytes"}, "bits"))
    // 1 mbytes = 1024 kbytes
@@ -22,7 +22,7 @@ func TestData(t *testing.T) {
    assert.Equal(t, Value{F: 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "kbytes"}, "bytes"))
    // 1024 kbytes = 1 mbytes
    assert.Equal(t, Value{F: 1, U: "mbytes"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "mbytes"))
-   assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1024, U: "kbytes"}, "MBy"))
+   assert.Equal(t, Value{F: 1, U: "MBy"}, dataConverter.Convert(Value{F: 1000, U: "kBy"}, "MBy"))
    // 1 mbytes = 1024 * 1024 bytes
    assert.Equal(t, Value{F: 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "mbytes"}, "bytes"))
    // 1024 mbytes = 1 gbytes
@@ -45,10 +45,90 @@ func TestData(t *testing.T) {
    assert.Equal(t, Value{F: 1024 * 1024 * 1024 * 1024, U: "bytes"}, dataConverter.Convert(Value{F: 1, U: "tbytes"}, "bytes"))
    // 1024 tbytes = 1 pbytes
    assert.Equal(t, Value{F: 1, U: "pbytes"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "pbytes"))
-   // 1024 tbytes = 1 pbytes
-   assert.Equal(t, Value{F: 1, U: "PBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PBy"))
+   // 1024 tbytes = 1 PiBy
+   assert.Equal(t, Value{F: 1, U: "PiBy"}, dataConverter.Convert(Value{F: 1024, U: "tbytes"}, "PiBy"))
    // 1 pbytes = 1024 tbytes
    assert.Equal(t, Value{F: 1024, U: "tbytes"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "tbytes"))
-   // 1024 pbytes = 1 tbytes
-   assert.Equal(t, Value{F: 1024, U: "TBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TBy"))
+   // 1024 TiBy = 1 pbytes
+   assert.Equal(t, Value{F: 1024, U: "TiBy"}, dataConverter.Convert(Value{F: 1, U: "pbytes"}, "TiBy"))
}
+
+func TestDataConversionUCUMUnit(t *testing.T) {
+   dataConverter := NewDataConverter()
+
+   tests := []struct {
+       name     string
+       input    Value
+       toUnit   Unit
+       expected Value
+   }{
+       // Bits to bytes
+       {name: "Bits to bytes: 8 bit = 1 By", input: Value{F: 8, U: "bit"}, toUnit: "By", expected: Value{F: 1, U: "By"}},
+       {name: "Byte to bits: 1 By = 8 bit", input: Value{F: 1, U: "By"}, toUnit: "bit", expected: Value{F: 8, U: "bit"}},
+       // Binary byte scaling
+       {name: "Binary byte scaling: 1024 By = 1 KiBy", input: Value{F: 1024, U: "By"}, toUnit: "KiBy", expected: Value{F: 1, U: "KiBy"}},
+       {name: "Kibibyte to bytes: 1 KiBy = 1024 By", input: Value{F: 1, U: "KiBy"}, toUnit: "By", expected: Value{F: 1024, U: "By"}},
+       {name: "Binary byte scaling: 1024 KiBy = 1 MiBy", input: Value{F: 1024, U: "KiBy"}, toUnit: "MiBy", expected: Value{F: 1, U: "MiBy"}},
+       {name: "Binary byte scaling: 1024 MiBy = 1 GiBy", input: Value{F: 1024, U: "MiBy"}, toUnit: "GiBy", expected: Value{F: 1, U: "GiBy"}},
+       {name: "Gibibyte to mebibyte: 1 GiBy = 1024 MiBy", input: Value{F: 1, U: "GiBy"}, toUnit: "MiBy", expected: Value{F: 1024, U: "MiBy"}},
+       {name: "Binary byte scaling: 1024 GiBy = 1 TiBy", input: Value{F: 1024, U: "GiBy"}, toUnit: "TiBy", expected: Value{F: 1, U: "TiBy"}},
+       {name: "Binary byte scaling: 1024 TiBy = 1 PiBy", input: Value{F: 1024, U: "TiBy"}, toUnit: "PiBy", expected: Value{F: 1, U: "PiBy"}},
+       {name: "Pebibyte to tebibyte: 1 PiBy = 1024 TiBy", input: Value{F: 1, U: "PiBy"}, toUnit: "TiBy", expected: Value{F: 1024, U: "TiBy"}},
+       {name: "Gibibyte to bytes: 1 GiBy = 1073741824 By", input: Value{F: 1, U: "GiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024, U: "By"}},
+       {name: "Tebibyte to bytes: 1 TiBy = 1099511627776 By", input: Value{F: 1, U: "TiBy"}, toUnit: "By", expected: Value{F: 1024 * 1024 * 1024 * 1024, U: "By"}},
+       // SI bit scaling
+       {name: "SI bit scaling: 1000 bit = 1 kbit", input: Value{F: 1000, U: "bit"}, toUnit: "kbit", expected: Value{F: 1, U: "kbit"}},
+       {name: "Kilobit to bits: 1 kbit = 1000 bit", input: Value{F: 1, U: "kbit"}, toUnit: "bit", expected: Value{F: 1000, U: "bit"}},
+       {name: "SI bit scaling: 1000 kbit = 1 Mbit", input: Value{F: 1000, U: "kbit"}, toUnit: "Mbit", expected: Value{F: 1, U: "Mbit"}},
+       {name: "Gigabit to bits: 1 Gbit = 1000000000 bit", input: Value{F: 1, U: "Gbit"}, toUnit: "bit", expected: Value{F: 1000 * 1000 * 1000, U: "bit"}},
+       {name: "SI bit scaling: 1000 Mbit = 1 Gbit", input: Value{F: 1000, U: "Mbit"}, toUnit: "Gbit", expected: Value{F: 1, U: "Gbit"}},
+       {name: "Gigabit to megabit: 1 Gbit = 1000 Mbit", input: Value{F: 1, U: "Gbit"}, toUnit: "Mbit", expected: Value{F: 1000, U: "Mbit"}},
+       {name: "SI bit scaling: 1000 Gbit = 1 Tbit", input: Value{F: 1000, U: "Gbit"}, toUnit: "Tbit", expected: Value{F: 1, U: "Tbit"}},
+       {name: "Terabit to gigabit: 1 Tbit = 1000 Gbit", input: Value{F: 1, U: "Tbit"}, toUnit: "Gbit", expected: Value{F: 1000, U: "Gbit"}},
+       {name: "SI bit scaling: 1000 Tbit = 1 Pbit", input: Value{F: 1000, U: "Tbit"}, toUnit: "Pbit", expected: Value{F: 1, U: "Pbit"}},
+       {name: "Petabit to terabit: 1 Pbit = 1000 Tbit", input: Value{F: 1, U: "Pbit"}, toUnit: "Tbit", expected: Value{F: 1000, U: "Tbit"}},
+       // Binary bit scaling
+       {name: "Binary bit scaling: 1024 bit = 1 Kibit", input: Value{F: 1024, U: "bit"}, toUnit: "Kibit", expected: Value{F: 1, U: "Kibit"}},
+       {name: "Kibibit to bits: 1 Kibit = 1024 bit", input: Value{F: 1, U: "Kibit"}, toUnit: "bit", expected: Value{F: 1024, U: "bit"}},
+       {name: "Binary bit scaling: 1024 Kibit = 1 Mibit", input: Value{F: 1024, U: "Kibit"}, toUnit: "Mibit", expected: Value{F: 1, U: "Mibit"}},
+       {name: "Mebibit to kibibit: 1 Mibit = 1024 Kibit", input: Value{F: 1, U: "Mibit"}, toUnit: "Kibit", expected: Value{F: 1024, U: "Kibit"}},
+       {name: "Binary bit scaling: 1024 Mibit = 1 Gibit", input: Value{F: 1024, U: "Mibit"}, toUnit: "Gibit", expected: Value{F: 1, U: "Gibit"}},
+       {name: "Gibibit to mebibit: 1 Gibit = 1024 Mibit", input: Value{F: 1, U: "Gibit"}, toUnit: "Mibit", expected: Value{F: 1024, U: "Mibit"}},
+       {name: "Binary bit scaling: 1024 Gibit = 1 Tibit", input: Value{F: 1024, U: "Gibit"}, toUnit: "Tibit", expected: Value{F: 1, U: "Tibit"}},
+       {name: "Tebibit to gibibit: 1 Tibit = 1024 Gibit", input: Value{F: 1, U: "Tibit"}, toUnit: "Gibit", expected: Value{F: 1024, U: "Gibit"}},
+       {name: "Binary bit scaling: 1024 Tibit = 1 Pibit", input: Value{F: 1024, U: "Tibit"}, toUnit: "Pibit", expected: Value{F: 1, U: "Pibit"}},
+       {name: "Pebibit to tebibit: 1 Pibit = 1024 Tibit", input: Value{F: 1, U: "Pibit"}, toUnit: "Tibit", expected: Value{F: 1024, U: "Tibit"}},
+       // Bytes to bits
+       {name: "Bytes to bits: 1 KiBy = 8 Kibit", input: Value{F: 1, U: "KiBy"}, toUnit: "Kibit", expected: Value{F: 8, U: "Kibit"}},
+       {name: "Bytes to bits: 1 MiBy = 8 Mibit", input: Value{F: 1, U: "MiBy"}, toUnit: "Mibit", expected: Value{F: 8, U: "Mibit"}},
+       {name: "Bytes to bits: 1 GiBy = 8 Gibit", input: Value{F: 1, U: "GiBy"}, toUnit: "Gibit", expected: Value{F: 8, U: "Gibit"}},
+       // SI byte scaling (Exa, Zetta, Yotta)
+       {name: "SI byte scaling: 1000 PBy = 1 EBy", input: Value{F: 1000, U: "PBy"}, toUnit: "EBy", expected: Value{F: 1, U: "EBy"}},
+       {name: "Exabyte to bytes: 1 EBy = 1e18 By", input: Value{F: 1, U: "EBy"}, toUnit: "By", expected: Value{F: 1e18, U: "By"}},
+       {name: "SI byte scaling: 1000 EBy = 1 ZBy", input: Value{F: 1000, U: "EBy"}, toUnit: "ZBy", expected: Value{F: 1, U: "ZBy"}},
+       {name: "Zettabyte to petabytes: 1 ZBy = 1000000 PBy", input: Value{F: 1, U: "ZBy"}, toUnit: "PBy", expected: Value{F: 1e6, U: "PBy"}},
+       {name: "SI byte scaling: 1000 ZBy = 1 YBy", input: Value{F: 1000, U: "ZBy"}, toUnit: "YBy", expected: Value{F: 1, U: "YBy"}},
+       {name: "Yottabyte to zettabyte: 1 YBy = 1000 ZBy", input: Value{F: 1, U: "YBy"}, toUnit: "ZBy", expected: Value{F: 1000, U: "ZBy"}},
+       // Binary byte scaling (Exbi, Zebi, Yobi)
+       {name: "Binary byte scaling: 1024 PiBy = 1 EiBy", input: Value{F: 1024, U: "PiBy"}, toUnit: "EiBy", expected: Value{F: 1, U: "EiBy"}},
+       {name: "Exbibyte to tebibytes: 1 EiBy = 1048576 TiBy", input: Value{F: 1, U: "EiBy"}, toUnit: "TiBy", expected: Value{F: 1024 * 1024, U: "TiBy"}},
+       {name: "Binary byte scaling: 1024 EiBy = 1 ZiBy", input: Value{F: 1024, U: "EiBy"}, toUnit: "ZiBy", expected: Value{F: 1, U: "ZiBy"}},
+       {name: "Zebibyte to exbibyte: 1 ZiBy = 1024 EiBy", input: Value{F: 1, U: "ZiBy"}, toUnit: "EiBy", expected: Value{F: 1024, U: "EiBy"}},
+       {name: "Binary byte scaling: 1024 ZiBy = 1 YiBy", input: Value{F: 1024, U: "ZiBy"}, toUnit: "YiBy", expected: Value{F: 1, U: "YiBy"}},
+       {name: "Yobibyte to zebibyte: 1 YiBy = 1024 ZiBy", input: Value{F: 1, U: "YiBy"}, toUnit: "ZiBy", expected: Value{F: 1024, U: "ZiBy"}},
+       // SI bit scaling (Exa, Zetta, Yotta)
+       {name: "SI bit scaling: 1000 Pbit = 1 Ebit", input: Value{F: 1000, U: "Pbit"}, toUnit: "Ebit", expected: Value{F: 1, U: "Ebit"}},
+       {name: "Exabit to gigabits: 1 Ebit = 1e9 Gbit", input: Value{F: 1, U: "Ebit"}, toUnit: "Gbit", expected: Value{F: 1e9, U: "Gbit"}},
+       {name: "SI bit scaling: 1000 Ebit = 1 Zbit", input: Value{F: 1000, U: "Ebit"}, toUnit: "Zbit", expected: Value{F: 1, U: "Zbit"}},
+       {name: "Zettabit to exabit: 1 Zbit = 1000 Ebit", input: Value{F: 1, U: "Zbit"}, toUnit: "Ebit", expected: Value{F: 1000, U: "Ebit"}},
+       {name: "SI bit scaling: 1000 Zbit = 1 Ybit", input: Value{F: 1000, U: "Zbit"}, toUnit: "Ybit", expected: Value{F: 1, U: "Ybit"}},
+       {name: "Yottabit to zettabit: 1 Ybit = 1000 Zbit", input: Value{F: 1, U: "Ybit"}, toUnit: "Zbit", expected: Value{F: 1000, U: "Zbit"}},
+   }
+
+   for _, tt := range tests {
+       t.Run(tt.name, func(t *testing.T) {
+           got := dataConverter.Convert(tt.input, tt.toUnit)
+           assert.Equal(t, tt.expected, got)
+       })
+   }
+}
@@ -47,7 +47,7 @@ func FromTimeUnit(u Unit) Duration {
    return Hour
    case "d":
        return Day
-   case "w":
+   case "w", "wk":
        return Week
    default:
        return Second
@@ -54,4 +54,13 @@ func TestDurationConvert(t *testing.T) {
    assert.Equal(t, Value{F: 1, U: "ms"}, timeConverter.Convert(Value{F: 1000, U: "us"}, "ms"))
    // 1000000000 ns = 1 s
    assert.Equal(t, Value{F: 1, U: "s"}, timeConverter.Convert(Value{F: 1000000000, U: "ns"}, "s"))
+
+   // 7 d = 1 wk
+   assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 7, U: "d"}, "wk"))
+   // 1 wk = 7 d
+   assert.Equal(t, Value{F: 7, U: "d"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "d"))
+   // 1 wk = 168 h
+   assert.Equal(t, Value{F: 168, U: "h"}, timeConverter.Convert(Value{F: 1, U: "wk"}, "h"))
+   // 604800 s = 1 wk
+   assert.Equal(t, Value{F: 1, U: "wk"}, timeConverter.Convert(Value{F: 604800, U: "s"}, "wk"))
}
@@ -24,30 +24,62 @@ func (f *dataFormatter) Format(value float64, unit string) string {
        return humanize.IBytes(uint64(value))
    case "decbytes":
        return humanize.Bytes(uint64(value))
-   case "bits":
-       return humanize.IBytes(uint64(value * converter.Bit))
+   case "bits", "bit":
+       // humanize.IBytes/Bytes doesn't support bits
+       // and returns 0 B for values less than a byte
+       if value < 8 {
+           return fmt.Sprintf("%v b", value)
+       }
+       return humanize.IBytes(uint64(value / 8))
    case "decbits":
-       return humanize.Bytes(uint64(value * converter.Bit))
-   case "kbytes", "kBy":
+       if value < 8 {
+           return fmt.Sprintf("%v b", value)
+       }
+       return humanize.Bytes(uint64(value / 8))
+   case "kbytes", "KiBy":
        return humanize.IBytes(uint64(value * converter.Kibibit))
-   case "decKbytes", "deckbytes":
-       return humanize.IBytes(uint64(value * converter.Kilobit))
-   case "mbytes", "MBy":
+   case "Kibit":
+       return humanize.IBytes(uint64(value * converter.Kibibit / 8))
+   case "decKbytes", "deckbytes", "kBy":
+       return humanize.Bytes(uint64(value * converter.Kilobit))
+   case "kbit":
+       return humanize.Bytes(uint64(value * converter.Kilobit / 8))
+   case "mbytes", "MiBy":
        return humanize.IBytes(uint64(value * converter.Mebibit))
-   case "decMbytes", "decmbytes":
+   case "Mibit":
+       return humanize.IBytes(uint64(value * converter.Mebibit / 8))
+   case "decMbytes", "decmbytes", "MBy":
        return humanize.Bytes(uint64(value * converter.Megabit))
-   case "gbytes", "GBy":
+   case "Mbit":
+       return humanize.Bytes(uint64(value * converter.Megabit / 8))
+   case "gbytes", "GiBy":
        return humanize.IBytes(uint64(value * converter.Gibibit))
-   case "decGbytes", "decgbytes":
+   case "Gibit":
+       return humanize.IBytes(uint64(value * converter.Gibibit / 8))
+   case "decGbytes", "decgbytes", "GBy":
        return humanize.Bytes(uint64(value * converter.Gigabit))
-   case "tbytes", "TBy":
+   case "Gbit":
+       return humanize.Bytes(uint64(value * converter.Gigabit / 8))
+   case "tbytes", "TiBy":
        return humanize.IBytes(uint64(value * converter.Tebibit))
-   case "decTbytes", "dectbytes":
+   case "Tibit":
+       return humanize.IBytes(uint64(value * converter.Tebibit / 8))
+   case "decTbytes", "dectbytes", "TBy":
        return humanize.Bytes(uint64(value * converter.Terabit))
-   case "pbytes", "PBy":
+   case "Tbit":
+       return humanize.Bytes(uint64(value * converter.Terabit / 8))
+   case "pbytes", "PiBy":
        return humanize.IBytes(uint64(value * converter.Pebibit))
-   case "decPbytes", "decpbytes":
+   case "Pbit":
+       return humanize.Bytes(uint64(value * converter.Petabit / 8))
+   case "decPbytes", "decpbytes", "PBy":
        return humanize.Bytes(uint64(value * converter.Petabit))
+   case "EiBy":
+       return humanize.IBytes(uint64(value * converter.Exbibit))
+   case "Ebit":
+       return humanize.Bytes(uint64(value * converter.Exabit / 8))
+   case "EBy":
+       return humanize.Bytes(uint64(value * converter.Exabit))
    }
    // When unit is not matched, return the value as it is.
    return fmt.Sprintf("%v", value)
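Because `humanize.IBytes`/`Bytes` only understand whole bytes, bit units below 8 are printed as raw bits and everything else is divided down to bytes first. Expected outputs under the branches above (the `NewDataFormatter` constructor is an assumption mirroring `NewDataRateFormatter` from the rate test file):

```go
f := NewDataFormatter() // assumed constructor, not shown in this diff
f.Format(7, "bit")   // "7 b": below one byte, printed as bits
f.Format(100, "bit") // "12 B": 100/8 truncates to 12 bytes
f.Format(1, "KiBy")  // "1.0 KiB" via humanize.IBytes
```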
@@ -25,49 +25,66 @@ func (f *dataRateFormatter) Format(value float64, unit string) string {
    case "Bps", "By/s":
        return humanize.Bytes(uint64(value)) + "/s"
    case "binbps":
-       return humanize.IBytes(uint64(value*converter.BitPerSecond)) + "/s"
+       // humanize.IBytes/Bytes doesn't support bits
+       // and returns 0 B for values less than a byte
+       if value < 8 {
+           return fmt.Sprintf("%v b/s", value)
+       }
+       return humanize.IBytes(uint64(value/8)) + "/s"
    case "bps", "bit/s":
-       return humanize.Bytes(uint64(value*converter.BitPerSecond)) + "/s"
-   case "KiBs":
+       if value < 8 {
+           return fmt.Sprintf("%v b/s", value)
+       }
+       return humanize.Bytes(uint64(value/8)) + "/s"
+   case "KiBs", "KiBy/s":
        return humanize.IBytes(uint64(value*converter.KibibitPerSecond)) + "/s"
-   case "Kibits":
-       return humanize.IBytes(uint64(value*converter.KibibytePerSecond)) + "/s"
+   case "Kibits", "Kibit/s":
+       return humanize.IBytes(uint64(value*converter.KibibitPerSecond/8)) + "/s"
    case "KBs", "kBy/s":
        return humanize.IBytes(uint64(value*converter.KilobitPerSecond)) + "/s"
    case "Kbits", "kbit/s":
-       return humanize.IBytes(uint64(value*converter.KilobytePerSecond)) + "/s"
-   case "MiBs":
+       return humanize.Bytes(uint64(value*converter.KilobitPerSecond/8)) + "/s"
+   case "MiBs", "MiBy/s":
        return humanize.IBytes(uint64(value*converter.MebibitPerSecond)) + "/s"
-   case "Mibits":
-       return humanize.IBytes(uint64(value*converter.MebibytePerSecond)) + "/s"
+   case "Mibits", "Mibit/s":
+       return humanize.IBytes(uint64(value*converter.MebibitPerSecond/8)) + "/s"
    case "MBs", "MBy/s":
        return humanize.IBytes(uint64(value*converter.MegabitPerSecond)) + "/s"
    case "Mbits", "Mbit/s":
-       return humanize.IBytes(uint64(value*converter.MegabytePerSecond)) + "/s"
-   case "GiBs":
+       return humanize.Bytes(uint64(value*converter.MegabitPerSecond/8)) + "/s"
+   case "GiBs", "GiBy/s":
        return humanize.IBytes(uint64(value*converter.GibibitPerSecond)) + "/s"
-   case "Gibits":
-       return humanize.IBytes(uint64(value*converter.GibibytePerSecond)) + "/s"
+   case "Gibits", "Gibit/s":
+       return humanize.IBytes(uint64(value*converter.GibibitPerSecond/8)) + "/s"
    case "GBs", "GBy/s":
        return humanize.IBytes(uint64(value*converter.GigabitPerSecond)) + "/s"
    case "Gbits", "Gbit/s":
-       return humanize.IBytes(uint64(value*converter.GigabytePerSecond)) + "/s"
-   case "TiBs":
+       return humanize.Bytes(uint64(value*converter.GigabitPerSecond/8)) + "/s"
+   case "TiBs", "TiBy/s":
        return humanize.IBytes(uint64(value*converter.TebibitPerSecond)) + "/s"
-   case "Tibits":
-       return humanize.IBytes(uint64(value*converter.TebibytePerSecond)) + "/s"
+   case "Tibits", "Tibit/s":
+       return humanize.IBytes(uint64(value*converter.TebibitPerSecond/8)) + "/s"
    case "TBs", "TBy/s":
        return humanize.IBytes(uint64(value*converter.TerabitPerSecond)) + "/s"
    case "Tbits", "Tbit/s":
-       return humanize.IBytes(uint64(value*converter.TerabytePerSecond)) + "/s"
-   case "PiBs":
+       return humanize.Bytes(uint64(value*converter.TerabitPerSecond/8)) + "/s"
+   case "PiBs", "PiBy/s":
        return humanize.IBytes(uint64(value*converter.PebibitPerSecond)) + "/s"
-   case "Pibits":
-       return humanize.IBytes(uint64(value*converter.PebibytePerSecond)) + "/s"
+   case "Pibits", "Pibit/s":
+       return humanize.IBytes(uint64(value*converter.PebibitPerSecond/8)) + "/s"
    case "PBs", "PBy/s":
        return humanize.IBytes(uint64(value*converter.PetabitPerSecond)) + "/s"
    case "Pbits", "Pbit/s":
-       return humanize.IBytes(uint64(value*converter.PetabytePerSecond)) + "/s"
+       return humanize.Bytes(uint64(value*converter.PetabitPerSecond/8)) + "/s"
+   // Exa units
+   case "EBy/s":
+       return humanize.Bytes(uint64(value*converter.ExabitPerSecond)) + "/s"
+   case "Ebit/s":
+       return humanize.Bytes(uint64(value*converter.ExabitPerSecond/8)) + "/s"
+   case "EiBy/s":
+       return humanize.IBytes(uint64(value*converter.ExbibitPerSecond)) + "/s"
+   case "Eibit/s":
+       return humanize.IBytes(uint64(value*converter.ExbibitPerSecond/8)) + "/s"
    }
    // When unit is not matched, return the value as it is.
    return fmt.Sprintf("%v", value)
pkg/query-service/formatter/data_rate_test.go (new file, 150 lines)
@@ -0,0 +1,150 @@
package formatter
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestDataRateFormatterComprehensive(t *testing.T) {
|
||||
dataRateFormatter := NewDataRateFormatter()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
value float64
|
||||
unit string
|
||||
expected string
|
||||
}{
|
||||
// IEC Bits/sec - binbps, bps
|
||||
{name: "binbps as Bps", value: 7, unit: "binbps", expected: "7 b/s"},
|
||||
{name: "100 binbps as 12 Bps", value: 100, unit: "binbps", expected: "12 B/s"},
|
||||
{name: "binbps as 23 GiBs", value: 8 * 1024 * 1024 * 1024 * 23, unit: "binbps", expected: "23 GiB/s"},
|
||||
|
||||
// SI Bits/sec - bps, bit/s
|
||||
{name: "bps as Bps", value: 5, unit: "bps", expected: "5 b/s"},
|
||||
{name: "200 bitps as 25 Bps", value: 200, unit: "bit/s", expected: "25 B/s"},
|
||||
{name: "bitps as MBs", value: 8 * 1000 * 1000 * 7, unit: "bit/s", expected: "7.0 MB/s"},
|
||||
|
||||
// IEC Base bytes/sec - binBps
|
||||
{name: "binBps as Bps", value: 0, unit: "binBps", expected: "0 B/s"},
|
||||
{name: "1 binBps as 1 Bps", value: 1, unit: "binBps", expected: "1 B/s"},
|
||||
{name: "binBps as Kibps", value: 1024, unit: "binBps", expected: "1.0 KiB/s"},
|
||||
{name: "binBps as Mibps", value: 1024 * 1024, unit: "binBps", expected: "1.0 MiB/s"},
|
||||
{name: "binBps as Gibps", value: 1024 * 1024 * 1024, unit: "binBps", expected: "1.0 GiB/s"},
|
||||
|
||||
// SI Base bytes/sec - Bps, By/s
|
||||
{name: "Bps as Bps", value: 1, unit: "Bps", expected: "1 B/s"},
|
||||
{name: "Bps as kbps", value: 1000, unit: "Bps", expected: "1.0 kB/s"},
|
||||
{name: "Bps as Mbps", value: 1000 * 1000, unit: "Bps", expected: "1.0 MB/s"},
|
||||
{name: "Byps as kbps", value: 1000, unit: "By/s", expected: "1.0 kB/s"},
|
||||
|
||||
// Kibibytes/sec - KiBs, KiBy/s
|
||||
{name: "Kibs as Bps", value: 0, unit: "KiBs", expected: "0 B/s"},
|
||||
{name: "Kibs as Kibps", value: 1, unit: "KiBs", expected: "1.0 KiB/s"},
|
||||
{name: "Kibs as Mibps", value: 1024, unit: "KiBs", expected: "1.0 MiB/s"},
|
||||
{name: "Kibs as Gibps", value: 3 * 1024 * 1024, unit: "KiBs", expected: "3.0 GiB/s"},
|
||||
{name: "KiByps as Kibps", value: 1, unit: "KiBy/s", expected: "1.0 KiB/s"},
|
||||
{name: "KiByps as Mibps", value: 1024, unit: "KiBy/s", expected: "1.0 MiB/s"},
|
||||
|
||||
// Kibibits/sec - Kibits, Kibit/s
|
||||
{name: "Kibitps as Kibps", value: 1, unit: "Kibits", expected: "128 B/s"},
|
||||
{name: "Kibitps as Mibps", value: 42 * 1024, unit: "Kibits", expected: "5.3 MiB/s"},
|
||||
{name: "Kibitps as Kibps 10", value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"},
|
||||
|
||||
// Kilobytes/sec (SI) - KBs, kBy/s
|
||||
{name: "Kbs as Bps", value: 0.5, unit: "KBs", expected: "500 B/s"},
|
||||
{name: "Kbs as Mibps", value: 1048.6, unit: "KBs", expected: "1.0 MiB/s"},
|
||||
{name: "kByps as Bps", value: 1, unit: "kBy/s", expected: "1000 B/s"},
|
||||
|
||||
// Kilobits/sec (SI) - Kbits, kbit/s
|
||||
{name: "Kbitps as Bps", value: 1, unit: "Kbits", expected: "125 B/s"},
|
||||
{name: "kbitps as Bps", value: 1, unit: "kbit/s", expected: "125 B/s"},
|
||||
|
||||
// Mebibytes/sec - MiBs, MiBy/s
|
||||
{name: "Mibs as Mibps", value: 1, unit: "MiBs", expected: "1.0 MiB/s"},
|
||||
{name: "Mibs as Gibps", value: 1024, unit: "MiBs", expected: "1.0 GiB/s"},
|
||||
{name: "Mibs as Tibps", value: 1024 * 1024, unit: "MiBs", expected: "1.0 TiB/s"},
|
||||
{name: "MiByps as Mibps", value: 1, unit: "MiBy/s", expected: "1.0 MiB/s"},
|
||||
|
||||
// Mebibits/sec - Mibits, Mibit/s
|
||||
{name: "Mibitps as Mibps", value: 40, unit: "Mibits", expected: "5.0 MiB/s"},
|
||||
{name: "Mibitps as Mibps per second variant", value: 10, unit: "Mibit/s", expected: "1.3 MiB/s"},
|
||||
|
||||
// Megabytes/sec (SI) - MBs, MBy/s
|
||||
{name: "Mbs as Kibps", value: 1, unit: "MBs", expected: "977 KiB/s"},
|
||||
{name: "MByps as Kibps", value: 1, unit: "MBy/s", expected: "977 KiB/s"},
|
||||
|
||||
// Megabits/sec (SI) - Mbits, Mbit/s
|
||||
{name: "Mbitps as Kibps", value: 1, unit: "Mbits", expected: "125 kB/s"},
|
||||
{name: "Mbitps as Kibps per second variant", value: 1, unit: "Mbit/s", expected: "125 kB/s"},
|
||||
|
||||
// Gibibytes/sec - GiBs, GiBy/s
|
||||
{name: "Gibs as Gibps", value: 1, unit: "GiBs", expected: "1.0 GiB/s"},
|
||||
{name: "Gibs as Tibps", value: 1024, unit: "GiBs", expected: "1.0 TiB/s"},
|
||||
{name: "GiByps as Tibps", value: 42 * 1024, unit: "GiBy/s", expected: "42 TiB/s"},
|
||||
|
||||
// Gibibits/sec - Gibits, Gibit/s
|
||||
{name: "Gibitps as Tibps", value: 42 * 1024, unit: "Gibits", expected: "5.3 TiB/s"},
|
||||
{name: "Gibitps as Tibps per second variant", value: 42 * 1024, unit: "Gibit/s", expected: "5.3 TiB/s"},
|
||||
|
||||
// Gigabytes/sec (SI) - GBs, GBy/s
|
||||
{name: "Gbs as Tibps", value: 42 * 1000, unit: "GBs", expected: "38 TiB/s"},
|
||||
{name: "GByps as Tibps", value: 42 * 1000, unit: "GBy/s", expected: "38 TiB/s"},
|
||||
|
||||
// Gigabits/sec (SI) - Gbits, Gbit/s
|
||||
{name: "Gbitps as Tibps", value: 42 * 1000, unit: "Gbits", expected: "5.3 TB/s"},
|
||||
{name: "Gbitps as Tibps per second variant", value: 42 * 1000, unit: "Gbit/s", expected: "5.3 TB/s"},
|
||||
|
||||
// Tebibytes/sec - TiBs, TiBy/s
|
||||
{name: "Tibs as Tibps", value: 1, unit: "TiBs", expected: "1.0 TiB/s"},
|
||||
{name: "Tibs as Pibps", value: 1024, unit: "TiBs", expected: "1.0 PiB/s"},
|
||||
{name: "TiByps as Pibps", value: 42 * 1024, unit: "TiBy/s", expected: "42 PiB/s"},
|
||||
|
||||
// Tebibits/sec - Tibits, Tibit/s
|
||||
{name: "Tibitps as Pibps", value: 42 * 1024, unit: "Tibits", expected: "5.3 PiB/s"},
|
||||
{name: "Tibitps as Pibps per second variant", value: 42 * 1024, unit: "Tibit/s", expected: "5.3 PiB/s"},
|
||||
|
||||
// Terabytes/sec (SI) - TBs, TBy/s
|
||||
{name: "Tbs as Pibps", value: 42 * 1000, unit: "TBs", expected: "37 PiB/s"},
|
||||
{name: "TByps as Pibps", value: 42 * 1000, unit: "TBy/s", expected: "37 PiB/s"},
|
||||
|
||||
// Terabits/sec (SI) - Tbits, Tbit/s
|
||||
{name: "Tbitps as Pibps", value: 42 * 1000, unit: "Tbits", expected: "5.3 PB/s"},
|
||||
{name: "Tbitps as Pibps per second variant", value: 42 * 1000, unit: "Tbit/s", expected: "5.3 PB/s"},
|
||||
|
||||
// Pebibytes/sec - PiBs, PiBy/s
|
||||
{name: "Pibs as Eibps", value: 10 * 1024, unit: "PiBs", expected: "10 EiB/s"},
|
||||
{name: "PiByps as Eibps", value: 10 * 1024, unit: "PiBy/s", expected: "10 EiB/s"},
|
||||
|
||||
// Pebibits/sec - Pibits, Pibit/s
|
||||
{name: "Pibitps as Eibps", value: 10 * 1024, unit: "Pibits", expected: "1.3 EiB/s"},
|
||||
{name: "Pibitps as Eibps per second variant", value: 10 * 1024, unit: "Pibit/s", expected: "1.3 EiB/s"},
|
||||
|
||||
// Petabytes/sec (SI) - PBs, PBy/s
|
||||
{name: "Pbs as Pibps", value: 42, unit: "PBs", expected: "37 PiB/s"},
|
||||
{name: "PByps as Pibps", value: 42, unit: "PBy/s", expected: "37 PiB/s"},
|
||||
|
||||
// Petabits/sec (SI) - Pbits, Pbit/s
|
||||
{name: "Pbitps as Pibps", value: 42, unit: "Pbits", expected: "5.3 PB/s"},
|
||||
{name: "Pbitps as Pibps per second variant", value: 42, unit: "Pbit/s", expected: "5.3 PB/s"},
|
||||
|
||||
// Exabytes/sec (SI) - EBy/s
|
||||
{name: "EByps as Ebps", value: 10, unit: "EBy/s", expected: "10 EB/s"},
|
||||
|
||||
// Exabits/sec (SI) - Ebit/s
|
||||
{name: "Ebitps as Ebps", value: 10, unit: "Ebit/s", expected: "1.3 EB/s"},
|
||||
|
||||
// Exbibytes/sec (IEC) - EiBy/s
|
||||
{name: "EiByps as Eibps", value: 10, unit: "EiBy/s", expected: "10 EiB/s"},
|
||||
|
||||
// Exbibits/sec (IEC) - Eibit/s
|
||||
{name: "Eibitps as Eibps", value: 10, unit: "Eibit/s", expected: "1.3 EiB/s"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := dataRateFormatter.Format(tt.value, tt.unit)
|
||||
assert.Equal(t, tt.expected, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
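For reference, every bit-based expectation in the table above follows the same arithmetic: divide by 8 to get bytes, then scale by 1024 for IEC suffixes or 1000 for SI suffixes. A minimal sketch of that conversion (the helper name is illustrative, not part of the package):

	package main

	import "fmt"

	// kibitPerSecToBytesPerSec shows the math behind rows like
	// {value: 10, unit: "Kibit/s", expected: "1.3 KiB/s"}:
	// 10 Kibit/s = 10 * 1024 bits/s = 1280 B/s = 1.25 KiB/s, rendered "1.3 KiB/s".
	func kibitPerSecToBytesPerSec(v float64) float64 {
		return v * 1024 / 8
	}

	func main() {
		fmt.Println(kibitPerSecToBytesPerSec(10)) // 1280
	}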
@@ -14,21 +14,174 @@ func TestData(t *testing.T) {
	assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "bytes"))
	assert.Equal(t, "1.0 KiB", dataFormatter.Format(1024, "By"))
	assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "mbytes"))
	assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MBy"))
	assert.Equal(t, "2.3 GiB", dataFormatter.Format(2.3*1024, "MiBy"))
	assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "bytes"))
	assert.Equal(t, "1.0 MiB", dataFormatter.Format(1024*1024, "By"))
	assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "mbytes"))
	assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MBy"))
	assert.Equal(t, "69 TiB", dataFormatter.Format(69*1024*1024, "MiBy"))
	assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "bytes"))
	assert.Equal(t, "102 KiB", dataFormatter.Format(102*1024, "By"))
	assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kbytes"))
	assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "kBy"))
	assert.Equal(t, "240 MiB", dataFormatter.Format(240*1024, "KiBy"))
	assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kbytes"))
	assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "kBy"))
	assert.Equal(t, "1.0 GiB", dataFormatter.Format(1024*1024, "KiBy"))
	assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kbytes"))
	assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "kBy"))
	assert.Equal(t, "23 GiB", dataFormatter.Format(23*1024*1024, "KiBy"))
	assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kbytes"))
	assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "kBy"))
	assert.Equal(t, "32 TiB", dataFormatter.Format(32*1024*1024*1024, "KiBy"))
	assert.Equal(t, "24 MiB", dataFormatter.Format(24, "mbytes"))
	assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MBy"))
	assert.Equal(t, "24 MiB", dataFormatter.Format(24, "MiBy"))
}

func TestDataFormatterComprehensive(t *testing.T) {
	dataFormatter := NewDataFormatter()

	tests := []struct {
		name     string
		value    float64
		unit     string
		expected string
	}{
		// IEC Bits - bits, bit
		{name: "bits: 1", value: 1, unit: "bits", expected: "1 b"},
		{name: "bits: 7", value: 7, unit: "bit", expected: "7 b"},
		{name: "bits: to MiB = 8 * 1024 * 1024 * 9", value: 8 * 1024 * 1024 * 9, unit: "bits", expected: "9.0 MiB"},

		// SI Bits - decbits, bit
		{name: "decbits: 1", value: 1, unit: "decbits", expected: "1 b"},
		{name: "decbits: 7", value: 7, unit: "decbits", expected: "7 b"},
		{name: "decbits: to MB = 8 * 1000 * 1000 * 4", value: 8 * 1000 * 1000 * 4, unit: "decbits", expected: "4.0 MB"},

		// IEC Base bytes - bytes, By
		{name: "bytes: 0", value: 0, unit: "bytes", expected: "0 B"},
		{name: "bytes: 1", value: 1, unit: "bytes", expected: "1 B"},
		{name: "bytes: 512", value: 512, unit: "bytes", expected: "512 B"},
		{name: "bytes: 1023", value: 1023, unit: "bytes", expected: "1023 B"},
		{name: "bytes: 1024 = 1 KiB", value: 1024, unit: "bytes", expected: "1.0 KiB"},
		{name: "bytes: 1536", value: 1536, unit: "bytes", expected: "1.5 KiB"},
		{name: "bytes: 1024*1024 = 1 MiB", value: 1024 * 1024, unit: "bytes", expected: "1.0 MiB"},
		{name: "bytes: 1024*1024*1024 = 1 GiB", value: 1024 * 1024 * 1024, unit: "bytes", expected: "1.0 GiB"},
		{name: "By: same as bytes", value: 1024, unit: "By", expected: "1.0 KiB"},

		// SI Base bytes - decbytes
		{name: "decbytes: 1", value: 1, unit: "decbytes", expected: "1 B"},
		{name: "decbytes: 1000 = 1 kB", value: 1000, unit: "decbytes", expected: "1.0 kB"},
		{name: "decbytes: 1000*1000 = 1 MB", value: 1000 * 1000, unit: "decbytes", expected: "1.0 MB"},
		{name: "decbytes: 1000*1000*1000 = 1 GB", value: 1000 * 1000 * 1000, unit: "decbytes", expected: "1.0 GB"},

		// Kibibytes - kbytes, KiBy (IEC)
		{name: "kbytes: 0", value: 0, unit: "kbytes", expected: "0 B"},
		{name: "kbytes: 1 = 1 KiB", value: 1, unit: "kbytes", expected: "1.0 KiB"},
		{name: "kbytes: 512", value: 512, unit: "kbytes", expected: "512 KiB"},
		{name: "kbytes: 1024 = 1 MiB", value: 1024, unit: "kbytes", expected: "1.0 MiB"},
		{name: "kbytes: 1024*1024 = 1 GiB", value: 1024 * 1024, unit: "kbytes", expected: "1.0 GiB"},
		{name: "kbytes: 2.3*1024 = 2.3 MiB", value: 2.3 * 1024, unit: "kbytes", expected: "2.3 MiB"},
		{name: "KiBy: 1 = 1 KiB", value: 1, unit: "KiBy", expected: "1.0 KiB"},
		{name: "KiBy: 1024 = 1 MiB", value: 1024, unit: "KiBy", expected: "1.0 MiB"},
		{name: "kbytes and KiBy alias", value: 240 * 1024, unit: "KiBy", expected: "240 MiB"},

		// SI Kilobytes - decKbytes, deckbytes, kBy
		{name: "decKbytes: 1 = 1 kB", value: 1, unit: "decKbytes", expected: "1.0 kB"},
		{name: "decKbytes: 1000 = 1 MB", value: 1000, unit: "decKbytes", expected: "1.0 MB"},
		{name: "deckbytes: 1 = 1 kB", value: 1, unit: "deckbytes", expected: "1.0 kB"},
		{name: "kBy: 1 = 1 kB", value: 1, unit: "kBy", expected: "1.0 kB"},
		{name: "kBy: 1000 = 1 MB", value: 1000, unit: "kBy", expected: "1.0 MB"},

		// Mebibytes - mbytes, MiBy (IEC)
		{name: "mbytes: 1 = 1 MiB", value: 1, unit: "mbytes", expected: "1.0 MiB"},
		{name: "mbytes: 24", value: 24, unit: "mbytes", expected: "24 MiB"},
		{name: "mbytes: 1024 = 1 GiB", value: 1024, unit: "mbytes", expected: "1.0 GiB"},
		{name: "mbytes: 1024*1024 = 1 TiB", value: 1024 * 1024, unit: "mbytes", expected: "1.0 TiB"},
		{name: "mbytes: 69*1024 = 69 GiB", value: 69 * 1024, unit: "mbytes", expected: "69 GiB"},
		{name: "mbytes: 69*1024*1024 = 69 TiB", value: 69 * 1024 * 1024, unit: "mbytes", expected: "69 TiB"},
		{name: "MiBy: 1 = 1 MiB", value: 1, unit: "MiBy", expected: "1.0 MiB"},
		{name: "MiBy: 1024 = 1 GiB", value: 1024, unit: "MiBy", expected: "1.0 GiB"},

		// SI Megabytes - decMbytes, decmbytes, MBy
		{name: "decMbytes: 1 = 1 MB", value: 1, unit: "decMbytes", expected: "1.0 MB"},
		{name: "decMbytes: 1000 = 1 GB", value: 1000, unit: "decMbytes", expected: "1.0 GB"},
		{name: "decmbytes: 1 = 1 MB", value: 1, unit: "decmbytes", expected: "1.0 MB"},
		{name: "MBy: 1 = 1 MB", value: 1, unit: "MBy", expected: "1.0 MB"},

		// Gibibytes - gbytes, GiBy (IEC)
		{name: "gbytes: 1 = 1 GiB", value: 1, unit: "gbytes", expected: "1.0 GiB"},
		{name: "gbytes: 1024 = 1 TiB", value: 1024, unit: "gbytes", expected: "1.0 TiB"},
		{name: "GiBy: 42*1024 = 42 TiB", value: 42 * 1024, unit: "GiBy", expected: "42 TiB"},

		// SI Gigabytes - decGbytes, decgbytes, GBy
		{name: "decGbytes: 42*1000 = 42 TB", value: 42 * 1000, unit: "decGbytes", expected: "42 TB"},
		{name: "GBy: 42*1000 = 42 TB", value: 42 * 1000, unit: "GBy", expected: "42 TB"},

		// Tebibytes - tbytes, TiBy (IEC)
		{name: "tbytes: 1 = 1 TiB", value: 1, unit: "tbytes", expected: "1.0 TiB"},
		{name: "tbytes: 1024 = 1 PiB", value: 1024, unit: "tbytes", expected: "1.0 PiB"},
		{name: "TiBy: 42*1024 = 42 PiB", value: 42 * 1024, unit: "TiBy", expected: "42 PiB"},

		// SI Terabytes - decTbytes, dectbytes, TBy
		{name: "decTbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "decTbytes", expected: "42 PB"},
		{name: "dectbytes: 42*1000 = 42 PB", value: 42 * 1000, unit: "dectbytes", expected: "42 PB"},
		{name: "TBy: 42*1000 = 42 PB", value: 42 * 1000, unit: "TBy", expected: "42 PB"},

		// Pebibytes - pbytes, PiBy (IEC)
		{name: "pbytes: 10*1024 = 10 EiB", value: 10 * 1024, unit: "pbytes", expected: "10 EiB"},
		{name: "PiBy: 10*1024 = 10 EiB", value: 10 * 1024, unit: "PiBy", expected: "10 EiB"},

		// SI Petabytes - decPbytes, decpbytes, PBy
		{name: "decPbytes: 42 = 42 PB", value: 42, unit: "decPbytes", expected: "42 PB"},
		{name: "decpbytes: 42 = 42 PB", value: 42, unit: "decpbytes", expected: "42 PB"},
		{name: "PBy: 42 = 42 PB", value: 42, unit: "PBy", expected: "42 PB"},

		// Exbibytes - EiBy (IEC)
		{name: "EiBy: 10 = 10 EiB", value: 10, unit: "EiBy", expected: "10 EiB"},

		// Exabytes - EBy (SI)
		{name: "EBy: 10 = 10 EB", value: 10, unit: "EBy", expected: "10 EB"},

		// Kibibits - Kibit (IEC): 1 Kibit = 1024 bits = 128 bytes
		{name: "Kibit: 1 = 128 B", value: 1, unit: "Kibit", expected: "128 B"},
		{name: "Kibit: 1024 = 128 KiB", value: 1024, unit: "Kibit", expected: "128 KiB"},

		// Mebibits - Mibit (IEC): 1 Mibit = 1024 Kibit = 128 KiB
		{name: "Mibit: 1 = 128 KiB", value: 1, unit: "Mibit", expected: "128 KiB"},
		{name: "Mibit: 1024 = 128 MiB", value: 1024, unit: "Mibit", expected: "128 MiB"},

		// Gibibits - Gibit (IEC): 1 Gibit = 1024 Mibit = 128 MiB
		{name: "Gibit: 1 = 128 MiB", value: 1, unit: "Gibit", expected: "128 MiB"},
		{name: "Gibit: 42*1024 = 5.3 TiB", value: 42 * 1024, unit: "Gibit", expected: "5.3 TiB"},

		// Tebibits - Tibit (IEC): 1 Tibit = 1024 Gibit = 128 GiB
		{name: "Tibit: 1 = 128 GiB", value: 1, unit: "Tibit", expected: "128 GiB"},
		{name: "Tibit: 42*1024 = 5.3 PiB", value: 42 * 1024, unit: "Tibit", expected: "5.3 PiB"},

		// Kilobits - kbit (SI): 1 kbit = 1000 bits = 125 bytes
		{name: "kbit: 1 = 125 B", value: 1, unit: "kbit", expected: "125 B"},
		{name: "kbit: 1000 = 125 kB", value: 1000, unit: "kbit", expected: "125 kB"},

		// Megabits - Mbit (SI): 1 Mbit = 1000 kbit = 125 kB
		{name: "Mbit: 1 = 125 kB", value: 1, unit: "Mbit", expected: "125 kB"},
		{name: "Mbit: 1000 = 125 MB", value: 1000, unit: "Mbit", expected: "125 MB"},

		// Gigabits - Gbit (SI): 1 Gbit = 1000 Mbit = 125 MB
		{name: "Gbit: 1 = 125 MB", value: 1, unit: "Gbit", expected: "125 MB"},
		{name: "Gbit: 42*1000 = 5.3 TB", value: 42 * 1000, unit: "Gbit", expected: "5.3 TB"},

		// Terabits - Tbit (SI): 1 Tbit = 1000 Gbit = 125 GB
		{name: "Tbit: 1 = 125 GB", value: 1, unit: "Tbit", expected: "125 GB"},
		{name: "Tbit: 42*1000 = 5.3 PB", value: 42 * 1000, unit: "Tbit", expected: "5.3 PB"},

		// Petabits - Pbit (SI): 1 Pbit = 1000 Tbit = 125 TB
		{name: "Pbit: 1 = 125 TB", value: 1, unit: "Pbit", expected: "125 TB"},
		{name: "Pbit: 42 = 5.3 PB", value: 42, unit: "Pbit", expected: "5.3 PB"},

		// Exabits - Ebit (SI): 1 Ebit = 1000 Pbit = 125 PB
		{name: "Ebit: 1 = 125 PB", value: 1, unit: "Ebit", expected: "125 PB"},
		{name: "Ebit: 10 = 1.3 EB", value: 10, unit: "Ebit", expected: "1.3 EB"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := dataFormatter.Format(tt.value, tt.unit)
			assert.Equal(t, tt.expected, got)
		})
	}
}
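These cases pin down two conventions at once: IEC units scale by 1024 and SI units by 1000, and rendered values carry one decimal below 10 and none at or above it ("1.0 KiB", "5.3 TiB", but "102 KiB", "69 TiB"). A rough reduction of that rounding rule, assuming the formatter behaves like a generic humanize loop (the helper below is illustrative, not the package's implementation):

	package main

	import "fmt"

	// humanize scales v by base until it drops below base, then prints one
	// decimal for values under 10 and none otherwise, matching the table above.
	func humanize(v, base float64, suffixes []string) string {
		i := 0
		for v >= base && i < len(suffixes)-1 {
			v /= base
			i++
		}
		if v >= 10 {
			return fmt.Sprintf("%.0f %s", v, suffixes[i])
		}
		return fmt.Sprintf("%.1f %s", v, suffixes[i])
	}

	func main() {
		iec := []string{"B", "KiB", "MiB", "GiB", "TiB"}
		fmt.Println(humanize(1536, 1024, iec))     // 1.5 KiB
		fmt.Println(humanize(102*1024, 1024, iec)) // 102 KiB
	}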
@@ -18,11 +18,11 @@ var (

 func FromUnit(u string) Formatter {
 	switch u {
-	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min":
+	case "ns", "us", "µs", "ms", "s", "m", "h", "d", "min", "w", "wk":
 		return DurationFormatter
-	case "bytes", "decbytes", "bits", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy":
+	case "bytes", "decbytes", "bits", "bit", "decbits", "kbytes", "decKbytes", "deckbytes", "mbytes", "decMbytes", "decmbytes", "gbytes", "decGbytes", "decgbytes", "tbytes", "decTbytes", "dectbytes", "pbytes", "decPbytes", "decpbytes", "By", "kBy", "MBy", "GBy", "TBy", "PBy", "EBy", "KiBy", "MiBy", "GiBy", "TiBy", "PiBy", "EiBy", "kbit", "Mbit", "Gbit", "Tbit", "Pbit", "Ebit", "Kibit", "Mibit", "Gibit", "Tibit", "Pibit":
 		return DataFormatter
-	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s":
+	case "binBps", "Bps", "binbps", "bps", "KiBs", "Kibits", "KBs", "Kbits", "MiBs", "Mibits", "MBs", "Mbits", "GiBs", "Gibits", "GBs", "Gbits", "TiBs", "Tibits", "TBs", "Tbits", "PiBs", "Pibits", "PBs", "Pbits", "By/s", "kBy/s", "MBy/s", "GBy/s", "TBy/s", "PBy/s", "EBy/s", "bit/s", "kbit/s", "Mbit/s", "Gbit/s", "Tbit/s", "Pbit/s", "Ebit/s", "KiBy/s", "MiBy/s", "GiBy/s", "TiBy/s", "PiBy/s", "EiBy/s", "Kibit/s", "Mibit/s", "Gibit/s", "Tibit/s", "Pibit/s", "Eibit/s":
 		return DataRateFormatter
 	case "percent", "percentunit", "%":
 		return PercentFormatter
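With the expanded case lists, OTel-style unit strings such as "KiBy" or "Gibit/s" now resolve to a concrete formatter instead of falling through. An illustrative fragment, assuming this package's FromUnit and Formatter as shown above:

	formatter := FromUnit("KiBy")      // resolves to DataFormatter after this change
	_ = formatter.Format(1024, "KiBy") // "1.0 MiB", per TestDataFormatterComprehensive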
@@ -32,7 +32,7 @@ func (f *durationFormatter) Format(value float64, unit string) string {
 		return toHours(value)
 	case "d":
 		return toDays(value)
-	case "w":
+	case "w", "wk":
 		return toWeeks(value)
 	}
 	// When unit is not matched, return the value as it is.
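Together with the FromUnit change, this means a "wk" unit now renders through toWeeks rather than hitting the unmatched-unit fallback; previously a value with unit "wk" would have been returned as-is (the exact rendered string depends on toWeeks, which is not shown in this diff).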
@@ -205,7 +205,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
 		key.Indexes = matchingKey.Indexes
 		key.Materialized = matchingKey.Materialized
 		key.JSONPlan = matchingKey.JSONPlan

 		return actions
 	} else {
 		// multiple matching keys, set materialized only if all the keys are materialized
@@ -483,6 +483,22 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
 	value1 := v.Visit(values[0])
 	value2 := v.Visit(values[1])

+	switch value1.(type) {
+	case float64:
+		if _, ok := value2.(float64); !ok {
+			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected number for both operands", keys[0].Name))
+			return ""
+		}
+	case string:
+		if _, ok := value2.(string); !ok {
+			v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: expected string for both operands", keys[0].Name))
+			return ""
+		}
+	default:
+		v.errors = append(v.errors, fmt.Sprintf("value type mismatch for key %s: operands must be number or string", keys[0].Name))
+		return ""
+	}
+
 	var conds []string
 	for _, key := range keys {
 		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, []any{value1, value2}, v.builder, v.startNs, v.endNs)
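In practice this rejects mixed-type operands (for example in a BETWEEN range) while the expression is being visited, before any condition is built. An illustrative pair of filter expressions, with a hypothetical key name:

	duration_nano BETWEEN 100 AND 200     (both operands numeric: accepted)
	duration_nano BETWEEN 100 AND 'fast'  (mixed types: records "value type mismatch for key duration_nano: expected number for both operands")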
@@ -855,7 +871,7 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
 	// 1. either user meant key ( this is already handled above in fieldKeysForName )
 	// 2. or user meant `attribute.key` we look up in the map for all possible field keys with name 'attribute.key'

 	// Note:
 	// If user only wants to search `attribute.key`, then they have to use `attribute.attribute.key`
 	// If user only wants to search `key`, then they have to use `key`
 	// If user wants to search both, they can use `attribute.key` and we will resolve the ambiguity
@@ -375,13 +375,6 @@ func mergeAndEnsureBackwardCompatibility(ctx context.Context, logger *slog.Logge
 		config.Flagger.Config.Boolean[flagger.FeatureKafkaSpanEval.String()] = os.Getenv("KAFKA_SPAN_EVAL") == "true"
 	}

-	if os.Getenv("INTERPOLATION_ENABLED") != "" {
-		logger.WarnContext(ctx, "[Deprecated] env INTERPOLATION_ENABLED is deprecated and scheduled for removal. Please use SIGNOZ_FLAGGER_CONFIG_BOOLEAN_INTERPOLATION__ENABLED instead.")
-		if config.Flagger.Config.Boolean == nil {
-			config.Flagger.Config.Boolean = make(map[string]bool)
-		}
-		config.Flagger.Config.Boolean[flagger.FeatureInterpolationEnabled.String()] = os.Getenv("INTERPOLATION_ENABLED") == "true"
-	}
 }

 func (config Config) Collect(_ context.Context, _ valuer.UUID) (map[string]any, error) {
@@ -167,6 +167,8 @@ func NewSQLMigrationProviderFactories(
 		sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
 		sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
 		sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
+		sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
+		sqlmigration.NewAddUserEmailOrgIDIndexFactory(sqlstore, sqlschema),
 	)
 }
@@ -389,6 +389,8 @@ func New(
 	// Initialize all modules
 	modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)

+	userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
+
 	// Initialize all handlers for the modules
 	handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)

@@ -438,6 +440,7 @@ func New(
 		factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
 		factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
 		factory.NewNamedService(factory.MustNewName("authz"), authz),
+		factory.NewNamedService(factory.MustNewName("user"), userService),
 	)
 	if err != nil {
 		return nil, err
pkg/sqlmigration/064_add_root_user.go (new file, 80 lines)
@@ -0,0 +1,80 @@
package sqlmigration

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun"
	"github.com/uptrace/bun/migrate"
)

type addRootUser struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
}

func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
		return &addRootUser{
			sqlstore:  sqlstore,
			sqlschema: sqlschema,
		}, nil
	})
}

func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
	if err := migrations.Register(migration.Up, migration.Down); err != nil {
		return err
	}

	return nil
}

func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
	if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
		return err
	}

	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	defer func() {
		_ = tx.Rollback()
	}()

	table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
	if err != nil {
		return err
	}

	column := &sqlschema.Column{
		Name:     sqlschema.ColumnName("is_root"),
		DataType: sqlschema.DataTypeBoolean,
		Nullable: false,
	}

	sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
	for _, sql := range sqls {
		if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
			return err
		}
	}

	if err := tx.Commit(); err != nil {
		return err
	}

	if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
		return err
	}

	return nil
}

func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
	return nil
}
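A note on the pattern in Up: foreign-key enforcement is toggled off around the column addition, presumably because adding a NOT NULL column can require a table rebuild (notably on SQLite) that would otherwise trip FK checks mid-rewrite. The deferred tx.Rollback is a no-op once Commit succeeds, so it only fires on the error paths.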
pkg/sqlmigration/065_add_user_email_org_id_index.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package sqlmigration

import (
	"context"

	"github.com/SigNoz/signoz/pkg/factory"
	"github.com/SigNoz/signoz/pkg/sqlschema"
	"github.com/SigNoz/signoz/pkg/sqlstore"
	"github.com/uptrace/bun"
	"github.com/uptrace/bun/migrate"
)

type addUserEmailOrgIDIndex struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
}

func NewAddUserEmailOrgIDIndexFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	return factory.NewProviderFactory(factory.MustNewName("add_user_email_org_id_index"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
		return &addUserEmailOrgIDIndex{
			sqlstore:  sqlstore,
			sqlschema: sqlschema,
		}, nil
	})
}

func (migration *addUserEmailOrgIDIndex) Register(migrations *migrate.Migrations) error {
	if err := migrations.Register(migration.Up, migration.Down); err != nil {
		return err
	}

	return nil
}

func (migration *addUserEmailOrgIDIndex) Up(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}

	defer func() {
		_ = tx.Rollback()
	}()

	sqls := migration.sqlschema.Operator().CreateIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"email", "org_id"}})
	for _, sql := range sqls {
		if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
			return err
		}
	}

	if err := tx.Commit(); err != nil {
		return err
	}

	return nil
}

func (migration *addUserEmailOrgIDIndex) Down(ctx context.Context, db *bun.DB) error {
	return nil
}
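The composite unique index on (email, org_id) enforces that an email is unique within an organization while still allowing the same email to exist in different organizations; the exact CREATE UNIQUE INDEX statement is generated by the sqlschema operator and is not shown in this diff.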
@@ -5,6 +5,7 @@ import (
 	"fmt"
 	"log/slog"

+	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/factory"
 	"github.com/SigNoz/signoz/pkg/querybuilder"
 	"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -73,7 +74,7 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
 		cteArgs      [][]any
 	)

-	if b.metricsStatementBuilder.CanShortCircuitDelta(query) {
+	if qbtypes.CanShortCircuitDelta(query.Aggregations[0]) {
 		// spatial_aggregation_cte directly for certain delta queries
 		if frag, args, err := b.buildTemporalAggDeltaFastPath(ctx, start, end, query, keys, variables); err != nil {
 			return nil, err
@@ -91,8 +92,9 @@ func (b *meterQueryStatementBuilder) buildPipelineStatement(
 	}

 	// spatial_aggregation_cte
-	frag, args := b.buildSpatialAggregationCTE(ctx, start, end, query, keys)
-	if frag != "" {
+	if frag, args, err := b.buildSpatialAggregationCTE(ctx, start, end, query, keys); err != nil {
+		return nil, err
+	} else if frag != "" {
 		cteFragments = append(cteFragments, frag)
 		cteArgs = append(cteArgs, args)
 	}
@@ -122,13 +124,16 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
 	for _, g := range query.GroupBy {
 		col, err := b.fm.ColumnExpressionFor(ctx, &g.TelemetryFieldKey, keys)
 		if err != nil {
-			return "", []any{}, err
+			return "", nil, err
 		}
 		sb.SelectMore(col)
 	}

 	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	if err != nil {
+		return "", nil, err
+	}
 	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
 		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
 	}
@@ -150,7 +155,7 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDeltaFastPath(
 			Variables: variables,
 		}, start, end)
 		if err != nil {
-			return "", []any{}, err
+			return "", nil, err
 		}
 	}
 	if filterWhere != nil {
@@ -208,8 +213,11 @@ func (b *meterQueryStatementBuilder) buildTemporalAggDelta(
 	}

 	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
+	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality,
 		query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	if err != nil {
+		return "", nil, err
+	}
 	if query.Aggregations[0].TimeAggregation == metrictypes.TimeAggregationRate {
 		aggCol = fmt.Sprintf("%s/%d", aggCol, stepSec)
 	}
@@ -278,7 +286,10 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(
 	}

 	tbl := WhichSamplesTableToUse(start, end, query.Aggregations[0].Type, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
-	aggCol := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	aggCol, err := AggregationColumnForSamplesTable(start, end, query.Aggregations[0].Type, query.Aggregations[0].Temporality, query.Aggregations[0].TimeAggregation, query.Aggregations[0].TableHints)
+	if err != nil {
+		return "", nil, err
+	}
 	baseSb.SelectMore(fmt.Sprintf("%s AS per_series_value", aggCol))

 	baseSb.From(fmt.Sprintf("%s.%s AS points", DBName, tbl))
@@ -315,25 +326,23 @@ func (b *meterQueryStatementBuilder) buildTemporalAggCumulativeOrUnspecified(

 	switch query.Aggregations[0].TimeAggregation {
 	case metrictypes.TimeAggregationRate:
-		rateExpr := fmt.Sprintf(telemetrymetrics.RateWithoutNegative, start, start)
 		wrapped := sqlbuilder.NewSelectBuilder()
 		wrapped.Select("ts")
 		for _, g := range query.GroupBy {
 			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 		}
-		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", rateExpr))
+		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.RateTmpl))
 		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
 		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
 		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil

 	case metrictypes.TimeAggregationIncrease:
-		incExpr := fmt.Sprintf(telemetrymetrics.IncreaseWithoutNegative, start, start)
 		wrapped := sqlbuilder.NewSelectBuilder()
 		wrapped.Select("ts")
 		for _, g := range query.GroupBy {
 			wrapped.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
 		}
-		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", incExpr))
+		wrapped.SelectMore(fmt.Sprintf("%s AS per_series_value", telemetrymetrics.IncreaseTmpl))
 		wrapped.From(fmt.Sprintf("(%s) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)", innerQuery))
 		q, args := wrapped.BuildWithFlavor(sqlbuilder.ClickHouse, innerArgs...)
 		return fmt.Sprintf("__temporal_aggregation_cte AS (%s)", q), args, nil
@@ -348,7 +357,15 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
 	_ uint64,
 	query qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation],
 	_ map[string][]*telemetrytypes.TelemetryFieldKey,
-) (string, []any) {
+) (string, []any, error) {
+	if query.Aggregations[0].SpaceAggregation.IsZero() {
+		return "", nil, errors.Newf(
+			errors.TypeInvalidInput,
+			errors.CodeInvalidInput,
+			"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`]",
+		)
+	}
 	sb := sqlbuilder.NewSelectBuilder()

 	sb.Select("ts")
@@ -365,5 +382,5 @@ func (b *meterQueryStatementBuilder) buildSpatialAggregationCTE(
 	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)

 	q, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
-	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args
+	return fmt.Sprintf("__spatial_aggregation_cte AS (%s)", q), args, nil
 }
@@ -51,7 +51,7 @@ func TestStatementBuilder(t *testing.T) {
 				},
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, If((per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1, 0) OVER rate_window) / (ts - lagInFrame(ts, 1, toDateTime(fromUnixTimestamp64Milli(1747785600000))) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
+				Query: "WITH __temporal_aggregation_cte AS (SELECT ts, `service.name`, multiIf(row_number() OVER rate_window = 1, nan, (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) < 0, per_series_value / (ts - lagInFrame(ts, 1) OVER rate_window), (per_series_value - lagInFrame(per_series_value, 1) OVER rate_window) / (ts - lagInFrame(ts, 1) OVER rate_window)) AS per_series_value FROM (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, max(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts) WINDOW rate_window AS (PARTITION BY fingerprint ORDER BY fingerprint, ts)), __spatial_aggregation_cte AS (SELECT ts, `service.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
 				Args: []any{"signoz_calls_total", uint64(1747785600000), uint64(1747983420000), "cartservice", "cumulative", 0},
 			},
 			expectedErr: nil,
@@ -84,7 +84,7 @@ func TestStatementBuilder(t *testing.T) {
 				},
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
+				Query: "WITH __spatial_aggregation_cte AS (SELECT toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
 				Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta"},
 			},
 			expectedErr: nil,
@@ -117,7 +117,7 @@ func TestStatementBuilder(t *testing.T) {
 				},
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte",
+				Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'service.name') AS `service.name`, sum(value)/86400 AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'service.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `service.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `service.name`, avg(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `service.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `service.name`, ts",
 				Args: []any{"signoz_calls_total", uint64(1747872000000), uint64(1747983420000), "cartservice", "delta", 0},
 			},
 			expectedErr: nil,
@@ -150,7 +150,7 @@ func TestStatementBuilder(t *testing.T) {
 				},
 			},
 			expected: qbtypes.Statement{
-				Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte",
+				Query: "WITH __temporal_aggregation_cte AS (SELECT fingerprint, toStartOfInterval(toDateTime(intDiv(unix_milli, 1000)), toIntervalSecond(86400)) AS ts, JSONExtractString(labels, 'host.name') AS `host.name`, avg(value) AS per_series_value FROM signoz_meter.distributed_samples AS points WHERE metric_name IN (?) AND unix_milli >= ? AND unix_milli < ? AND JSONExtractString(labels, 'host.name') = ? AND LOWER(temporality) LIKE LOWER(?) GROUP BY fingerprint, ts, `host.name` ORDER BY fingerprint, ts), __spatial_aggregation_cte AS (SELECT ts, `host.name`, sum(per_series_value) AS value FROM __temporal_aggregation_cte WHERE isNaN(per_series_value) = ? GROUP BY ts, `host.name`) SELECT * FROM __spatial_aggregation_cte ORDER BY `host.name`, ts",
 				Args: []any{"system.memory.usage", uint64(1747872000000), uint64(1747983420000), "big-data-node-1", "unspecified", 0},
 			},
 			expectedErr: nil,
@@ -3,6 +3,7 @@ package telemetrymeter
 import (
 	"time"

+	"github.com/SigNoz/signoz/pkg/errors"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 )

@@ -63,7 +64,7 @@ func AggregationColumnForSamplesTable(
 	temporality metrictypes.Temporality,
 	timeAggregation metrictypes.TimeAggregation,
 	tableHints *metrictypes.MetricTableHints,
-) string {
+) (string, error) {
 	tableName := WhichSamplesTableToUse(start, end, metricType, timeAggregation, tableHints)
 	var aggregationColumn string
 	switch temporality {
@@ -190,5 +191,13 @@ func AggregationColumnForSamplesTable(
 		}
 	}
-	return aggregationColumn
+
+	if aggregationColumn == "" {
+		return "", errors.Newf(
+			errors.TypeInvalidInput,
+			errors.CodeInvalidInput,
+			"invalid time aggregation, should be one of the following: [`latest`, `sum`, `avg`, `min`, `max`, `count`, `rate`, `increase`]",
+		)
+	}
+	return aggregationColumn, nil
 }
@@ -29,13 +29,7 @@ func (c *conditionBuilder) conditionFor(
 	sb *sqlbuilder.SelectBuilder,
 ) (string, error) {

-	switch operator {
-	case qbtypes.FilterOperatorContains,
-		qbtypes.FilterOperatorNotContains,
-		qbtypes.FilterOperatorILike,
-		qbtypes.FilterOperatorNotILike,
-		qbtypes.FilterOperatorLike,
-		qbtypes.FilterOperatorNotLike:
+	if operator.IsStringSearchOperator() {
 		value = querybuilder.FormatValueForContains(value)
 	}

@@ -44,6 +38,18 @@ func (c *conditionBuilder) conditionFor(
 		return "", err
 	}

+	// TODO(srikanthccv): use the same data type collision handling when metrics schemas are updated
+	switch v := value.(type) {
+	case float64:
+		tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
+	case []any:
+		if len(v) > 0 && (operator == qbtypes.FilterOperatorBetween || operator == qbtypes.FilterOperatorNotBetween) {
+			if _, ok := v[0].(float64); ok {
+				tblFieldName = fmt.Sprintf("toFloat64OrNull(%s)", tblFieldName)
+			}
+		}
+	}
+
 	switch operator {
 	case qbtypes.FilterOperatorEqual:
 		return sb.E(tblFieldName, value), nil
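The effect is that numeric filters against string-typed label columns are coerced through ClickHouse's toFloat64OrNull before comparison, so an equality filter on a numeric value ends up as something like `toFloat64OrNull(JSONExtractString(labels, 'http.status_code')) = ?` (the column expression here is illustrative). Rows whose label value is not parseable as a number become NULL and simply drop out of the comparison instead of raising a type error.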
Some files were not shown because too many files have changed in this diff.