mirror of https://github.com/SigNoz/signoz.git
synced 2026-04-28 14:40:32 +01:00

Compare commits
11 Commits: issue_4785 ... feat/billi

| SHA1 |
|---|
| f06577ba97 |
| 7b77e458fe |
| 78e3916aea |
| cd56c9efea |
| a4266fa703 |
| 49419349f5 |
| 5af4ff84a3 |
| bcfecaaaff |
| 86f2bcb2f3 |
| 21b3fcc6ac |
| ccd2e0ffdd |
@@ -2933,8 +2933,8 @@ components:
       type: object
     PromotetypesWrappedIndex:
       properties:
-        column_type:
-          type: string
+        fieldDataType:
+          $ref: '#/components/schemas/TelemetrytypesFieldDataType'
         granularity:
           type: integer
         type:
@@ -5,12 +5,7 @@ import (
 	"fmt"
 	"net/http"
 
-	"github.com/SigNoz/signoz/ee/query-service/constants"
 	"github.com/SigNoz/signoz/ee/query-service/model"
-	"github.com/SigNoz/signoz/pkg/flagger"
-	"github.com/SigNoz/signoz/pkg/types/authtypes"
-	"github.com/SigNoz/signoz/pkg/types/featuretypes"
-	"github.com/SigNoz/signoz/pkg/valuer"
 )
 
 type DayWiseBreakdown struct {

@@ -70,53 +65,18 @@ func (ah *APIHandler) getBilling(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	claims, err := authtypes.ClaimsFromContext(r.Context())
+	data, err := ah.Signoz.Zeus.GetMeters(r.Context(), licenseKey)
 	if err != nil {
 		RespondError(w, model.InternalError(err), nil)
 		return
 	}
 
-	orgID := valuer.MustNewUUID(claims.OrgID)
-	evalCtx := featuretypes.NewFlaggerEvaluationContext(orgID)
-	useZeus := ah.Signoz.Flagger.BooleanOrEmpty(r.Context(), flagger.FeatureGetMetersFromZeus, evalCtx)
-
-	if useZeus {
-		data, err := ah.Signoz.Zeus.GetMeters(r.Context(), licenseKey)
-		if err != nil {
-			RespondError(w, model.InternalError(err), nil)
-			return
-		}
-
-		var billing billingData
-		if err := json.Unmarshal(data, &billing); err != nil {
-			RespondError(w, model.InternalError(err), nil)
-			return
-		}
-
-		ah.Respond(w, billing)
-		return
-	}
-
-	billingURL := fmt.Sprintf("%s/usage?licenseKey=%s", constants.LicenseSignozIo, licenseKey)
-
-	hClient := &http.Client{}
-	req, err := http.NewRequest("GET", billingURL, nil)
-	if err != nil {
-		RespondError(w, model.InternalError(err), nil)
-		return
-	}
-	req.Header.Add("X-SigNoz-SecretKey", constants.LicenseAPIKey)
-	billingResp, err := hClient.Do(req)
-	if err != nil {
-		RespondError(w, model.InternalError(err), nil)
-		return
-	}
-
-	var billingResponse billingDetails
-	if err := json.NewDecoder(billingResp.Body).Decode(&billingResponse); err != nil {
+	var billing billingData
+	if err := json.Unmarshal(data, &billing); err != nil {
 		RespondError(w, model.InternalError(err), nil)
 		return
 	}
 
-	ah.Respond(w, billingResponse.Data)
+	ah.Respond(w, billing)
 	return
 }
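Taken together, the two hunks above remove the `get_meters_from_zeus` gate and the legacy `/usage` fallback: Zeus becomes the only source of meter data, which also lets the handler drop the claims/orgID lookup. A minimal sketch of the resulting control flow, with stand-in types (`metersFetcher` and the `billTotal` field are assumptions for illustration, not the actual SigNoz definitions):

```go
package main

import (
	"context"
	"encoding/json"
	"fmt"
)

// metersFetcher stands in for the Zeus client's GetMeters method.
type metersFetcher func(ctx context.Context, licenseKey string) ([]byte, error)

// billingData mirrors the shape the handler unmarshals into (field name assumed).
type billingData struct {
	BillTotal float64 `json:"billTotal"`
}

// getBilling is the single remaining code path: fetch raw meter bytes,
// unmarshal, and hand the result back to the responder.
func getBilling(ctx context.Context, fetch metersFetcher, licenseKey string) (*billingData, error) {
	raw, err := fetch(ctx, licenseKey)
	if err != nil {
		return nil, err
	}
	var billing billingData
	if err := json.Unmarshal(raw, &billing); err != nil {
		return nil, err
	}
	return &billing, nil
}

func main() {
	fake := func(context.Context, string) ([]byte, error) {
		return []byte(`{"billTotal": 42.5}`), nil
	}
	b, err := getBilling(context.Background(), fake, "license-key")
	fmt.Println(b, err) // &{42.5} <nil>
}
```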
@@ -3701,10 +3701,7 @@ export interface PromotetypesPromotePathDTO {
 }
 
 export interface PromotetypesWrappedIndexDTO {
-	/**
-	 * @type string
-	 */
-	column_type?: string;
+	fieldDataType?: TelemetrytypesFieldDataTypeDTO;
 	/**
 	 * @type integer
 	 */
@@ -10,6 +10,7 @@ import cx from 'classnames';
 import { LogType } from 'components/Logs/LogStateIndicator/LogStateIndicator';
 import QuerySearch from 'components/QueryBuilderV2/QueryV2/QuerySearch/QuerySearch';
 import { convertExpressionToFilters } from 'components/QueryBuilderV2/utils';
+import { FeatureKeys } from 'constants/features';
 import { LOCALSTORAGE } from 'constants/localStorage';
 import { QueryParams } from 'constants/query';
 import { initialQueriesMap, PANEL_TYPES } from 'constants/queryBuilder';

@@ -46,6 +47,7 @@ import {
 	TextSelect,
 	X,
 } from 'lucide-react';
+import { useAppContext } from 'providers/App/App';
 import { AppState } from 'store/reducers';
 import { Query, TagFilter } from 'types/api/queryBuilder/queryBuilderData';
 import { DataSource, StringOperators } from 'types/common/queryBuilder';

@@ -79,6 +81,10 @@ function LogDetailInner({
 	const [selectedView, setSelectedView] = useState<VIEWS>(selectedTab);
 
 	const [isFilterVisible, setIsFilterVisible] = useState<boolean>(false);
+	const { featureFlags } = useAppContext();
+	const isBodyJsonQueryEnabled =
+		featureFlags?.find((flag) => flag.name === FeatureKeys.BODY_JSON_ENABLED)
+			?.active || false;
 
 	const [filters, setFilters] = useState<TagFilter | null>(null);
 	const [isEdit, setIsEdit] = useState<boolean>(false);

@@ -208,11 +214,29 @@ function LogDetailInner({
 		}
 	};
 
+	const logBody = useMemo(() => {
+		if (!isBodyJsonQueryEnabled) {
+			return log?.body || '';
+		}
+
+		try {
+			const json = JSON.parse(log?.body || '');
+
+			if (typeof json?.message === 'string' && json.message !== '') {
+				return json.message;
+			}
+
+			return log?.body || '';
+		} catch (error) {
+			return log?.body || '';
+		}
+	}, [isBodyJsonQueryEnabled, log?.body]);
+
 	const htmlBody = useMemo(
 		() => ({
-			__html: getSanitizedLogBody(log?.body || '', { shouldEscapeHtml: true }),
+			__html: getSanitizedLogBody(logBody || '', { shouldEscapeHtml: true }),
 		}),
-		[log?.body],
+		[logBody],
 	);
 
 	const handleJSONCopy = (): void => {

@@ -418,7 +442,7 @@ function LogDetailInner({
 			<div className="log-detail-drawer__log">
 				<Divider type="vertical" className={cx('log-type-indicator', logType)} />
 				<Tooltip
-					title={removeEscapeCharacters(log?.body)}
+					title={removeEscapeCharacters(logBody)}
 					placement="left"
 					mouseLeaveDelay={0}
 				>
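The new `logBody` memo changes only what is displayed, not what is stored: when the body-JSON feature is on and the body parses as JSON with a non-empty string `message`, the drawer shows that message; otherwise it falls back to the raw body. The same selection rule in isolation (a sketch; `displayBody` is a hypothetical helper, the real logic runs in TypeScript):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// displayBody prefers the JSON "message" field when the feature is enabled,
// and otherwise returns the raw body unchanged.
func displayBody(raw string, bodyJSONEnabled bool) string {
	if !bodyJSONEnabled {
		return raw
	}
	var parsed map[string]any
	if err := json.Unmarshal([]byte(raw), &parsed); err != nil {
		return raw // not JSON: show the body as-is
	}
	if msg, ok := parsed["message"].(string); ok && msg != "" {
		return msg
	}
	return raw
}

func main() {
	fmt.Println(displayBody(`{"message":"db timeout","level":"error"}`, true)) // db timeout
	fmt.Println(displayBody("plain text line", true))                          // plain text line
}
```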
@@ -9,4 +9,5 @@ export enum FeatureKeys {
 	ANOMALY_DETECTION = 'anomaly_detection',
 	ONBOARDING_V3 = 'onboarding_v3',
 	DOT_METRICS_ENABLED = 'dot_metrics_enabled',
+	BODY_JSON_ENABLED = 'body_json_enabled',
 }
@@ -8,8 +8,19 @@ import {
 	OPERATORS,
 	QUERY_BUILDER_FUNCTIONS,
 } from 'constants/antlrQueryConstants';
+import { FeatureKeys } from 'constants/features';
+import { QueryParams } from 'constants/query';
 import { useActiveLog } from 'hooks/logs/useActiveLog';
+import { useGetSearchQueryParam } from 'hooks/queryBuilder/useGetSearchQueryParam';
+import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
+import { ICurrentQueryData } from 'hooks/useHandleExplorerTabChange';
 import { useNotifications } from 'hooks/useNotifications';
+import { ExplorerViews } from 'pages/LogsExplorer/utils';
+import { useAppContext } from 'providers/App/App';
+import {
+	BaseAutocompleteData,
+	DataTypes,
+} from 'types/api/queryBuilder/queryAutocompleteResponse';
 
 import { TitleWrapper } from './BodyTitleRenderer.styles';
 import { DROPDOWN_KEY } from './constant';

@@ -25,17 +36,32 @@ function BodyTitleRenderer({
 	parentIsArray = false,
 	nodeKey,
 	value,
+	handleChangeSelectedView,
 }: BodyTitleRendererProps): JSX.Element {
 	const { onAddToQuery } = useActiveLog();
+	const { stagedQuery, updateQueriesData } = useQueryBuilder();
+
+	const { featureFlags } = useAppContext();
 	const [, setCopy] = useCopyToClipboard();
 	const { notifications } = useNotifications();
+	const viewName = useGetSearchQueryParam(QueryParams.viewName) || '';
+
+	const cleanedNodeKey = removeObjectFromString(nodeKey);
+	const isBodyJsonQueryEnabled =
+		featureFlags?.find((flag) => flag.name === FeatureKeys.BODY_JSON_ENABLED)
+			?.active || false;
+
+	// Group by is supported only when body JSON querying is enabled, and not for array elements
+	const isGroupBySupported =
+		isBodyJsonQueryEnabled && !cleanedNodeKey.includes('[]');
 
 	const filterHandler = (isFilterIn: boolean) => (): void => {
 		if (parentIsArray) {
 			onAddToQuery(
 				generateFieldKeyForArray(
-					removeObjectFromString(nodeKey),
+					cleanedNodeKey,
 					getDataTypes(value),
+					isBodyJsonQueryEnabled,
 				),
 				`${value}`,
 				isFilterIn

@@ -45,7 +71,7 @@ function BodyTitleRenderer({
 			);
 		} else {
 			onAddToQuery(
-				`body.${removeObjectFromString(nodeKey)}`,
+				`body.${cleanedNodeKey}`,
 				`${value}`,
 				isFilterIn ? OPERATORS['='] : OPERATORS['!='],
 				getDataTypes(value),

@@ -53,10 +79,67 @@ function BodyTitleRenderer({
 		}
 	};
 
+	const groupByHandler = useCallback((): void => {
+		if (!stagedQuery) {
+			return;
+		}
+
+		const groupByKey = parentIsArray
+			? generateFieldKeyForArray(
+					cleanedNodeKey,
+					getDataTypes(value),
+					isBodyJsonQueryEnabled,
+			  )
+			: `body.${cleanedNodeKey}`;
+
+		const fieldDataType = getDataTypes(value);
+		const normalizedDataType: DataTypes | undefined = Object.values(
+			DataTypes,
+		).includes(fieldDataType as DataTypes)
+			? (fieldDataType as DataTypes)
+			: undefined;
+
+		const updatedQuery = updateQueriesData(
+			stagedQuery,
+			'queryData',
+			(item, index) => {
+				if (index === 0) {
+					const newGroupByItem: BaseAutocompleteData = {
+						key: groupByKey,
+						type: '',
+						dataType: normalizedDataType,
+					};
+
+					return { ...item, groupBy: [...(item.groupBy || []), newGroupByItem] };
+				}
+
+				return item;
+			},
+		);
+
+		const queryData: ICurrentQueryData = {
+			name: viewName,
+			id: updatedQuery.id,
+			query: updatedQuery,
+		};
+
+		handleChangeSelectedView?.(ExplorerViews.TIMESERIES, queryData);
+	}, [
+		cleanedNodeKey,
+		handleChangeSelectedView,
+		isBodyJsonQueryEnabled,
+		parentIsArray,
+		stagedQuery,
+		updateQueriesData,
+		value,
+		viewName,
+	]);
+
 	const onClickHandler: MenuProps['onClick'] = (props): void => {
 		const mapper = {
 			[DROPDOWN_KEY.FILTER_IN]: filterHandler(true),
 			[DROPDOWN_KEY.FILTER_OUT]: filterHandler(false),
+			[DROPDOWN_KEY.GROUP_BY]: groupByHandler,
 		};
 
 		const handler = mapper[props.key];

@@ -76,6 +159,14 @@ function BodyTitleRenderer({
 			key: DROPDOWN_KEY.FILTER_OUT,
 			label: `Filter out ${value}`,
 		},
+		...(isGroupBySupported
+			? [
+					{
+						key: DROPDOWN_KEY.GROUP_BY,
+						label: `Group by ${nodeKey}`,
+					},
+			  ]
+			: []),
 	],
 	onClick: onClickHandler,
 };

@@ -84,7 +175,6 @@ function BodyTitleRenderer({
 		(e: React.MouseEvent): void => {
 			// Prevent tree node expansion/collapse
 			e.stopPropagation();
-			const cleanedKey = removeObjectFromString(nodeKey);
 			let copyText: string;
 
 			// Check if value is an object or array

@@ -106,8 +196,8 @@ function BodyTitleRenderer({
 
 			if (copyText) {
 				const notificationMessage = isObject
-					? `${cleanedKey} object copied to clipboard`
-					: `${cleanedKey} copied to clipboard`;
+					? `${cleanedNodeKey} object copied to clipboard`
+					: `${cleanedNodeKey} copied to clipboard`;
 
 				notifications.success({
 					message: notificationMessage,

@@ -115,7 +205,7 @@ function BodyTitleRenderer({
 			});
 		}
 	},
-	[nodeKey, parentIsArray, setCopy, value, notifications],
+	[cleanedNodeKey, parentIsArray, setCopy, value, notifications],
 );
 
 return (
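The group-by menu entry above is gated twice: the body-JSON feature must be on, and the cleaned node key must not address an array element. A compact restatement of that gate (a sketch; `groupBySupported` is a made-up name):

```go
package main

import (
	"fmt"
	"strings"
)

// groupBySupported mirrors the isGroupBySupported check: group-by requires
// the body-JSON feature and is not offered for keys containing "[]".
func groupBySupported(bodyJSONEnabled bool, cleanedKey string) bool {
	return bodyJSONEnabled && !strings.Contains(cleanedKey, "[]")
}

func main() {
	fmt.Println(groupBySupported(true, "config.retries"))       // true
	fmt.Println(groupBySupported(true, "config.features[].id")) // false: array element
	fmt.Println(groupBySupported(false, "config.retries"))      // false: feature off
}
```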
@@ -1,3 +1,4 @@
+import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
 import { MetricsType } from 'container/MetricsApplication/constant';
 import { ILog } from 'types/api/logs/log';
 

@@ -6,6 +7,7 @@ export interface BodyTitleRendererProps {
 	nodeKey: string;
 	value: unknown;
 	parentIsArray?: boolean;
+	handleChangeSelectedView?: ChangeViewFunctionType;
 }
 
 export type AnyObject = { [key: string]: any };
@@ -2,6 +2,7 @@ import React, { useCallback, useMemo, useState } from 'react';
 import { useLocation } from 'react-router-dom';
 import { Color } from '@signozhq/design-tokens';
 import { Button, Popover, Spin, Tooltip, Tree } from 'antd';
+import type { DataNode } from 'antd/es/tree';
 import GroupByIcon from 'assets/CustomIcons/GroupByIcon';
 import cx from 'classnames';
 import CopyClipboardHOC from 'components/Logs/CopyClipboardHOC';

@@ -57,7 +58,7 @@ interface ITableViewActionsProps {
 }
 
 // Memoized Tree Component
-const MemoizedTree = React.memo<{ treeData: any[] }>(({ treeData }) => (
+const MemoizedTree = React.memo<{ treeData: DataNode[] }>(({ treeData }) => (
 	<Tree
 		defaultExpandAll
 		showLine

@@ -74,50 +75,54 @@ const BodyContent: React.FC<{
 	record: DataType;
 	bodyHtml: { __html: string };
 	textToCopy: string;
-}> = React.memo(({ fieldData, record, bodyHtml, textToCopy }) => {
-	const { isLoading, treeData, error } = useAsyncJSONProcessing(
-		fieldData.value,
-		record.field === 'body',
-	);
-
-	// Show JSON tree if available, otherwise show HTML content
-	if (record.field === 'body' && treeData) {
-		return <MemoizedTree treeData={treeData} />;
-	}
-
-	if (record.field === 'body' && isLoading) {
-		return (
-			<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
-				<Spin size="small" />
-				<span style={{ color: Color.BG_SIENNA_400 }}>Processing JSON...</span>
-			</div>
-		);
-	}
-
-	if (record.field === 'body' && error) {
-		return (
-			<span
-				style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
-			>
-				Error parsing Body JSON
-			</span>
-		);
-	}
-
-	if (record.field === 'body') {
-		return (
-			<CopyClipboardHOC entityKey="body" textToCopy={textToCopy}>
-				<span
-					style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
-				>
-					<span dangerouslySetInnerHTML={bodyHtml} />
-				</span>
-			</CopyClipboardHOC>
-		);
-	}
-
-	return null;
-});
+	handleChangeSelectedView?: ChangeViewFunctionType;
+}> = React.memo(
+	({ fieldData, record, bodyHtml, textToCopy, handleChangeSelectedView }) => {
+		const { isLoading, treeData, error } = useAsyncJSONProcessing(
+			fieldData.value,
+			record.field === 'body',
+			handleChangeSelectedView,
+		);
+
+		// Show JSON tree if available, otherwise show HTML content
+		if (record.field === 'body' && treeData) {
+			return <MemoizedTree treeData={treeData} />;
+		}
+
+		if (record.field === 'body' && isLoading) {
+			return (
+				<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
+					<Spin size="small" />
+					<span style={{ color: Color.BG_SIENNA_400 }}>Processing JSON...</span>
+				</div>
+			);
+		}
+
+		if (record.field === 'body' && error) {
+			return (
+				<span
+					style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
+				>
+					Error parsing Body JSON
+				</span>
+			);
+		}
+
+		if (record.field === 'body') {
+			return (
+				<CopyClipboardHOC entityKey="body" textToCopy={textToCopy}>
+					<span
+						style={{ color: Color.BG_SIENNA_400, whiteSpace: 'pre-wrap', tabSize: 4 }}
+					>
+						<span dangerouslySetInnerHTML={bodyHtml} />
+					</span>
+				</CopyClipboardHOC>
+			);
+		}
+
+		return null;
+	},
+);
 
 BodyContent.displayName = 'BodyContent';

@@ -319,6 +324,7 @@ export default function TableViewActions(
 				record={record}
 				bodyHtml={bodyHtml}
 				textToCopy={textToCopy}
+				handleChangeSelectedView={handleChangeSelectedView}
 			/>
 		);

@@ -342,6 +348,7 @@ export default function TableViewActions(
 		fieldData,
 		bodyHtml,
 		textToCopy,
+		handleChangeSelectedView,
 		formatTimezoneAdjustedTimestamp,
 		cleanTimestamp,
 	]);

@@ -355,6 +362,7 @@ export default function TableViewActions(
 				record={record}
 				bodyHtml={bodyHtml}
 				textToCopy={textToCopy}
+				handleChangeSelectedView={handleChangeSelectedView}
 			/>
 			{!isListViewPanel &&
 				!RESTRICTED_SELECTED_FIELDS.includes(fieldFilterKey) && (
@@ -1,5 +1,8 @@
 import { useEffect, useRef, useState } from 'react';
+import { FeatureKeys } from 'constants/features';
+import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
 import { isEmpty } from 'lodash-es';
+import { useAppContext } from 'providers/App/App';
 
 import { jsonToDataNodes, recursiveParseJSON } from '../utils';

@@ -9,6 +12,7 @@ const MAX_BODY_BYTES = 100 * 1024; // 100 KB
 const useAsyncJSONProcessing = (
 	value: string,
 	shouldProcess: boolean,
+	handleChangeSelectedView?: ChangeViewFunctionType,
 ): {
 	isLoading: boolean;
 	treeData: any[] | null;

@@ -25,6 +29,10 @@ const useAsyncJSONProcessing = (
 	});
 
 	const processingRef = useRef<boolean>(false);
+	const { featureFlags } = useAppContext();
+	const isBodyJsonQueryEnabled =
+		featureFlags?.find((flag) => flag.name === FeatureKeys.BODY_JSON_ENABLED)
+			?.active || false;
 
 	// eslint-disable-next-line sonarjs/cognitive-complexity
 	useEffect((): (() => void) => {

@@ -47,7 +55,10 @@ const useAsyncJSONProcessing = (
 			try {
 				const parsedBody = recursiveParseJSON(value);
 				if (!isEmpty(parsedBody)) {
-					const treeData = jsonToDataNodes(parsedBody);
+					const treeData = jsonToDataNodes(parsedBody, {
+						isBodyJsonQueryEnabled,
+						handleChangeSelectedView,
+					});
 					setJsonState({ isLoading: false, treeData, error: null });
 				} else {
 					setJsonState({ isLoading: false, treeData: null, error: null });

@@ -73,7 +84,10 @@ const useAsyncJSONProcessing = (
 			try {
 				const parsedBody = recursiveParseJSON(value);
 				if (!isEmpty(parsedBody)) {
-					const treeData = jsonToDataNodes(parsedBody);
+					const treeData = jsonToDataNodes(parsedBody, {
+						isBodyJsonQueryEnabled,
+						handleChangeSelectedView,
+					});
 					setJsonState({ isLoading: false, treeData, error: null });
 				} else {
 					setJsonState({ isLoading: false, treeData: null, error: null });

@@ -101,7 +115,7 @@ const useAsyncJSONProcessing = (
 		return (): void => {
 			processingRef.current = false;
 		};
-	}, [value, shouldProcess]);
+	}, [value, shouldProcess, isBodyJsonQueryEnabled, handleChangeSelectedView]);
 
 	return jsonState;
 };
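Note the `MAX_BODY_BYTES = 100 * 1024` constant in the hunk header: the hook presumably only attempts tree rendering for bodies under 100 KB and falls back to plain display otherwise. The same guard expressed standalone (a sketch under that assumption; `parseTree` is a hypothetical helper, and the real hook parses asynchronously in TypeScript):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

const maxBodyBytes = 100 * 1024 // 100 KB, matching MAX_BODY_BYTES

// parseTree refuses oversized bodies before paying the JSON-parsing cost.
func parseTree(body string) (map[string]any, error) {
	if len(body) > maxBodyBytes {
		return nil, errors.New("body too large to render as a tree")
	}
	var out map[string]any
	if err := json.Unmarshal([]byte(body), &out); err != nil {
		return nil, err
	}
	return out, nil
}

func main() {
	tree, err := parseTree(`{"a":1}`)
	fmt.Println(tree, err) // map[a:1] <nil>
}
```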
@@ -1,4 +1,5 @@
 export const DROPDOWN_KEY = {
 	FILTER_IN: 'filterIn',
 	FILTER_OUT: 'filterOut',
+	GROUP_BY: 'groupBy',
 };
@@ -1,5 +1,6 @@
 import Convert from 'ansi-to-html';
 import type { DataNode } from 'antd/es/tree';
+import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
 import { MetricsType } from 'container/MetricsApplication/constant';
 import dompurify from 'dompurify';
 import { uniqueId } from 'lodash-es';

@@ -34,13 +35,32 @@ export const recursiveParseJSON = (obj: string): Record<string, unknown> => {
 	}
 };
 
-export const computeDataNode = (
-	key: string,
-	valueIsArray: boolean,
-	value: unknown,
-	nodeKey: string,
-	parentIsArray: boolean,
-): DataNode => ({
+type JsonToDataNodesOptions = {
+	parentKey?: string;
+	parentIsArray?: boolean;
+	isBodyJsonQueryEnabled?: boolean;
+	handleChangeSelectedView?: ChangeViewFunctionType;
+};
+
+type ComputeDataNodeOptions = {
+	key: string;
+	valueIsArray: boolean;
+	value: unknown;
+	nodeKey: string;
+	parentIsArray: boolean;
+	isBodyJsonQueryEnabled?: boolean;
+	handleChangeSelectedView?: ChangeViewFunctionType;
+};
+
+export const computeDataNode = ({
+	key,
+	valueIsArray,
+	value,
+	nodeKey,
+	parentIsArray,
+	isBodyJsonQueryEnabled = false,
+	handleChangeSelectedView,
+}: ComputeDataNodeOptions): DataNode => ({
 	key: uniqueId(),
 	title: (
 		<BodyTitleRenderer

@@ -48,20 +68,30 @@ export const computeDataNode = (
 			nodeKey={nodeKey}
 			value={value}
 			parentIsArray={parentIsArray}
+			handleChangeSelectedView={handleChangeSelectedView}
 		/>
 	),
-	children: jsonToDataNodes(
-		value as Record<string, unknown>,
-		valueIsArray ? `${nodeKey}[*]` : nodeKey,
-		valueIsArray,
-	),
+	children: jsonToDataNodes(value as Record<string, unknown>, {
+		parentKey: valueIsArray
+			? `${nodeKey}${isBodyJsonQueryEnabled ? '[]' : '[*]'}`
+			: nodeKey,
+		parentIsArray: valueIsArray,
+		isBodyJsonQueryEnabled,
+		handleChangeSelectedView,
+	}),
 });
 
 export function jsonToDataNodes(
 	json: Record<string, unknown>,
-	parentKey = '',
-	parentIsArray = false,
+	options: JsonToDataNodesOptions = {},
 ): DataNode[] {
+	const {
+		parentKey = '',
+		parentIsArray = false,
+		isBodyJsonQueryEnabled = false,
+		handleChangeSelectedView,
+	} = options;
+
 	return Object.entries(json).map(([key, value]) => {
 		let nodeKey = parentKey || key;
 		if (parentIsArray) {

@@ -74,7 +104,15 @@ export function jsonToDataNodes(
 
 		if (parentIsArray) {
 			if (typeof value === 'object' && value !== null) {
-				return computeDataNode(key, valueIsArray, value, nodeKey, parentIsArray);
+				return computeDataNode({
+					key,
+					valueIsArray,
+					value,
+					nodeKey,
+					parentIsArray,
+					isBodyJsonQueryEnabled,
+					handleChangeSelectedView,
+				});
 			}
 
 			return {

@@ -85,14 +123,31 @@ export function jsonToDataNodes(
 						nodeKey={nodeKey}
 						value={value}
 						parentIsArray={parentIsArray}
+						handleChangeSelectedView={handleChangeSelectedView}
 					/>
 				),
-				children: jsonToDataNodes({}, nodeKey, valueIsArray),
+				children: jsonToDataNodes(
+					{},
+					{
+						parentKey: nodeKey,
+						parentIsArray: valueIsArray,
+						isBodyJsonQueryEnabled,
+						handleChangeSelectedView,
+					},
+				),
 			};
 		}
 
 		if (typeof value === 'object' && value !== null) {
-			return computeDataNode(key, valueIsArray, value, nodeKey, parentIsArray);
+			return computeDataNode({
+				key,
+				valueIsArray,
+				value,
+				nodeKey,
+				parentIsArray,
+				isBodyJsonQueryEnabled,
+				handleChangeSelectedView,
+			});
 		}
 		return {
 			key: uniqueId(),

@@ -102,6 +157,7 @@ export function jsonToDataNodes(
 					nodeKey={nodeKey}
 					value={value}
 					parentIsArray={parentIsArray}
+					handleChangeSelectedView={handleChangeSelectedView}
 				/>
 			),
 		};

@@ -123,6 +179,7 @@ export function flattenObject(obj: AnyObject, prefix = ''): AnyObject {
 export const generateFieldKeyForArray = (
 	fieldKey: string,
 	dataType: DataTypes,
+	isBodyJsonQueryEnabled = false,
 ): string => {
 	let lastDotIndex = fieldKey.lastIndexOf('.');
 	let resultNodeKey = fieldKey;

@@ -138,6 +195,16 @@ export const generateFieldKeyForArray = (
 			newResultNodeKey = resultNodeKey.substring(0, lastDotIndex);
 		}
 	}
+
+	// When filtering for a value inside an array, the query builder expects the
+	// last array segment to be referenced without the trailing `[]`.
+	// Examples:
+	//   has(body.config.features, 'fast_checkout')
+	//   has(body.config.features[].items, 'pen')
+	//   has(body.config.features[].items[].variants, 'ballpen')
+	if (isBodyJsonQueryEnabled && newResultNodeKey.endsWith('[]')) {
+		newResultNodeKey = newResultNodeKey.slice(0, -2);
+	}
 	return `body.${newResultNodeKey}`;
 };
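The comment block above fixes the shape of array filter keys: only the last array segment loses its trailing `[]`, so the key can sit inside `has(...)`. A standalone restatement of that normalization (a sketch; `arrayFilterKey` is a made-up name), which reproduces the documented examples:

```go
package main

import (
	"fmt"
	"strings"
)

// arrayFilterKey strips a single trailing "[]" (when the body-JSON feature
// is on) and prefixes the key with "body.", matching the comment's examples.
func arrayFilterKey(fieldKey string, bodyJSONEnabled bool) string {
	if bodyJSONEnabled && strings.HasSuffix(fieldKey, "[]") {
		fieldKey = strings.TrimSuffix(fieldKey, "[]")
	}
	return "body." + fieldKey
}

func main() {
	// has(body.config.features, 'fast_checkout')
	fmt.Println(arrayFilterKey("config.features[]", true))
	// has(body.config.features[].items, 'pen'): inner "[]" segments are kept
	fmt.Println(arrayFilterKey("config.features[].items[]", true))
}
```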
go.mod
@@ -8,7 +8,7 @@ require (
 	github.com/ClickHouse/clickhouse-go/v2 v2.40.1
 	github.com/DATA-DOG/go-sqlmock v1.5.2
 	github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
-	github.com/SigNoz/signoz-otel-collector v0.144.3-rc.4
+	github.com/SigNoz/signoz-otel-collector v0.144.3
 	github.com/antlr4-go/antlr/v4 v4.13.1
 	github.com/antonmedv/expr v1.15.3
 	github.com/cespare/xxhash/v2 v2.3.0
go.sum
@@ -108,8 +108,8 @@ github.com/SigNoz/expr v1.17.7-beta h1:FyZkleM5dTQ0O6muQfwGpoH5A2ohmN/XTasRCO72g
 github.com/SigNoz/expr v1.17.7-beta/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4=
 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd h1:Bk43AsDYe0fhkbj57eGXx8H3ZJ4zhmQXBnrW523ktj8=
 github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd/go.mod h1:nxRcH/OEdM8QxzH37xkGzomr1O0JpYBRS6pwjsWW6Pc=
-github.com/SigNoz/signoz-otel-collector v0.144.3-rc.4 h1:EskJkEMfuuIyArWhV8SleDV/fuKxiaEGTXrCZIFqDT4=
-github.com/SigNoz/signoz-otel-collector v0.144.3-rc.4/go.mod h1:9pLVpcIQvUT88ZiNnZN/MI+XLruvwC+Xm2UzPmGjNfA=
+github.com/SigNoz/signoz-otel-collector v0.144.3 h1:/7PPIqIpPsaWtrgnfHam2XVYP41ZlgEKLHzQO8oVxcA=
+github.com/SigNoz/signoz-otel-collector v0.144.3/go.mod h1:9pLVpcIQvUT88ZiNnZN/MI+XLruvwC+Xm2UzPmGjNfA=
 github.com/Yiling-J/theine-go v0.6.2 h1:1GeoXeQ0O0AUkiwj2S9Jc0Mzx+hpqzmqsJ4kIC4M9AY=
 github.com/Yiling-J/theine-go v0.6.2/go.mod h1:08QpMa5JZ2pKN+UJCRrCasWYO1IKCdl54Xa836rpmDU=
 github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
@@ -3,11 +3,10 @@ package flagger
 import "github.com/SigNoz/signoz/pkg/types/featuretypes"
 
 var (
-	FeatureUseSpanMetrics    = featuretypes.MustNewName("use_span_metrics")
-	FeatureKafkaSpanEval     = featuretypes.MustNewName("kafka_span_eval")
-	FeatureHideRootUser      = featuretypes.MustNewName("hide_root_user")
-	FeatureGetMetersFromZeus = featuretypes.MustNewName("get_meters_from_zeus")
-	FeaturePutMetersInZeus   = featuretypes.MustNewName("put_meters_in_zeus")
+	FeatureUseSpanMetrics  = featuretypes.MustNewName("use_span_metrics")
+	FeatureKafkaSpanEval   = featuretypes.MustNewName("kafka_span_eval")
+	FeatureHideRootUser    = featuretypes.MustNewName("hide_root_user")
+	FeaturePutMetersInZeus = featuretypes.MustNewName("put_meters_in_zeus")
 )
 
 func MustNewRegistry() featuretypes.Registry {

@@ -36,14 +35,6 @@ func MustNewRegistry() featuretypes.Registry {
 			DefaultVariant: featuretypes.MustNewName("disabled"),
 			Variants:       featuretypes.NewBooleanVariants(),
 		},
-		&featuretypes.Feature{
-			Name:           FeatureGetMetersFromZeus,
-			Kind:           featuretypes.KindBoolean,
-			Stage:          featuretypes.StageExperimental,
-			Description:    "Controls whether billing details are fetched from Zeus instead of the legacy subscriptions service",
-			DefaultVariant: featuretypes.MustNewName("disabled"),
-			Variants:       featuretypes.NewBooleanVariants(),
-		},
 		&featuretypes.Feature{
 			Name: FeaturePutMetersInZeus,
 			Kind: featuretypes.KindBoolean,
@@ -32,27 +32,17 @@ func NewModule(metadataStore telemetrytypes.MetadataStore, telemetrystore teleme
 }
 
 func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetypes.PromotePath, error) {
-	logsIndexes, err := m.metadataStore.ListLogsJSONIndexes(ctx)
+	indexes, err := m.metadataStore.ListLogsJSONIndexes(ctx)
 	if err != nil {
 		return nil, err
 	}
-	// Flatten the map values (which are slices) into a single slice
-	indexes := slices.Concat(slices.Collect(maps.Values(logsIndexes))...)
 
 	aggr := map[string][]promotetypes.WrappedIndex{}
 	for _, index := range indexes {
-		path, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
-		if err != nil {
-			return nil, err
-		}
-
-		// clean backticks from the path
-		path = strings.ReplaceAll(path, "`", "")
-
-		aggr[path] = append(aggr[path], promotetypes.WrappedIndex{
-			ColumnType:  columnType,
-			Type:        index.Type,
-			Granularity: index.Granularity,
+		aggr[index.Name] = append(aggr[index.Name], promotetypes.WrappedIndex{
+			FieldDataType: index.FieldDataType,
+			Type:          index.IndexType,
+			Granularity:   index.Granularity,
 		})
 	}
 	promotedPaths, err := m.listPromotedPaths(ctx)
@@ -627,27 +627,17 @@ func convertTimeSeriesDataToScalar(tsData *qbtypes.TimeSeriesData, queryName str
 		return &qbtypes.ScalarData{QueryName: queryName}
 	}
 
-	// Series can have ragged label sets; build the column schema from the
-	// union of all label keys (first-seen order) and fill rows by key lookup.
-	keyOrder := []telemetrytypes.TelemetryFieldKey{}
-	keyIndex := map[string]int{}
-	for _, series := range tsData.Aggregations[0].Series {
-		for _, label := range series.Labels {
-			if _, ok := keyIndex[label.Key.Name]; ok {
-				continue
-			}
-			keyIndex[label.Key.Name] = len(keyOrder)
-			keyOrder = append(keyOrder, label.Key)
-		}
-	}
-
-	columns := make([]*qbtypes.ColumnDescriptor, 0, len(keyOrder)+len(tsData.Aggregations))
-	for _, key := range keyOrder {
-		columns = append(columns, &qbtypes.ColumnDescriptor{
-			TelemetryFieldKey: key,
-			QueryName:         queryName,
-			Type:              qbtypes.ColumnTypeGroup,
-		})
+	columns := []*qbtypes.ColumnDescriptor{}
+
+	// Add group columns from first series
+	if len(tsData.Aggregations[0].Series) > 0 {
+		for _, label := range tsData.Aggregations[0].Series[0].Labels {
+			columns = append(columns, &qbtypes.ColumnDescriptor{
+				TelemetryFieldKey: label.Key,
+				QueryName:         queryName,
+				Type:              qbtypes.ColumnTypeGroup,
+			})
+		}
 	}
 
 	// Add aggregation columns

@@ -665,18 +655,18 @@ func convertTimeSeriesDataToScalar(tsData *qbtypes.TimeSeriesData, queryName str
 		})
 	}
 
-	// Build rows.
-	groupColCount := len(keyOrder)
+	// Build rows
 	data := [][]any{}
 	for seriesIdx, series := range tsData.Aggregations[0].Series {
 		row := make([]any, len(columns))
 
-		// Place each label under its key's column (by lookup, not index).
-		for _, label := range series.Labels {
-			row[keyIndex[label.Key.Name]] = label.Value
+		// Add group values
+		for i, label := range series.Labels {
+			row[i] = label.Value
 		}
 
 		// Add aggregation values (last value)
+		groupColCount := len(series.Labels)
 		for aggIdx, agg := range tsData.Aggregations {
 			if seriesIdx < len(agg.Series) && len(agg.Series[seriesIdx].Values) > 0 {
 				lastValue := agg.Series[seriesIdx].Values[len(agg.Series[seriesIdx].Values)-1].Value
@@ -1,53 +0,0 @@
-package querier
-
-import (
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-	"github.com/stretchr/testify/require"
-
-	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
-	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
-)
-
-// Multiple series with different number of labels, shouldn't panic and should align labels correctly.
-func TestConvertTimeSeriesDataToScalar_RaggedLabels(t *testing.T) {
-	label := func(name string, value any) *qbtypes.Label {
-		return &qbtypes.Label{
-			Key:   telemetrytypes.TelemetryFieldKey{Name: name},
-			Value: value,
-		}
-	}
-	series := func(labels []*qbtypes.Label, value float64) *qbtypes.TimeSeries {
-		return &qbtypes.TimeSeries{
-			Labels: labels,
-			Values: []*qbtypes.TimeSeriesValue{{Timestamp: 1, Value: value}},
-		}
-	}
-
-	tsData := &qbtypes.TimeSeriesData{
-		QueryName: "A",
-		Aggregations: []*qbtypes.AggregationBucket{{
-			Index: 0,
-			Series: []*qbtypes.TimeSeries{
-				series([]*qbtypes.Label{label("label_1", "orphan-0")}, 20),
-				series([]*qbtypes.Label{label("label_1", "box-0"), label("label_2", "rpc-0")}, 10),
-			},
-		}},
-	}
-
-	var sd *qbtypes.ScalarData
-	require.NotPanics(t, func() {
-		sd = convertTimeSeriesDataToScalar(tsData, "A")
-	})
-
-	require.NotNil(t, sd)
-	require.Len(t, sd.Columns, 3)
-	assert.Equal(t, "label_1", sd.Columns[0].Name)
-	assert.Equal(t, "label_2", sd.Columns[1].Name)
-	assert.Equal(t, "__result_0", sd.Columns[2].Name)
-
-	require.Len(t, sd.Data, 2)
-	assert.Equal(t, []any{"orphan-0", nil, 20.0}, sd.Data[0])
-	assert.Equal(t, []any{"box-0", "rpc-0", 10.0}, sd.Data[1])
-}
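The deleted test pinned down the ragged-label case: series whose label sets differ in size. With columns taken from the first series and values assigned positionally (`row[i] = label.Value`), a series that lacks an early label shifts its values into the wrong column, while the removed union-plus-lookup approach kept them aligned. A self-contained illustration of the difference (not SigNoz code):

```go
package main

import "fmt"

type label struct {
	key   string
	value any
}

// lookupRow places each value under its key's column, using an index built
// from the union of all label keys across series.
func lookupRow(labels []label, keyIndex map[string]int, width int) []any {
	row := make([]any, width)
	for _, l := range labels {
		row[keyIndex[l.key]] = l.value
	}
	return row
}

// positionalRow trusts each series' own label order and count.
func positionalRow(labels []label, width int) []any {
	row := make([]any, width)
	for i, l := range labels {
		row[i] = l.value
	}
	return row
}

func main() {
	// Columns: label_1, label_2, __result_0 (as in the deleted test).
	keyIndex := map[string]int{"label_1": 0, "label_2": 1}
	ragged := []label{{"label_2", "rpc-0"}} // this series has no label_1

	fmt.Println(lookupRow(ragged, keyIndex, 3)) // [<nil> rpc-0 <nil>]
	fmt.Println(positionalRow(ragged, 3))       // [rpc-0 <nil> <nil>]: value lands under label_1
}
```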
@@ -204,7 +204,7 @@ func AdjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemet
 	// Downstream query builder should handle multiple matching keys with their own metadata
 	// and not rely on this function to do so.
 	materialized := true
-	indexes := []telemetrytypes.JSONDataTypeIndex{}
+	indexes := []telemetrytypes.TelemetryFieldKeySkipIndex{}
 	fieldContextsSeen := map[telemetrytypes.FieldContext]bool{}
 	dataTypesSeen := map[telemetrytypes.FieldDataType]bool{}
 	for _, matchingKey := range matchingKeys {
@@ -195,8 +195,8 @@ func (c *jsonConditionBuilder) buildPrimitiveTerminalCondition(node *telemetryty
 	// the field genuinely holds the empty/zero value.
 	//
 	// Note: indexing is also skipped for Array Nested fields because they cannot be indexed.
-	indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.JSONDataTypeIndex) bool {
-		return index.Type == node.TerminalConfig.ElemType
+	indexed := slices.ContainsFunc(node.TerminalConfig.Key.Indexes, func(index telemetrytypes.TelemetryFieldKeySkipIndex) bool {
+		return telemetrytypes.MappingFieldDataTypeToJSONDataType[index.FieldDataType] == node.TerminalConfig.ElemType
 	})
 	isExistsCheck := operator == qbtypes.FilterOperatorExists || operator == qbtypes.FilterOperatorNotExists
 	if node.TerminalConfig.ElemType.IndexSupported && indexed && !isExistsCheck {
@@ -1127,9 +1127,12 @@ func buildTestTelemetryMetadataStore(t *testing.T, addIndexes bool) *telemetryty
 			return entry.Path == path && entry.Type == jsonType
 		})
 		if idx >= 0 {
-			key.Indexes = append(key.Indexes, telemetrytypes.JSONDataTypeIndex{
-				Type:             jsonType,
-				ColumnExpression: schemamigrator.JSONSubColumnIndexExpr(LogsV2BodyV2Column, path, jsonType.StringValue()),
+			key.Indexes = append(key.Indexes, telemetrytypes.TelemetryFieldKeySkipIndex{
+				Name:            path,
+				FieldContext:    telemetrytypes.FieldContextBody,
+				FieldDataType:   fdt,
+				BaseColumn:      LogsV2BodyV2Column,
+				IndexExpression: schemamigrator.JSONSubColumnIndexExpr(LogsV2BodyV2Column, path, jsonType.StringValue()),
 			})
 		}
 	}
@@ -106,7 +106,7 @@ func (t *telemetryMetaStore) buildJSONPlans(keys []*telemetrytypes.TelemetryFiel
 	return nil
 }
 
-func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.JSONDataTypeIndex, error) {
+func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...string) (map[string][]telemetrytypes.TelemetryFieldKeySkipIndex, error) {
 	filteredPaths := []string{}
 	for _, path := range paths {
 		// skip array paths, since they don't have any indexes

@@ -116,47 +116,22 @@ func (t *telemetryMetaStore) getJSONPathIndexes(ctx context.Context, paths ...st
 		filteredPaths = append(filteredPaths, path)
 	}
 	if len(filteredPaths) == 0 {
-		return make(map[string][]telemetrytypes.JSONDataTypeIndex), nil
+		return make(map[string][]telemetrytypes.TelemetryFieldKeySkipIndex), nil
 	}
 
 	// list indexes for the paths
-	indexesMap, err := t.ListLogsJSONIndexes(ctx, filteredPaths...)
+	indexes, err := t.ListLogsJSONIndexes(ctx, filteredPaths...)
 	if err != nil {
 		return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to list JSON path indexes")
 	}
 
-	// build a set of indexes
-	cleanIndexes := make(map[string][]telemetrytypes.JSONDataTypeIndex)
-	for path, indexes := range indexesMap {
-		for _, index := range indexes {
-			columnExpr, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(index.Expression)
-			if err != nil {
-				return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to unfold JSON sub column index expression: %s", index.Expression)
-			}
-
-			jsonDataType, found := telemetrytypes.MappingStringToJSONDataType[columnType]
-			if !found {
-				t.logger.ErrorContext(ctx, "failed to map column type to JSON data type", slog.String("column_type", columnType), slog.String("column_expr", columnExpr))
-				continue
-			}
-
-			if jsonDataType == telemetrytypes.String {
-				cleanIndexes[path] = append(cleanIndexes[path], telemetrytypes.JSONDataTypeIndex{
-					Type:             telemetrytypes.String,
-					ColumnExpression: columnExpr,
-					IndexExpression:  index.Expression,
-				})
-			} else if strings.HasPrefix(index.Type, "minmax") {
-				cleanIndexes[path] = append(cleanIndexes[path], telemetrytypes.JSONDataTypeIndex{
-					Type:             jsonDataType,
-					ColumnExpression: columnExpr,
-					IndexExpression:  index.Expression,
-				})
-			}
-		}
+	fieldPathToIndexes := make(map[string][]telemetrytypes.TelemetryFieldKeySkipIndex)
+	for _, index := range indexes {
+		fieldPathToIndexes[index.Name] = append(fieldPathToIndexes[index.Name], index)
 	}
 
-	return cleanIndexes, nil
+	return fieldPathToIndexes, nil
 }
 
 func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, []any) {

@@ -173,14 +148,15 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
 
 	filterExprs := []string{}
 	for _, filter := range filters {
-		filterExprs = append(filterExprs, sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(filter))))
+		// Remove backticks from the actual expr, because paths from metadata don't have backticks
+		filterExprs = append(filterExprs, sb.ILike("replaceAll(expr, '`', '')", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(filter))))
 	}
 	sb.Where(sb.Or(filterExprs...))
 
 	return sb.BuildWithFlavor(sqlbuilder.ClickHouse)
 }
 
-func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error) {
+func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) ([]telemetrytypes.TelemetryFieldKeySkipIndex, error) {
 	ctx = withTelemetryContext(ctx, "ListLogsJSONIndexes")
 	query, args := buildListLogsJSONIndexesQuery(t.telemetrystore.Cluster(), filters...)
 	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)

@@ -189,7 +165,7 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
 	}
 	defer rows.Close()
 
-	indexes := make(map[string][]schemamigrator.Index)
+	indexes := []telemetrytypes.TelemetryFieldKeySkipIndex{}
 	for rows.Next() {
 		var name string
 		var typeFull string

@@ -198,11 +174,39 @@ func (t *telemetryMetaStore) ListLogsJSONIndexes(ctx context.Context, filters ..
 		if err := rows.Scan(&name, &typeFull, &expr, &granularity); err != nil {
 			return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to scan string indexed column")
 		}
-		indexes[name] = append(indexes[name], schemamigrator.Index{
-			Name:        name,
-			Type:        typeFull,
-			Expression:  expr,
-			Granularity: int(granularity),
+
+		columnExpr, columnType, err := schemamigrator.UnfoldJSONSubColumnIndexExpr(expr)
+		if err != nil {
+			return nil, errors.WrapInternalf(err, CodeFailLoadLogsJSONIndexes, "failed to unfold JSON sub column index expression: %s", expr)
+		}
+
+		fdt, found := telemetrytypes.MappingJSONDataTypeToFieldDataType[columnType]
+		if !found {
+			t.logger.ErrorContext(ctx, "failed to map JSON data type to field data type", slog.String("column_type", columnType), slog.String("column_expr", columnExpr))
+			continue
+		}
+
+		baseColumn := ""
+		fieldName := ""
+		switch {
+		case strings.HasPrefix(columnExpr, telemetrylogs.BodyV2ColumnPrefix):
+			baseColumn = telemetrylogs.BodyV2ColumnPrefix
+			fieldName = strings.TrimPrefix(columnExpr, telemetrylogs.BodyV2ColumnPrefix)
+		case strings.HasPrefix(columnExpr, telemetrylogs.BodyPromotedColumnPrefix):
+			baseColumn = telemetrylogs.BodyPromotedColumnPrefix
+			fieldName = strings.TrimPrefix(columnExpr, telemetrylogs.BodyPromotedColumnPrefix)
+		}
+		fieldName = strings.ReplaceAll(fieldName, "`", "")
+
+		indexes = append(indexes, telemetrytypes.TelemetryFieldKeySkipIndex{
+			Name:            fieldName,
+			FieldContext:    telemetrytypes.FieldContextBody,
+			FieldDataType:   fdt,
+			BaseColumn:      baseColumn,
+			IndexName:       name,
+			IndexType:       typeFull,
+			IndexExpression: expr,
+			Granularity:     int(granularity),
 		})
 	}
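`ListLogsJSONIndexes` now derives a field path from each skip-index expression: unfold the expression into a column expression, strip the base-column prefix, then drop backticks. The prefix handling in isolation (a sketch; the two prefix strings and the sample expression are illustrative stand-ins for `telemetrylogs.BodyV2ColumnPrefix` / `BodyPromotedColumnPrefix` and real ClickHouse expressions):

```go
package main

import (
	"fmt"
	"strings"
)

// splitIndexColumn returns the matched base-column prefix and the cleaned
// field path, mirroring the switch in the hunk above.
func splitIndexColumn(columnExpr string, prefixes []string) (baseColumn, fieldName string) {
	for _, p := range prefixes {
		if strings.HasPrefix(columnExpr, p) {
			baseColumn = p
			fieldName = strings.TrimPrefix(columnExpr, p)
			break
		}
	}
	// Paths in metadata carry no backticks, so strip them here too.
	fieldName = strings.ReplaceAll(fieldName, "`", "")
	return baseColumn, fieldName
}

func main() {
	base, field := splitIndexColumn(
		"body_v2.`config`.`retries`",                // assumed expression shape
		[]string{"body_v2.", "body_promoted."},      // assumed prefixes
	)
	fmt.Println(base, field) // body_v2. config.retries
}
```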
@@ -36,7 +36,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
 			cluster: "test-cluster",
 			filters: []string{"foo", "bar"},
 			expectedSQL: "SELECT name, type_full, expr, granularity FROM clusterAllReplicas('test-cluster', system.data_skipping_indices) " +
-				"WHERE database = ? AND table = ? AND (LOWER(expr) LIKE LOWER(?) OR LOWER(expr) LIKE LOWER(?)) AND (LOWER(expr) LIKE LOWER(?) OR LOWER(expr) LIKE LOWER(?))",
+				"WHERE database = ? AND table = ? AND (LOWER(expr) LIKE LOWER(?) OR LOWER(expr) LIKE LOWER(?)) AND (LOWER(replaceAll(expr, '`', '')) LIKE LOWER(?) OR LOWER(replaceAll(expr, '`', '')) LIKE LOWER(?))",
 			expectedArgs: []any{
 				telemetrylogs.DBName,
 				telemetrylogs.LogsV2LocalTableName,
@@ -10,10 +10,10 @@ import (
 )
 
 type WrappedIndex struct {
-	JSONDataType telemetrytypes.JSONDataType `json:"-"`
-	ColumnType   string                      `json:"column_type"`
-	Type         string                      `json:"type"`
-	Granularity  int                         `json:"granularity"`
+	JSONDataType  telemetrytypes.JSONDataType  `json:"-"`
+	FieldDataType telemetrytypes.FieldDataType `json:"fieldDataType"`
+	Type          string                       `json:"type"`
+	Granularity   int                          `json:"granularity"`
 }
 
 type PromotePath struct {

@@ -60,12 +60,12 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
 			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index granularity must be greater than 0")
 		}
 
-		jsonDataType, ok := telemetrytypes.MappingStringToJSONDataType[index.ColumnType]
+		jsonDataType, ok := telemetrytypes.MappingFieldDataTypeToJSONDataType[index.FieldDataType]
 		if !ok {
-			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid column type: %s", index.ColumnType)
+			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid column type: %s", index.FieldDataType)
 		}
 		if !jsonDataType.IndexSupported {
-			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index is not supported for column type: %s", index.ColumnType)
+			return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "index is not supported for column type: %s", index.FieldDataType)
 		}
 
 		i.Indexes[idx].JSONDataType = jsonDataType
@@ -37,9 +37,9 @@ type TelemetryFieldKey struct {
 	FieldContext  FieldContext  `json:"fieldContext,omitzero"`
 	FieldDataType FieldDataType `json:"fieldDataType,omitzero"`
 
-	JSONPlan     JSONAccessPlan      `json:"-"`
-	Indexes      []JSONDataTypeIndex `json:"-"`
-	Materialized bool                `json:"-"` // refers to promoted in case of body.... fields
+	JSONPlan     JSONAccessPlan               `json:"-"`
+	Indexes      []TelemetryFieldKeySkipIndex `json:"-"`
+	Materialized bool                         `json:"-"` // refers to promoted in case of body.... fields
 
 	Evolutions []*EvolutionEntry `json:"-"`
 }

@@ -102,7 +102,7 @@ func (f TelemetryFieldKey) String() string {
 			if i > 0 {
 				sb.WriteString("; ")
 			}
-			fmt.Fprintf(&sb, "{type=%s, columnExpr=%s, indexExpr=%s}", index.Type.StringValue(), index.ColumnExpression, index.IndexExpression)
+			fmt.Fprintf(&sb, "{type=%s, indexExpr=%s}", MappingFieldDataTypeToJSONDataType[index.FieldDataType].StringValue(), index.IndexExpression)
 		}
 		sb.WriteString("]")
 	}

@@ -400,3 +400,14 @@ func NewFieldValueSelectorFromPostableFieldValueParams(params PostableFieldValue
 
 	return fieldValueSelector
 }
+
+type TelemetryFieldKeySkipIndex struct {
+	Name            string        `json:"name"` // Name is TelemetryFieldKey.Name, not IndexName from ClickHouse
+	FieldContext    FieldContext  `json:"fieldContext,omitzero"`
+	FieldDataType   FieldDataType `json:"fieldDataType,omitzero"`
+	BaseColumn      string        `json:"baseColumn"`
+	IndexName       string        `json:"indexName"`
+	IndexType       string        `json:"indexType"`
+	IndexExpression string        `json:"indexExpression"`
+	Granularity     int           `json:"granularity"`
+}
@@ -1,11 +1,5 @@
 package telemetrytypes
 
-type JSONDataTypeIndex struct {
-	Type             JSONDataType
-	ColumnExpression string
-	IndexExpression  string
-}
-
 type JSONDataType struct {
 	str     string // Store the correct case for ClickHouse
 	IsArray bool

@@ -32,18 +26,17 @@ var (
 	ArrayJSON = JSONDataType{"Array(JSON)", true, "JSON", false}
 )
 
-var MappingStringToJSONDataType = map[string]JSONDataType{
-	"String":                   String,
-	"Int64":                    Int64,
-	"Float64":                  Float64,
-	"Bool":                     Bool,
-	"Dynamic":                  Dynamic,
-	"Array(Nullable(String))":  ArrayString,
-	"Array(Nullable(Int64))":   ArrayInt64,
-	"Array(Nullable(Float64))": ArrayFloat64,
-	"Array(Nullable(Bool))":    ArrayBool,
-	"Array(Dynamic)":           ArrayDynamic,
-	"Array(JSON)":              ArrayJSON,
+var MappingJSONDataTypeToFieldDataType = map[string]FieldDataType{
+	"String":                   FieldDataTypeString,
+	"Int64":                    FieldDataTypeInt64,
+	"Float64":                  FieldDataTypeFloat64,
+	"Bool":                     FieldDataTypeBool,
+	"Array(Nullable(String))":  FieldDataTypeArrayString,
+	"Array(Nullable(Int64))":   FieldDataTypeArrayInt64,
+	"Array(Nullable(Float64))": FieldDataTypeArrayFloat64,
+	"Array(Nullable(Bool))":    FieldDataTypeArrayBool,
+	"Array(Dynamic)":           FieldDataTypeArrayDynamic,
+	"Array(JSON)":              FieldDataTypeArrayJSON,
 }
 
 var MappingFieldDataTypeToJSONDataType = map[FieldDataType]JSONDataType{
@@ -3,7 +3,6 @@ package telemetrytypes
 import (
 	"context"
 
-	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 )

@@ -35,7 +34,7 @@ type MetadataStore interface {
 	FetchTemporalityAndTypeMulti(ctx context.Context, queryTimeRangeStartTs, queryTimeRangeEndTs uint64, metricNames ...string) (map[string]metrictypes.Temporality, map[string]metrictypes.Type, error)
 
 	// ListLogsJSONIndexes lists the JSON indexes for the logs table.
-	ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error)
+	ListLogsJSONIndexes(ctx context.Context, filters ...string) ([]TelemetryFieldKeySkipIndex, error)
 
 	// ListPromotedPaths lists the promoted paths.
 	GetPromotedPaths(ctx context.Context, paths ...string) (map[string]bool, error)
@@ -4,7 +4,6 @@ import (
 	"context"
 	"strings"
 
-	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
 	"github.com/SigNoz/signoz/pkg/types/metrictypes"
 	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
 )

@@ -18,7 +17,7 @@ type MockMetadataStore struct {
 	TemporalityMap   map[string]metrictypes.Temporality
 	TypeMap          map[string]metrictypes.Type
 	PromotedPathsMap map[string]bool
-	LogsJSONIndexesMap map[string][]schemamigrator.Index
+	LogsJSONIndexes []telemetrytypes.TelemetryFieldKeySkipIndex
 	ColumnEvolutionMetadataMap map[string][]*telemetrytypes.EvolutionEntry
 	LookupKeysMap              map[telemetrytypes.MetricMetadataLookupKey]int64
 	// StaticFields holds signal-specific intrinsic field definitions (e.g. telemetrylogs.IntrinsicFields).

@@ -34,7 +33,7 @@ func NewMockMetadataStore() *MockMetadataStore {
 		TemporalityMap:   make(map[string]metrictypes.Temporality),
 		TypeMap:          make(map[string]metrictypes.Type),
 		PromotedPathsMap: make(map[string]bool),
-		LogsJSONIndexesMap: make(map[string][]schemamigrator.Index),
+		LogsJSONIndexes: []telemetrytypes.TelemetryFieldKeySkipIndex{},
 		ColumnEvolutionMetadataMap: make(map[string][]*telemetrytypes.EvolutionEntry),
 		LookupKeysMap:              make(map[telemetrytypes.MetricMetadataLookupKey]int64),
 		StaticFields:               make(map[string]telemetrytypes.TelemetryFieldKey),

@@ -369,8 +368,8 @@ func (m *MockMetadataStore) GetPromotedPaths(ctx context.Context, paths ...strin
 }
 
 // ListLogsJSONIndexes lists the JSON indexes for the logs table.
-func (m *MockMetadataStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) (map[string][]schemamigrator.Index, error) {
-	return m.LogsJSONIndexesMap, nil
+func (m *MockMetadataStore) ListLogsJSONIndexes(ctx context.Context, filters ...string) ([]telemetrytypes.TelemetryFieldKeySkipIndex, error) {
+	return m.LogsJSONIndexes, nil
 }
 
 func (m *MockMetadataStore) updateColumnEvolutionMetadataForKeys(_ context.Context, keysToUpdate []*telemetrytypes.TelemetryFieldKey) map[string][]*telemetrytypes.EvolutionEntry {