Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-19 07:22:29 +00:00)

Compare commits: merge-json...nv/6703 (8 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 84a7be22f7 |  |
|  | c5923f942f |  |
|  | f91eee7e50 |  |
|  | 5c86b80682 |  |
|  | 75512a81c6 |  |
|  | 6aaea79b73 |  |
|  | 04643264ff |  |
|  | 3aa0d8a7fd |  |
@@ -176,7 +176,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.111.0
+    image: signoz/signoz:v0.112.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -117,7 +117,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:v0.111.0
+    image: signoz/signoz:v0.112.0
     command:
       - --config=/root/config/prometheus.yml
     ports:
@@ -179,7 +179,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.111.0}
+    image: signoz/signoz:${VERSION:-v0.112.0}
     container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
@@ -111,7 +111,7 @@ services:
     # - ../common/clickhouse/storage.xml:/etc/clickhouse-server/config.d/storage.xml
   signoz:
     !!merge <<: *db-depend
-    image: signoz/signoz:${VERSION:-v0.111.0}
+    image: signoz/signoz:${VERSION:-v0.112.0}
    container_name: signoz
     command:
       - --config=/root/config/prometheus.yml
docs/api/openapi.yml (3504 lines): file diff suppressed because it is too large.
@@ -10,6 +10,7 @@ import (
 	"github.com/SigNoz/signoz/ee/query-service/usage"
 	"github.com/SigNoz/signoz/pkg/alertmanager"
 	"github.com/SigNoz/signoz/pkg/global"
+	"github.com/SigNoz/signoz/pkg/http/handler"
 	"github.com/SigNoz/signoz/pkg/http/middleware"
 	querierAPI "github.com/SigNoz/signoz/pkg/querier"
 	baseapp "github.com/SigNoz/signoz/pkg/query-service/app"
@@ -106,7 +107,10 @@ func (ah *APIHandler) RegisterRoutes(router *mux.Router, am *middleware.AuthZ) {
 	router.HandleFunc("/api/v4/query_range", am.ViewAccess(ah.queryRangeV4)).Methods(http.MethodPost)

 	// v5
-	router.HandleFunc("/api/v5/query_range", am.ViewAccess(ah.queryRangeV5)).Methods(http.MethodPost)
+	router.Handle("/api/v5/query_range", handler.New(
+		am.ViewAccess(ah.queryRangeV5),
+		querierAPI.QueryRangeV5OpenAPIDef,
+	)).Methods(http.MethodPost)

 	router.HandleFunc("/api/v5/substitute_vars", am.ViewAccess(ah.QuerierAPI.ReplaceVariables)).Methods(http.MethodPost)
frontend/src/api/generated/services/query/index.ts (new file, 107 lines)
@@ -0,0 +1,107 @@
/**
 * ! Do not edit manually
 * * The file has been auto-generated using Orval for SigNoz
 * * regenerate with 'yarn generate:api'
 * SigNoz
 */
import type {
	MutationFunction,
	UseMutationOptions,
	UseMutationResult,
} from 'react-query';
import { useMutation } from 'react-query';

import { GeneratedAPIInstance } from '../../../index';
import type {
	Querybuildertypesv5QueryRangeRequestDTO,
	QueryRangeV5200,
	RenderErrorResponseDTO,
} from '../sigNoz.schemas';

type AwaitedInput<T> = PromiseLike<T> | T;

type Awaited<O> = O extends AwaitedInput<infer T> ? T : never;

/**
 * Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.
 * @summary Query range
 */
export const queryRangeV5 = (
	querybuildertypesv5QueryRangeRequestDTO: Querybuildertypesv5QueryRangeRequestDTO,
	signal?: AbortSignal,
) => {
	return GeneratedAPIInstance<QueryRangeV5200>({
		url: `/api/v5/query_range`,
		method: 'POST',
		headers: { 'Content-Type': 'application/json' },
		data: querybuildertypesv5QueryRangeRequestDTO,
		signal,
	});
};

export const getQueryRangeV5MutationOptions = <
	TError = RenderErrorResponseDTO,
	TContext = unknown
>(options?: {
	mutation?: UseMutationOptions<
		Awaited<ReturnType<typeof queryRangeV5>>,
		TError,
		{ data: Querybuildertypesv5QueryRangeRequestDTO },
		TContext
	>;
}): UseMutationOptions<
	Awaited<ReturnType<typeof queryRangeV5>>,
	TError,
	{ data: Querybuildertypesv5QueryRangeRequestDTO },
	TContext
> => {
	const mutationKey = ['queryRangeV5'];
	const { mutation: mutationOptions } = options
		? options.mutation &&
		  'mutationKey' in options.mutation &&
		  options.mutation.mutationKey
			? options
			: { ...options, mutation: { ...options.mutation, mutationKey } }
		: { mutation: { mutationKey } };

	const mutationFn: MutationFunction<
		Awaited<ReturnType<typeof queryRangeV5>>,
		{ data: Querybuildertypesv5QueryRangeRequestDTO }
	> = (props) => {
		const { data } = props ?? {};

		return queryRangeV5(data);
	};

	return { mutationFn, ...mutationOptions };
};

export type QueryRangeV5MutationResult = NonNullable<
	Awaited<ReturnType<typeof queryRangeV5>>
>;
export type QueryRangeV5MutationBody = Querybuildertypesv5QueryRangeRequestDTO;
export type QueryRangeV5MutationError = RenderErrorResponseDTO;

/**
 * @summary Query range
 */
export const useQueryRangeV5 = <
	TError = RenderErrorResponseDTO,
	TContext = unknown
>(options?: {
	mutation?: UseMutationOptions<
		Awaited<ReturnType<typeof queryRangeV5>>,
		TError,
		{ data: Querybuildertypesv5QueryRangeRequestDTO },
		TContext
	>;
}): UseMutationResult<
	Awaited<ReturnType<typeof queryRangeV5>>,
	TError,
	{ data: Querybuildertypesv5QueryRangeRequestDTO },
	TContext
> => {
	const mutationOptions = getQueryRangeV5MutationOptions(options);

	return useMutation(mutationOptions);
};
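A minimal usage sketch of the generated hook follows. The payload fields shown are assumptions about the v5 request DTO (its real shape lives in the generated sigNoz.schemas module, whose diff is suppressed below) and the import paths assume the frontend's src-rooted path aliases; neither is taken verbatim from this comparison:

import { useQueryRangeV5 } from 'api/generated/services/query';
import type { Querybuildertypesv5QueryRangeRequestDTO } from 'api/generated/services/sigNoz.schemas';

function RunQueryButton(): JSX.Element {
	// react-query v3 mutation: calling mutate() issues the POST to /api/v5/query_range
	const { mutate, isLoading } = useQueryRangeV5();

	const handleRun = (): void => {
		// Hypothetical payload; consult the generated sigNoz.schemas types for the real DTO shape
		const data = ({
			start: Date.now() - 15 * 60 * 1000,
			end: Date.now(),
		} as unknown) as Querybuildertypesv5QueryRangeRequestDTO;

		mutate(
			{ data },
			{ onSuccess: (response): void => console.log('query_range result', response) },
		);
	};

	return (
		<button type="button" disabled={isLoading} onClick={handleRun}>
			Run query
		</button>
	);
}

Because Orval emits a plain mutation, callers opt into retries, caching, or optimistic updates through the standard react-query options.mutation bag rather than a bespoke wrapper.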
File diff suppressed because it is too large.
@@ -1,8 +1,9 @@
 /* eslint-disable no-nested-ternary */
-import { useCallback, useEffect, useMemo } from 'react';
+import { useCallback, useEffect, useMemo, useRef } from 'react';
 import { useQuery } from 'react-query';
-import { Virtuoso } from 'react-virtuoso';
+import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
 import { Card } from 'antd';
 import LogDetail from 'components/LogDetail';
 import RawLogView from 'components/Logs/RawLogView';
 import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
 import { DEFAULT_ENTITY_VERSION } from 'constants/app';
@@ -10,6 +11,8 @@ import LogsError from 'container/LogsError/LogsError';
 import { LogsLoading } from 'container/LogsLoading/LogsLoading';
 import { FontSize } from 'container/OptionsMenu/types';
 import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
+import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
+import useScrollToLog from 'hooks/logs/useScrollToLog';
 import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
 import { ILog } from 'types/api/logs/log';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -28,6 +31,15 @@ interface Props {
 }

 function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
+	const virtuosoRef = useRef<VirtuosoHandle>(null);
+	const {
+		activeLog,
+		onAddToQuery,
+		selectedTab,
+		handleSetActiveLog,
+		handleCloseLogDetail,
+	} = useLogDetailHandlers();
+
 	const basePayload = getHostLogsQueryPayload(
 		timeRange.startTime,
 		timeRange.endTime,
@@ -72,29 +84,40 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
 		setIsPaginating(false);
 	}, [data, setIsPaginating]);

+	const handleScrollToLog = useScrollToLog({
+		logs,
+		virtuosoRef,
+	});
+
 	const getItemContent = useCallback(
-		(_: number, logToRender: ILog): JSX.Element => (
-			<RawLogView
-				isTextOverflowEllipsisDisabled
-				key={logToRender.id}
-				data={logToRender}
-				linesPerRow={5}
-				fontSize={FontSize.MEDIUM}
-				selectedFields={[
-					{
-						dataType: 'string',
-						type: '',
-						name: 'body',
-					},
-					{
-						dataType: 'string',
-						type: '',
-						name: 'timestamp',
-					},
-				]}
-			/>
-		),
-		[],
+		(_: number, logToRender: ILog): JSX.Element => {
+			return (
+				<div key={logToRender.id}>
+					<RawLogView
+						isTextOverflowEllipsisDisabled
+						data={logToRender}
+						linesPerRow={5}
+						fontSize={FontSize.MEDIUM}
+						selectedFields={[
+							{
+								dataType: 'string',
+								type: '',
+								name: 'body',
+							},
+							{
+								dataType: 'string',
+								type: '',
+								name: 'timestamp',
+							},
+						]}
+						onSetActiveLog={handleSetActiveLog}
+						onClearActiveLog={handleCloseLogDetail}
+						isActiveLog={activeLog?.id === logToRender.id}
+					/>
+				</div>
+			);
+		},
+		[activeLog, handleSetActiveLog, handleCloseLogDetail],
 	);

 	const renderFooter = useCallback(
@@ -118,6 +141,7 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
 			<Virtuoso
 				className="host-metrics-logs-virtuoso"
 				key="host-metrics-logs-virtuoso"
+				ref={virtuosoRef}
 				data={logs}
 				endReached={loadMoreLogs}
 				totalCount={logs.length}
@@ -139,7 +163,24 @@ function HostMetricsLogs({ timeRange, filters }: Props): JSX.Element {
 			{!isLoading && !isError && logs.length === 0 && <NoLogsContainer />}
 			{isError && !isLoading && <LogsError />}
 			{!isLoading && !isError && logs.length > 0 && (
-				<div className="host-metrics-logs-list-container">{renderContent}</div>
+				<div
+					className="host-metrics-logs-list-container"
+					data-log-detail-ignore="true"
+				>
+					{renderContent}
+				</div>
+			)}
+			{selectedTab && activeLog && (
+				<LogDetail
+					log={activeLog}
+					onClose={handleCloseLogDetail}
+					logs={logs}
+					onNavigateLog={handleSetActiveLog}
+					selectedTab={selectedTab}
+					onAddToQuery={onAddToQuery}
+					onClickActionItem={onAddToQuery}
+					onScrollToLog={handleScrollToLog}
+				/>
 			)}
 		</div>
 	);
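The list views in this comparison consume useScrollToLog and useLogDetailHandlers, whose definitions are not part of the diff. A plausible minimal sketch, inferred from the call sites above; the internals are assumptions, although scrollToIndex is the documented react-virtuoso VirtuosoHandle method and useActiveLog already exists in the codebase:

import { RefObject, useCallback, useState } from 'react';
import { VirtuosoHandle } from 'react-virtuoso';
import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { ILog } from 'types/api/logs/log';

interface UseScrollToLogProps {
	logs: ILog[];
	virtuosoRef: RefObject<VirtuosoHandle>;
}

// Scrolls the virtualized list to the row that renders the given log id.
function useScrollToLog({ logs, virtuosoRef }: UseScrollToLogProps): (logId: string) => void {
	return useCallback(
		(logId: string): void => {
			const index = logs.findIndex((log) => log.id === logId);
			if (index !== -1) {
				virtuosoRef.current?.scrollToIndex({ index, align: 'center' });
			}
		},
		[logs, virtuosoRef],
	);
}

// Pairs the shared active-log state with the drawer tab it should open on.
function useLogDetailHandlers() {
	const { activeLog, onSetActiveLog, onClearActiveLog, onAddToQuery } = useActiveLog();
	const [selectedTab, setSelectedTab] = useState<VIEWS | undefined>();

	const handleSetActiveLog = useCallback(
		(log: ILog, tab: VIEWS = VIEW_TYPES.OVERVIEW): void => {
			onSetActiveLog(log);
			setSelectedTab(tab);
		},
		[onSetActiveLog],
	);

	const handleCloseLogDetail = useCallback((): void => {
		onClearActiveLog();
		setSelectedTab(undefined);
	}, [onClearActiveLog]);

	return { activeLog, onAddToQuery, selectedTab, handleSetActiveLog, handleCloseLogDetail };
}

Keeping selectedTab beside the active log lets each list render one shared LogDetail drawer instead of every row mounting its own, which is the refactor the later hunks in this comparison carry out.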
@@ -13,6 +13,9 @@ export type LogDetailProps = {
 	handleChangeSelectedView?: ChangeViewFunctionType;
 	isListViewPanel?: boolean;
 	listViewPanelSelectedFields?: IField[] | null;
+	logs?: ILog[];
+	onNavigateLog?: (log: ILog) => void;
+	onScrollToLog?: (logId: string) => void;
 } & Pick<AddToQueryHOCProps, 'onAddToQuery'> &
 	Partial<Pick<ActionItemProps, 'onClickActionItem'>> &
 	Pick<DrawerProps, 'onClose'>;
@@ -15,6 +15,8 @@
 }

 .log-detail-drawer__title-right {
+	display: flex;
+	align-items: center;
 	.ant-btn {
 		display: flex;
 		align-items: center;
@@ -66,6 +68,10 @@
 	margin-bottom: 16px;
 }

+.log-detail-drawer__content {
+	height: 100%;
+}
+
 .log-detail-drawer__log {
 	width: 100%;
 	display: flex;
@@ -183,9 +189,115 @@
 	.ant-drawer-close {
 		padding: 0px;
 	}
+	.log-detail-drawer__footer-hint {
+		position: absolute;
+		bottom: 0;
+		left: 0;
+		right: 0;
+		padding: 8px 16px;
+		text-align: left;
+		color: var(--text-vanilla-200);
+		background: var(--bg-ink-400);
+		z-index: 10;
+
+		.log-detail-drawer__footer-hint-content {
+			display: flex;
+			align-items: center;
+			gap: 4px;
+		}
+
+		.log-detail-drawer__footer-hint-icon {
+			display: inline;
+			vertical-align: middle;
+			color: var(--text-vanilla-200);
+		}
+
+		.log-detail-drawer__footer-hint-text {
+			font-size: 13px;
+			margin: 0;
+		}
+	}
+	.log-arrows {
+		display: flex;
+		box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.1);
+		border-radius: 6px;
+		padding: 2px 6px;
+		align-items: center;
+		margin-left: 8px;
+	}
+
+	.log-arrow-btn {
+		padding: 0;
+		min-width: 28px;
+		height: 28px;
+		border-radius: 4px;
+		background: var(--bg-ink-400);
+		color: var(--text-vanilla-400);
+		border: 1px solid var(--bg-ink-300);
+		box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.08);
+		display: flex;
+		align-items: center;
+		justify-content: center;
+		transition: background-color 0.2s ease-in-out;
+	}
+
+	.log-arrow-btn-up,
+	.log-arrow-btn-down {
+		background: var(--bg-ink-400);
+	}
+
+	.log-arrow-btn:active,
+	.log-arrow-btn:focus {
+		background: var(--bg-ink-300);
+		color: var(--text-vanilla-100);
+	}
+
+	.log-arrow-btn[disabled] {
+		opacity: 0.5;
+		cursor: not-allowed;
+		background: var(--bg-ink-500);
+		color: var(--text-vanilla-200);
+	}
+
+	.log-arrow-btn:hover:not([disabled]) {
+		background: var(--bg-ink-300);
+		color: var(--text-vanilla-100);
+	}
 }

 .lightMode {
+	.log-arrows {
+		background: var(--bg-vanilla-100);
+		box-shadow: 0 1px 4px 0 rgba(0, 0, 0, 0.04);
+	}
+
+	.log-arrow-btn {
+		background: var(--bg-vanilla-100);
+		color: var(--text-ink-400);
+		border: 1px solid var(--bg-vanilla-300);
+		box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.04);
+	}
+
+	.log-arrow-btn-up,
+	.log-arrow-btn-down {
+		background: var(--bg-vanilla-100);
+	}
+
+	.log-arrow-btn:active,
+	.log-arrow-btn:focus {
+		background: var(--bg-vanilla-200);
+		color: var(--text-ink-500);
+	}
+
+	.log-arrow-btn:hover:not([disabled]) {
+		background: var(--bg-vanilla-200);
+		color: var(--text-ink-500);
+	}
+
+	.log-arrow-btn[disabled] {
+		background: var(--bg-vanilla-100);
+		color: var(--text-ink-200);
+	}
 	.ant-drawer-header {
 		border-bottom: 1px solid var(--bg-vanilla-400);
 		background: var(--bg-vanilla-100);
@@ -252,4 +364,33 @@
 		color: var(--text-ink-300);
 	}
+
+	.log-detail-drawer__footer-hint {
+		position: absolute;
+		bottom: 0;
+		left: 0;
+		right: 0;
+		padding: 8px 16px;
+		text-align: left;
+		color: var(--text-vanilla-700);
+		background: var(--bg-vanilla-100);
+		z-index: 10;
+
+		.log-detail-drawer__footer-hint-content {
+			display: flex;
+			align-items: center;
+			gap: 4px;
+		}
+
+		.log-detail-drawer__footer-hint-icon {
+			display: inline;
+			vertical-align: middle;
+			color: var(--text-vanilla-700);
+		}
+
+		.log-detail-drawer__footer-hint-text {
+			font-size: 13px;
+			margin: 0;
+		}
+	}
 }
@@ -1,5 +1,5 @@
 /* eslint-disable sonarjs/cognitive-complexity */
-import { useCallback, useMemo, useState } from 'react';
+import { useCallback, useEffect, useMemo, useState } from 'react';
 import { useSelector } from 'react-redux';
 import { useCopyToClipboard, useLocation } from 'react-use';
 import { Color, Spacing } from '@signozhq/design-tokens';
@@ -32,8 +32,12 @@ import { useSafeNavigate } from 'hooks/useSafeNavigate';
 import createQueryParams from 'lib/createQueryParams';
 import { cloneDeep } from 'lodash-es';
 import {
+	ArrowDown,
+	ArrowUp,
 	BarChart2,
 	Braces,
+	ChevronDown,
+	ChevronUp,
 	Compass,
 	Copy,
 	Filter,
@@ -60,6 +64,9 @@ function LogDetailInner({
 	isListViewPanel = false,
 	listViewPanelSelectedFields,
 	handleChangeSelectedView,
+	logs,
+	onNavigateLog,
+	onScrollToLog,
 }: LogDetailInnerProps): JSX.Element {
 	const initialContextQuery = useInitialQuery(log);
 	const [contextQuery, setContextQuery] = useState<Query | undefined>(
@@ -74,6 +81,78 @@ function LogDetailInner({
 	const [isEdit, setIsEdit] = useState<boolean>(false);
 	const { stagedQuery, updateAllQueriesOperators } = useQueryBuilder();

+	// Handle clicks outside to close drawer, except on explicitly ignored regions
+	useEffect(() => {
+		const handleClickOutside = (e: MouseEvent): void => {
+			const target = e.target as HTMLElement;
+
+			// Don't close if clicking on explicitly ignored regions
+			if (target.closest('[data-log-detail-ignore="true"]')) {
+				return;
+			}
+
+			// Close the drawer for any other outside click
+			onClose?.(e as any);
+		};
+
+		document.addEventListener('mousedown', handleClickOutside);
+
+		return (): void => {
+			document.removeEventListener('mousedown', handleClickOutside);
+		};
+	}, [onClose]);
+
+	// Keyboard navigation - handle up/down arrow keys
+	// Only listen when in OVERVIEW tab
+	useEffect(() => {
+		if (
+			!logs ||
+			!onNavigateLog ||
+			logs.length === 0 ||
+			selectedView !== VIEW_TYPES.OVERVIEW
+		) {
+			return;
+		}
+
+		const handleKeyDown = (e: KeyboardEvent): void => {
+			const currentIndex = logs.findIndex((l) => l.id === log.id);
+			if (currentIndex === -1) {
+				return;
+			}
+
+			if (e.key === 'ArrowUp') {
+				e.preventDefault();
+				e.stopPropagation();
+				// Navigate to previous log
+				if (currentIndex > 0) {
+					const prevLog = logs[currentIndex - 1];
+					onNavigateLog(prevLog);
+					// Trigger scroll to the log element
+					if (onScrollToLog) {
+						onScrollToLog(prevLog.id);
+					}
+				}
+			} else if (e.key === 'ArrowDown') {
+				e.preventDefault();
+				e.stopPropagation();
+				// Navigate to next log
+				if (currentIndex < logs.length - 1) {
+					const nextLog = logs[currentIndex + 1];
+					onNavigateLog(nextLog);
+					// Trigger scroll to the log element
+					if (onScrollToLog) {
+						onScrollToLog(nextLog.id);
+					}
+				}
+			}
+		};

+		document.addEventListener('keydown', handleKeyDown);
+		return (): void => {
+			document.removeEventListener('keydown', handleKeyDown);
+		};
+	}, [log.id, logs, onNavigateLog, onScrollToLog, selectedView]);
+
 	const listQuery = useMemo(() => {
 		if (!stagedQuery || stagedQuery.builder.queryData.length < 1) {
 			return null;
@@ -227,32 +306,87 @@ function LogDetailInner({
 	);

 	const logType = log?.attributes_string?.log_level || LogType.INFO;
+	const currentLogIndex = logs ? logs.findIndex((l) => l.id === log.id) : -1;
+	const isPrevDisabled =
+		!logs || !onNavigateLog || logs.length === 0 || currentLogIndex <= 0;
+	const isNextDisabled =
+		!logs ||
+		!onNavigateLog ||
+		logs.length === 0 ||
+		currentLogIndex === logs.length - 1;
+
+	type HandleNavigateLogParams = {
+		direction: 'next' | 'previous';
+	};
+
+	const handleNavigateLog = ({ direction }: HandleNavigateLogParams): void => {
+		if (!logs || !onNavigateLog || currentLogIndex === -1) {
+			return;
+		}
+		if (direction === 'previous' && !isPrevDisabled) {
+			const prevLog = logs[currentLogIndex - 1];
+			onNavigateLog(prevLog);
+			onScrollToLog?.(prevLog.id);
+		} else if (direction === 'next' && !isNextDisabled) {
+			const nextLog = logs[currentLogIndex + 1];
+			onNavigateLog(nextLog);
+			onScrollToLog?.(nextLog.id);
+		}
+	};

 	return (
 		<Drawer
 			width="60%"
-			maskStyle={{ background: 'none' }}
+			mask={false}
+			maskClosable={false}
 			title={
-				<div className="log-detail-drawer__title">
+				<div className="log-detail-drawer__title" data-log-detail-ignore="true">
 					<div className="log-detail-drawer__title-left">
 						<Divider type="vertical" className={cx('log-type-indicator', LogType)} />
 						<Typography.Text className="title">Log details</Typography.Text>
 					</div>
-					{showOpenInExplorerBtn && (
-						<div className="log-detail-drawer__title-right">
-							<Button
-								className="open-in-explorer-btn"
-								icon={<Compass size={16} />}
-								onClick={handleOpenInExplorer}
-							>
-								Open in Explorer
-							</Button>
-						</div>
-					)}
+					<div className="log-detail-drawer__title-right">
+						<div className="log-arrows">
+							<Tooltip
+								title={isPrevDisabled ? '' : 'Move to previous log'}
+								placement="top"
+								mouseLeaveDelay={0}
+							>
+								<Button
+									icon={<ChevronUp size={14} />}
+									className="log-arrow-btn log-arrow-btn-up"
+									disabled={isPrevDisabled}
+									onClick={(): void => handleNavigateLog({ direction: 'previous' })}
+								/>
+							</Tooltip>
+							<Tooltip
+								title={isNextDisabled ? '' : 'Move to next log'}
+								placement="top"
+								mouseLeaveDelay={0}
+							>
+								<Button
+									icon={<ChevronDown size={14} />}
+									className="log-arrow-btn log-arrow-btn-down"
+									disabled={isNextDisabled}
+									onClick={(): void => handleNavigateLog({ direction: 'next' })}
+								/>
+							</Tooltip>
+						</div>
+						{showOpenInExplorerBtn && (
+							<div>
+								<Button
+									className="open-in-explorer-btn"
+									icon={<Compass size={16} />}
+									onClick={handleOpenInExplorer}
+								>
+									Open in Explorer
+								</Button>
+							</div>
+						)}
+					</div>
 				</div>
 			}
 			placement="right"
 			// closable
 			onClose={drawerCloseHandler}
 			open={log !== null}
 			style={{
@@ -263,138 +397,164 @@ function LogDetailInner({
 			destroyOnClose
 			closeIcon={<X size={16} style={{ marginTop: Spacing.MARGIN_1 }} />}
 		>
-			<div className="log-detail-drawer__log">
-				<Divider type="vertical" className={cx('log-type-indicator', logType)} />
-				<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
-					<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
-				</Tooltip>
-
-				<div className="log-overflow-shadow"> </div>
-			</div>
+			<div className="log-detail-drawer__content" data-log-detail-ignore="true">
+				<div className="log-detail-drawer__log">
+					<Divider type="vertical" className={cx('log-type-indicator', logType)} />
+					<Tooltip title={removeEscapeCharacters(log?.body)} placement="left">
+						<div className="log-body" dangerouslySetInnerHTML={htmlBody} />
+					</Tooltip>
+
+					<div className="log-overflow-shadow"> </div>
+				</div>

-			<div className="tabs-and-search">
-				<Radio.Group
-					className="views-tabs"
-					onChange={handleModeChange}
-					value={selectedView}
-				>
-					<Radio.Button
-						className={
-							// eslint-disable-next-line sonarjs/no-duplicate-string
-							selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
-						}
-						value={VIEW_TYPES.OVERVIEW}
-					>
-						<div className="view-title">
-							<Table size={14} />
-							Overview
-						</div>
-					</Radio.Button>
-					<Radio.Button
-						className={selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'}
-						value={VIEW_TYPES.JSON}
-					>
-						<div className="view-title">
-							<Braces size={14} />
-							JSON
-						</div>
-					</Radio.Button>
-					<Radio.Button
-						className={
-							selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
-						}
-						value={VIEW_TYPES.CONTEXT}
-					>
-						<div className="view-title">
-							<TextSelect size={14} />
-							Context
-						</div>
-					</Radio.Button>
-					<Radio.Button
-						className={
-							selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
-						}
-						value={VIEW_TYPES.INFRAMETRICS}
-					>
-						<div className="view-title">
-							<BarChart2 size={14} />
-							Metrics
-						</div>
-					</Radio.Button>
-				</Radio.Group>
+				<div className="tabs-and-search">
+					<Radio.Group
+						className="views-tabs"
+						onChange={handleModeChange}
+						value={selectedView}
+					>
+						<Radio.Button
+							className={
+								// eslint-disable-next-line sonarjs/no-duplicate-string
+								selectedView === VIEW_TYPES.OVERVIEW ? 'selected_view tab' : 'tab'
+							}
+							value={VIEW_TYPES.OVERVIEW}
+						>
+							<div className="view-title">
+								<Table size={14} />
+								Overview
+							</div>
+						</Radio.Button>
+						<Radio.Button
+							className={
+								selectedView === VIEW_TYPES.JSON ? 'selected_view tab' : 'tab'
+							}
+							value={VIEW_TYPES.JSON}
+						>
+							<div className="view-title">
+								<Braces size={14} />
+								JSON
+							</div>
+						</Radio.Button>
+						<Radio.Button
+							className={
+								selectedView === VIEW_TYPES.CONTEXT ? 'selected_view tab' : 'tab'
+							}
+							value={VIEW_TYPES.CONTEXT}
+						>
+							<div className="view-title">
+								<TextSelect size={14} />
+								Context
+							</div>
+						</Radio.Button>
+						<Radio.Button
+							className={
+								selectedView === VIEW_TYPES.INFRAMETRICS ? 'selected_view tab' : 'tab'
+							}
+							value={VIEW_TYPES.INFRAMETRICS}
+						>
+							<div className="view-title">
+								<BarChart2 size={14} />
+								Metrics
+							</div>
+						</Radio.Button>
+					</Radio.Group>

-				<div className="log-detail-drawer__actions">
-					{selectedView === VIEW_TYPES.CONTEXT && (
-						<Tooltip
-							title="Show Filters"
-							placement="topLeft"
-							aria-label="Show Filters"
-						>
-							<Button
-								className="action-btn"
-								icon={<Filter size={16} />}
-								onClick={handleFilterVisible}
-							/>
-						</Tooltip>
-					)}
-
-					<Tooltip
-						title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
-						placement="topLeft"
-						aria-label={
-							selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
-						}
-					>
-						<Button
-							className="action-btn"
-							icon={<Copy size={16} />}
-							onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
-						/>
-					</Tooltip>
-				</div>
-			</div>
+					<div className="log-detail-drawer__actions">
+						{selectedView === VIEW_TYPES.CONTEXT && (
+							<Tooltip
+								title="Show Filters"
+								placement="topLeft"
+								aria-label="Show Filters"
+							>
+								<Button
+									className="action-btn"
+									icon={<Filter size={16} />}
+									onClick={handleFilterVisible}
+								/>
+							</Tooltip>
+						)}
+
+						<Tooltip
+							title={selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'}
+							placement="topLeft"
+							aria-label={
+								selectedView === VIEW_TYPES.JSON ? 'Copy JSON' : 'Copy Log Link'
+							}
+						>
+							<Button
+								className="action-btn"
+								icon={<Copy size={16} />}
+								onClick={selectedView === VIEW_TYPES.JSON ? handleJSONCopy : onLogCopy}
+							/>
+						</Tooltip>
+					</div>
+				</div>

-			{isFilterVisible && contextQuery?.builder.queryData[0] && (
-				<div className="log-detail-drawer-query-container">
-					<QuerySearch
-						onChange={(value): void => handleQueryExpressionChange(value, 0)}
-						dataSource={DataSource.LOGS}
-						queryData={contextQuery?.builder.queryData[0]}
-						onRun={handleRunQuery}
-					/>
-				</div>
-			)}
+				{isFilterVisible && contextQuery?.builder.queryData[0] && (
+					<div className="log-detail-drawer-query-container">
+						<QuerySearch
+							onChange={(value): void => handleQueryExpressionChange(value, 0)}
+							dataSource={DataSource.LOGS}
+							queryData={contextQuery?.builder.queryData[0]}
+							onRun={handleRunQuery}
+						/>
+					</div>
+				)}

-			{selectedView === VIEW_TYPES.OVERVIEW && (
-				<Overview
-					logData={log}
-					onAddToQuery={onAddToQuery}
-					onClickActionItem={onClickActionItem}
-					isListViewPanel={isListViewPanel}
-					selectedOptions={options}
-					listViewPanelSelectedFields={listViewPanelSelectedFields}
-					handleChangeSelectedView={handleChangeSelectedView}
-				/>
-			)}
-			{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
-			{selectedView === VIEW_TYPES.CONTEXT && (
-				<ContextView
-					log={log}
-					filters={filters}
-					contextQuery={contextQuery}
-					isEdit={isEdit}
-				/>
-			)}
-			{selectedView === VIEW_TYPES.INFRAMETRICS && (
-				<InfraMetrics
-					clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
-					podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
-					nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
-					hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
-					timestamp={log.timestamp.toString()}
-					dataSource={DataSource.LOGS}
-				/>
-			)}
+				{selectedView === VIEW_TYPES.OVERVIEW && (
+					<Overview
+						logData={log}
+						onAddToQuery={onAddToQuery}
+						onClickActionItem={onClickActionItem}
+						isListViewPanel={isListViewPanel}
+						selectedOptions={options}
+						listViewPanelSelectedFields={listViewPanelSelectedFields}
+						handleChangeSelectedView={handleChangeSelectedView}
+					/>
+				)}
+				{selectedView === VIEW_TYPES.JSON && <JSONView logData={log} />}
+				{selectedView === VIEW_TYPES.CONTEXT && (
+					<ContextView
+						log={log}
+						filters={filters}
+						contextQuery={contextQuery}
+						isEdit={isEdit}
+					/>
+				)}
+				{selectedView === VIEW_TYPES.INFRAMETRICS && (
+					<InfraMetrics
+						clusterName={log.resources_string?.[RESOURCE_KEYS.CLUSTER_NAME] || ''}
+						podName={log.resources_string?.[RESOURCE_KEYS.POD_NAME] || ''}
+						nodeName={log.resources_string?.[RESOURCE_KEYS.NODE_NAME] || ''}
+						hostName={log.resources_string?.[RESOURCE_KEYS.HOST_NAME] || ''}
+						timestamp={log.timestamp.toString()}
+						dataSource={DataSource.LOGS}
+					/>
+				)}
+				{selectedView === VIEW_TYPES.OVERVIEW && (
+					<div className="log-detail-drawer__footer-hint">
+						<div className="log-detail-drawer__footer-hint-content">
+							<Typography.Text
+								type="secondary"
+								className="log-detail-drawer__footer-hint-text"
+							>
+								Use
+							</Typography.Text>
+							<ArrowUp size={14} className="log-detail-drawer__footer-hint-icon" />
+							<span>/</span>
+							<ArrowDown size={14} className="log-detail-drawer__footer-hint-icon" />
+							<Typography.Text
+								type="secondary"
+								className="log-detail-drawer__footer-hint-text"
+							>
+								to view previous/next log
+							</Typography.Text>
+						</div>
+					</div>
+				)}
+			</div>
 		</Drawer>
 	);
 }
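The click-outside effect above is inlined in the drawer, and the list containers and popovers elsewhere in this comparison opt out of it by carrying data-log-detail-ignore="true". A hedged sketch of the same pattern extracted into a reusable hook (the hook name is illustrative and not defined anywhere in this diff):

import { useEffect } from 'react';

const IGNORE_SELECTOR = '[data-log-detail-ignore="true"]';

// Invokes the callback on any mousedown outside regions tagged with the ignore attribute.
function useClickOutsideExcept(onOutsideClick: () => void): void {
	useEffect(() => {
		const handleMouseDown = (e: MouseEvent): void => {
			const target = e.target as HTMLElement;
			// Regions tagged with the ignore attribute never trigger the callback
			if (target.closest(IGNORE_SELECTOR)) {
				return;
			}
			onOutsideClick();
		};

		document.addEventListener('mousedown', handleMouseDown);
		return (): void => {
			document.removeEventListener('mousedown', handleMouseDown);
		};
	}, [onOutsideClick]);
}

Using an attribute selector rather than a ref chain means any component, however deeply nested, can declare itself a safe click target without threading callbacks through the tree.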
@@ -2,13 +2,11 @@ import { memo, useCallback, useMemo } from 'react';
 import { blue } from '@ant-design/colors';
 import { Typography } from 'antd';
 import cx from 'classnames';
-import LogDetail from 'components/LogDetail';
 import { VIEW_TYPES } from 'components/LogDetail/constants';
 import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
 import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
 import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
 import { FontSize } from 'container/OptionsMenu/types';
-import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 import { useIsDarkMode } from 'hooks/useDarkMode';
 // utils
@@ -104,12 +102,17 @@ function LogSelectedField({
 type ListLogViewProps = {
 	logData: ILog;
 	selectedFields: IField[];
-	onSetActiveLog: (log: ILog) => void;
+	onSetActiveLog: (
+		log: ILog,
+		selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
+	) => void;
 	onAddToQuery: AddToQueryHOCProps['onAddToQuery'];
 	activeLog?: ILog | null;
 	linesPerRow: number;
 	fontSize: FontSize;
 	handleChangeSelectedView?: ChangeViewFunctionType;
+	isActiveLog?: boolean;
+	onClearActiveLog?: () => void;
 };

 function ListLogView({
@@ -120,7 +123,8 @@ function ListLogView({
 	activeLog,
 	linesPerRow,
 	fontSize,
 	handleChangeSelectedView,
+	isActiveLog,
+	onClearActiveLog,
 }: ListLogViewProps): JSX.Element {
 	const flattenLogData = useMemo(() => FlatLogData(logData), [logData]);

@@ -129,35 +133,24 @@ function ListLogView({
 	);
 	const isReadOnlyLog = !isLogsExplorerPage;

-	const {
-		activeLog: activeContextLog,
-		onAddToQuery: handleAddToQuery,
-		onSetActiveLog: handleSetActiveContextLog,
-		onClearActiveLog: handleClearActiveContextLog,
-	} = useActiveLog();
-
 	const isDarkMode = useIsDarkMode();

-	const handlerClearActiveContextLog = useCallback(
-		(event: React.MouseEvent | React.KeyboardEvent) => {
-			event.preventDefault();
-			event.stopPropagation();
-			handleClearActiveContextLog();
-		},
-		[handleClearActiveContextLog],
-	);
-
 	const handleDetailedView = useCallback(() => {
+		if (isActiveLog) {
+			onClearActiveLog?.();
+			return;
+		}
+
 		onSetActiveLog(logData);
-	}, [logData, onSetActiveLog]);
+	}, [logData, onSetActiveLog, isActiveLog, onClearActiveLog]);

 	const handleShowContext = useCallback(
 		(event: React.MouseEvent) => {
 			event.preventDefault();
 			event.stopPropagation();
-			handleSetActiveContextLog(logData);
+			onSetActiveLog(logData, VIEW_TYPES.CONTEXT);
 		},
-		[logData, handleSetActiveContextLog],
+		[logData, onSetActiveLog],
 	);

 	const updatedSelecedFields = useMemo(
@@ -186,11 +179,7 @@ function ListLogView({
 	return (
 		<>
 			<Container
-				$isActiveLog={
-					isHighlighted ||
-					activeLog?.id === logData.id ||
-					activeContextLog?.id === logData.id
-				}
+				$isActiveLog={isHighlighted || activeLog?.id === logData.id}
 				$isDarkMode={isDarkMode}
 				$logType={logType}
 				onClick={handleDetailedView}
@@ -251,15 +240,6 @@ function ListLogView({
 				/>
 			)}
 			</Container>
-			{activeContextLog && (
-				<LogDetail
-					log={activeContextLog}
-					onAddToQuery={handleAddToQuery}
-					selectedTab={VIEW_TYPES.CONTEXT}
-					onClose={handlerClearActiveContextLog}
-					handleChangeSelectedView={handleChangeSelectedView}
-				/>
-			)}
 		</>
 	);
 }
@@ -1,19 +1,15 @@
 import {
-	KeyboardEvent,
 	memo,
 	MouseEvent,
 	MouseEventHandler,
 	useCallback,
 	useMemo,
-	useState,
 } from 'react';
 import { Color } from '@signozhq/design-tokens';
-import { DrawerProps, Tooltip } from 'antd';
-import LogDetail from 'components/LogDetail';
-import { VIEW_TYPES, VIEWS } from 'components/LogDetail/constants';
+import { Tooltip } from 'antd';
+import { VIEW_TYPES } from 'components/LogDetail/constants';
 import { DATE_TIME_FORMATS } from 'constants/dateTimeFormats';
 import { getSanitizedLogBody } from 'container/LogDetailedView/utils';
-import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
 // hooks
 import { useIsDarkMode } from 'hooks/useDarkMode';
@@ -39,7 +35,8 @@ function RawLogView({
 	selectedFields = [],
 	fontSize,
 	onLogClick,
 	handleChangeSelectedView,
+	onSetActiveLog,
+	onClearActiveLog,
 }: RawLogViewProps): JSX.Element {
 	const {
 		isHighlighted: isUrlHighlighted,
@@ -48,15 +45,6 @@ function RawLogView({
 	} = useCopyLogLink(data.id);
 	const flattenLogData = useMemo(() => FlatLogData(data), [data]);

-	const {
-		activeLog,
-		onSetActiveLog,
-		onClearActiveLog,
-		onAddToQuery,
-	} = useActiveLog();
-
-	const [selectedTab, setSelectedTab] = useState<VIEWS | undefined>();
-
 	const isDarkMode = useIsDarkMode();
 	const isReadOnlyLog = !isLogsExplorerPage || isReadOnly;

@@ -134,34 +122,24 @@ function RawLogView({
 			// Use custom click handler if provided, otherwise use default behavior
 			if (onLogClick) {
 				onLogClick(data, event);
-			} else {
-				onSetActiveLog(data);
-				setSelectedTab(VIEW_TYPES.OVERVIEW);
+				return;
 			}
-		},
-		[isReadOnly, data, onSetActiveLog, onLogClick],
-	);
-
-	const handleCloseLogDetail: DrawerProps['onClose'] = useCallback(
-		(
-			event: MouseEvent<Element, globalThis.MouseEvent> | KeyboardEvent<Element>,
-		) => {
-			event.preventDefault();
-			event.stopPropagation();

-			onClearActiveLog();
-			setSelectedTab(undefined);
+			if (isActiveLog) {
+				onClearActiveLog?.();
+				return;
+			}
+
+			onSetActiveLog?.(data);
 		},
-		[onClearActiveLog],
+		[isReadOnly, onLogClick, isActiveLog, onSetActiveLog, data, onClearActiveLog],
 	);

 	const handleShowContext: MouseEventHandler<HTMLElement> = useCallback(
 		(event) => {
 			event.preventDefault();
 			event.stopPropagation();
-			// handleSetActiveContextLog(data);
-			setSelectedTab(VIEW_TYPES.CONTEXT);
-			onSetActiveLog(data);
+			onSetActiveLog?.(data, VIEW_TYPES.CONTEXT);
 		},
 		[data, onSetActiveLog],
 	);
@@ -181,7 +159,7 @@ function RawLogView({
 			$isDarkMode={isDarkMode}
 			$isReadOnly={isReadOnly}
 			$isHightlightedLog={isUrlHighlighted}
-			$isActiveLog={activeLog?.id === data.id || isActiveLog}
+			$isActiveLog={isActiveLog}
 			$isCustomHighlighted={isHighlighted}
 			$logType={logType}
 			fontSize={fontSize}
@@ -218,17 +196,6 @@ function RawLogView({
 				onLogCopy={onLogCopy}
 			/>
 		)}
-
-		{selectedTab && (
-			<LogDetail
-				selectedTab={selectedTab}
-				log={activeLog}
-				onClose={handleCloseLogDetail}
-				onAddToQuery={onAddToQuery}
-				onClickActionItem={onAddToQuery}
-				handleChangeSelectedView={handleChangeSelectedView}
-			/>
-		)}
 	</RawLogViewContainer>
 );
 }
@@ -45,9 +45,6 @@ export const RawLogViewContainer = styled(Row)<{
 			: `margin: 2px 0;`}
 	}

-	${({ $isActiveLog, $logType }): string =>
-		getActiveLogBackground($isActiveLog, true, $logType)}
-
 	${({ $isReadOnly, $isActiveLog, $isDarkMode, $logType }): string =>
 		$isActiveLog
 			? getActiveLogBackground($isActiveLog, $isDarkMode, $logType)
@@ -1,4 +1,5 @@
 import { MouseEvent } from 'react';
+import { VIEW_TYPES } from 'components/LogDetail/constants';
 import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
 import { FontSize } from 'container/OptionsMenu/types';
 import { IField } from 'types/api/logs/fields';
@@ -16,6 +17,11 @@ export interface RawLogViewProps {
 	selectedFields?: IField[];
 	onLogClick?: (log: ILog, event: MouseEvent) => void;
 	handleChangeSelectedView?: ChangeViewFunctionType;
+	onSetActiveLog?: (
+		log: ILog,
+		selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
+	) => void;
+	onClearActiveLog?: () => void;
 }

 export interface RawLogContentProps {
@@ -1,8 +1,9 @@
 /* eslint-disable no-nested-ternary */
-import { useCallback, useEffect, useMemo } from 'react';
+import { useCallback, useEffect, useMemo, useRef } from 'react';
 import { useQuery } from 'react-query';
-import { Virtuoso } from 'react-virtuoso';
+import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
 import { Card } from 'antd';
 import LogDetail from 'components/LogDetail';
 import RawLogView from 'components/Logs/RawLogView';
 import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
 import { DEFAULT_ENTITY_VERSION } from 'constants/app';
@@ -11,6 +12,8 @@ import LogsError from 'container/LogsError/LogsError';
 import { LogsLoading } from 'container/LogsLoading/LogsLoading';
 import { FontSize } from 'container/OptionsMenu/types';
 import { useHandleLogsPagination } from 'hooks/infraMonitoring/useHandleLogsPagination';
+import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
+import useScrollToLog from 'hooks/logs/useScrollToLog';
 import { GetMetricQueryRange } from 'lib/dashboard/getQueryResults';
 import { ILog } from 'types/api/logs/log';
 import { IBuilderQuery } from 'types/api/queryBuilder/queryBuilderData';
@@ -40,6 +43,15 @@ function EntityLogs({
 	category,
 	queryKeyFilters,
 }: Props): JSX.Element {
+	const virtuosoRef = useRef<VirtuosoHandle>(null);
+	const {
+		activeLog,
+		onAddToQuery,
+		selectedTab,
+		handleSetActiveLog,
+		handleCloseLogDetail,
+	} = useLogDetailHandlers();
+
 	const basePayload = getEntityEventsOrLogsQueryPayload(
 		timeRange.startTime,
 		timeRange.endTime,
@@ -62,29 +74,40 @@ function EntityLogs({
 		basePayload,
 	});

+	const handleScrollToLog = useScrollToLog({
+		logs,
+		virtuosoRef,
+	});
+
 	const getItemContent = useCallback(
-		(_: number, logToRender: ILog): JSX.Element => (
-			<RawLogView
-				isTextOverflowEllipsisDisabled
-				key={logToRender.id}
-				data={logToRender}
-				linesPerRow={5}
-				fontSize={FontSize.MEDIUM}
-				selectedFields={[
-					{
-						dataType: 'string',
-						type: '',
-						name: 'body',
-					},
-					{
-						dataType: 'string',
-						type: '',
-						name: 'timestamp',
-					},
-				]}
-			/>
-		),
-		[],
+		(_: number, logToRender: ILog): JSX.Element => {
+			return (
+				<div key={logToRender.id}>
+					<RawLogView
+						isTextOverflowEllipsisDisabled
+						data={logToRender}
+						linesPerRow={5}
+						fontSize={FontSize.MEDIUM}
+						selectedFields={[
+							{
+								dataType: 'string',
+								type: '',
+								name: 'body',
+							},
+							{
+								dataType: 'string',
+								type: '',
+								name: 'timestamp',
+							},
+						]}
+						onSetActiveLog={handleSetActiveLog}
+						onClearActiveLog={handleCloseLogDetail}
+						isActiveLog={activeLog?.id === logToRender.id}
+					/>
+				</div>
+			);
+		},
+		[activeLog, handleSetActiveLog, handleCloseLogDetail],
 	);

 	const { data, isLoading, isFetching, isError } = useQuery({
@@ -131,6 +154,7 @@ function EntityLogs({
 			<Virtuoso
 				className="entity-logs-virtuoso"
 				key="entity-logs-virtuoso"
+				ref={virtuosoRef}
 				data={logs}
 				endReached={loadMoreLogs}
 				totalCount={logs.length}
@@ -154,7 +178,21 @@ function EntityLogs({
 			)}
 			{isError && !isLoading && <LogsError />}
 			{!isLoading && !isError && logs.length > 0 && (
-				<div className="entity-logs-list-container">{renderContent}</div>
+				<div className="entity-logs-list-container" data-log-detail-ignore="true">
+					{renderContent}
+				</div>
 			)}
+			{selectedTab && activeLog && (
+				<LogDetail
+					log={activeLog}
+					onClose={handleCloseLogDetail}
+					logs={logs}
+					onNavigateLog={handleSetActiveLog}
+					selectedTab={selectedTab}
+					onAddToQuery={onAddToQuery}
+					onClickActionItem={onAddToQuery}
+					onScrollToLog={handleScrollToLog}
+				/>
+			)}
 		</div>
 	);
@@ -2,7 +2,6 @@ import { memo, useCallback, useEffect, useMemo, useRef } from 'react';
 import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
 import { Card, Typography } from 'antd';
 import LogDetail from 'components/LogDetail';
-import { VIEW_TYPES } from 'components/LogDetail/constants';
 import ListLogView from 'components/Logs/ListLogView';
 import RawLogView from 'components/Logs/RawLogView';
 import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
@@ -14,8 +13,9 @@ import { InfinityWrapperStyled } from 'container/LogsExplorerList/styles';
 import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
 import { useOptionsMenu } from 'container/OptionsMenu';
 import { defaultLogsSelectedColumns } from 'container/OptionsMenu/constants';
-import { useActiveLog } from 'hooks/logs/useActiveLog';
 import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
+import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
+import useScrollToLog from 'hooks/logs/useScrollToLog';
 import { useEventSource } from 'providers/EventSource';
 // interfaces
 import { ILog } from 'types/api/logs/log';
@@ -38,10 +38,11 @@ function LiveLogsList({

 	const {
 		activeLog,
-		onClearActiveLog,
 		onAddToQuery,
-		onSetActiveLog,
-	} = useActiveLog();
+		selectedTab,
+		handleSetActiveLog,
+		handleCloseLogDetail,
+	} = useLogDetailHandlers();

 	// get only data from the logs object
 	const formattedLogs: ILog[] = useMemo(
@@ -65,42 +66,56 @@ function LiveLogsList({
 		...options.selectColumns,
 	]);

+	const handleScrollToLog = useScrollToLog({
+		logs: formattedLogs,
+		virtuosoRef: ref,
+	});
+
 	const getItemContent = useCallback(
 		(_: number, log: ILog): JSX.Element => {
 			if (options.format === 'raw') {
 				return (
-					<RawLogView
-						key={log.id}
-						data={log}
-						linesPerRow={options.maxLines}
-						selectedFields={selectedFields}
-						fontSize={options.fontSize}
-						handleChangeSelectedView={handleChangeSelectedView}
-					/>
+					<div key={log.id}>
+						<RawLogView
+							data={log}
+							isActiveLog={activeLog?.id === log.id}
+							linesPerRow={options.maxLines}
+							selectedFields={selectedFields}
+							fontSize={options.fontSize}
+							handleChangeSelectedView={handleChangeSelectedView}
+							onSetActiveLog={handleSetActiveLog}
+							onClearActiveLog={handleCloseLogDetail}
+						/>
+					</div>
 				);
 			}

 			return (
-				<ListLogView
-					key={log.id}
-					logData={log}
-					selectedFields={selectedFields}
-					linesPerRow={options.maxLines}
-					onAddToQuery={onAddToQuery}
-					onSetActiveLog={onSetActiveLog}
-					fontSize={options.fontSize}
-					handleChangeSelectedView={handleChangeSelectedView}
-				/>
+				<div key={log.id}>
+					<ListLogView
+						logData={log}
+						isActiveLog={activeLog?.id === log.id}
+						selectedFields={selectedFields}
+						linesPerRow={options.maxLines}
+						onAddToQuery={onAddToQuery}
+						onSetActiveLog={handleSetActiveLog}
+						onClearActiveLog={handleCloseLogDetail}
+						fontSize={options.fontSize}
+						handleChangeSelectedView={handleChangeSelectedView}
+					/>
+				</div>
 			);
 		},
-		[
-			handleChangeSelectedView,
-			onAddToQuery,
-			onSetActiveLog,
-			options.fontSize,
-			options.format,
-			options.maxLines,
-			selectedFields,
-		],
+		[
+			options.format,
+			options.maxLines,
+			options.fontSize,
+			activeLog?.id,
+			selectedFields,
+			onAddToQuery,
+			handleSetActiveLog,
+			handleCloseLogDetail,
+			handleChangeSelectedView,
+		],
 	);
@@ -156,6 +171,10 @@ function LiveLogsList({
 				activeLogIndex,
 			}}
 			handleChangeSelectedView={handleChangeSelectedView}
+			logs={formattedLogs}
+			onSetActiveLog={handleSetActiveLog}
+			onClearActiveLog={handleCloseLogDetail}
+			activeLog={activeLog}
 		/>
 	) : (
 		<Card style={{ width: '100%' }} bodyStyle={CARD_BODY_STYLE}>
@@ -173,14 +192,17 @@ function LiveLogsList({
 		</InfinityWrapperStyled>
 	)}

-	{activeLog && (
+	{activeLog && selectedTab && (
 		<LogDetail
-			selectedTab={VIEW_TYPES.OVERVIEW}
+			selectedTab={selectedTab}
 			log={activeLog}
-			onClose={onClearActiveLog}
+			onClose={handleCloseLogDetail}
 			onAddToQuery={onAddToQuery}
 			onClickActionItem={onAddToQuery}
 			handleChangeSelectedView={handleChangeSelectedView}
+			logs={formattedLogs}
+			onNavigateLog={handleSetActiveLog}
+			onScrollToLog={handleScrollToLog}
 		/>
 	)}
 </div>
@@ -395,7 +395,7 @@ export default function TableViewActions(
 			onOpenChange={setIsOpen}
 			arrow={false}
 			content={
-				<div>
+				<div data-log-detail-ignore="true">
 					<Button
 						className="more-filter-actions"
 						type="text"
@@ -481,7 +481,7 @@ export default function TableViewActions(
 			onOpenChange={setIsOpen}
 			arrow={false}
 			content={
-				<div>
+				<div data-log-detail-ignore="true">
 					<Button
 						className="more-filter-actions"
 						type="text"
@@ -7,6 +7,7 @@ import {
 	useMemo,
 } from 'react';
 import { ColumnsType } from 'antd/es/table';
+import { VIEW_TYPES } from 'components/LogDetail/constants';
 import LogLinesActionButtons from 'components/Logs/LogLinesActionButtons/LogLinesActionButtons';
 import { ColumnTypeRender } from 'components/Logs/TableView/types';
 import { FontSize } from 'container/OptionsMenu/types';
@@ -22,22 +23,27 @@ interface TableRowProps {
 	tableColumns: ColumnsType<Record<string, unknown>>;
 	index: number;
 	log: Record<string, unknown>;
-	handleSetActiveContextLog: (log: ILog) => void;
-	onShowLogDetails: (log: ILog) => void;
+	onShowLogDetails?: (
+		log: ILog,
+		selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
+	) => void;
 	logs: ILog[];
 	hasActions: boolean;
 	fontSize: FontSize;
+	isActiveLog?: boolean;
+	onClearActiveLog?: () => void;
 }

 export default function TableRow({
 	tableColumns,
 	index,
 	log,
-	handleSetActiveContextLog,
 	onShowLogDetails,
 	logs,
 	hasActions,
 	fontSize,
+	isActiveLog,
+	onClearActiveLog,
 }: TableRowProps): JSX.Element {
 	const isDarkMode = useIsDarkMode();

@@ -52,21 +58,31 @@
 		(event) => {
 			event.preventDefault();
 			event.stopPropagation();
-			if (!handleSetActiveContextLog || !currentLog) {
+			if (!currentLog) {
 				return;
 			}
-
-			handleSetActiveContextLog(currentLog);
+			onShowLogDetails?.(currentLog, VIEW_TYPES.CONTEXT);
 		},
-		[currentLog, handleSetActiveContextLog],
+		[currentLog, onShowLogDetails],
 	);

 	const handleShowLogDetails = useCallback(() => {
-		if (!onShowLogDetails || !currentLog) {
+		if (!currentLog) {
 			return;
 		}
-		onShowLogDetails(currentLog);
-	}, [currentLog, onShowLogDetails]);
+
+		// If this log is already active, close the detail drawer
+		if (isActiveLog && onClearActiveLog) {
+			onClearActiveLog();
+			return;
+		}
+
+		// Otherwise, open the detail drawer for this log
+		if (onShowLogDetails) {
+			onShowLogDetails(currentLog);
+		}
+	}, [currentLog, onShowLogDetails, isActiveLog, onClearActiveLog]);

 	const hasSingleColumn =
 		tableColumns.filter((column) => column.key !== 'state-indicator').length ===
@@ -4,7 +4,6 @@ import {
 	TableVirtuoso,
 	TableVirtuosoHandle,
 } from 'react-virtuoso';
-import LogDetail from 'components/LogDetail';
 import { VIEW_TYPES } from 'components/LogDetail/constants';
 import { getLogIndicatorType } from 'components/Logs/LogStateIndicator/utils';
 import { useTableView } from 'components/Logs/TableView/useTableView';
@@ -58,26 +57,40 @@ const CustomTableRow: TableComponents<ILog>['TableRow'] = ({

 const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
 	function InfinityTableView(
-		{ isLoading, tableViewProps, infitiyTableProps, handleChangeSelectedView },
-		ref,
-	): JSX.Element | null {
-		const {
-			activeLog: activeContextLog,
-			onSetActiveLog: handleSetActiveContextLog,
-			onClearActiveLog: handleClearActiveContextLog,
-			onAddToQuery: handleAddToQuery,
-		} = useActiveLog();
-		const {
-			activeLog,
-			onSetActiveLog,
-			onClearActiveLog,
-			onAddToQuery,
-		} = useActiveLog();
+		{
+			isLoading,
+			tableViewProps,
+			infitiyTableProps,
+			handleChangeSelectedView,
+			onSetActiveLog,
+			onClearActiveLog,
+			onAddToQuery,
+			activeLog,
+		},
+		ref,
+	): JSX.Element | null {
+		const { activeLog: activeContextLog } = useActiveLog();
+
+		const onSetActiveLogExpand = useCallback(
+			(log: ILog) => {
+				onSetActiveLog?.(log);
+			},
+			[onSetActiveLog],
+		);
+
+		const onSetActiveLogContext = useCallback(
+			(log: ILog) => {
+				onSetActiveLog?.(log, VIEW_TYPES.CONTEXT);
+			},
+			[onSetActiveLog],
+		);
+
+		const onCloseActiveLog = useCallback(() => {
+			onClearActiveLog?.();
+		}, [onClearActiveLog]);

 		const { dataSource, columns } = useTableView({
 			...tableViewProps,
-			onClickExpand: onSetActiveLog,
-			onOpenLogsContext: handleSetActiveContextLog,
+			onClickExpand: onSetActiveLogExpand,
+			onOpenLogsContext: onSetActiveLogContext,
 		});

 		const { draggedColumns, onDragColumns } = useDragColumns<
@@ -98,27 +111,32 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
 		const itemContent = useCallback(
-			(index: number, log: Record<string, unknown>): JSX.Element => (
-				<TableRow
-					tableColumns={tableColumns}
-					index={index}
-					log={log}
-					handleSetActiveContextLog={handleSetActiveContextLog}
-					logs={tableViewProps.logs}
-					hasActions
-					fontSize={tableViewProps.fontSize}
-					onShowLogDetails={onSetActiveLog}
-				/>
-			),
+			(index: number, log: Record<string, unknown>): JSX.Element => {
+				return (
+					<div key={log.id as string}>
+						<TableRow
+							tableColumns={tableColumns}
+							index={index}
+							log={log}
+							logs={tableViewProps.logs}
+							hasActions
+							fontSize={tableViewProps.fontSize}
+							onShowLogDetails={onSetActiveLog}
+							isActiveLog={activeLog?.id === log.id}
+							onClearActiveLog={onCloseActiveLog}
+						/>
+					</div>
+				);
+			},
 			[
-				handleSetActiveContextLog,
 				tableColumns,
-				tableViewProps.fontSize,
-				tableViewProps.logs,
 				onSetActiveLog,
+				tableViewProps.logs,
+				tableViewProps.fontSize,
+				activeLog?.id,
+				onCloseActiveLog,
 			],
 		);

 		const tableHeader = useCallback(
 			() => (
 				<tr>
@@ -179,24 +197,6 @@ const InfinityTable = forwardRef<TableVirtuosoHandle, InfinityTableProps>(
 				? { endReached: infitiyTableProps.onEndReached }
 				: {})}
 			/>
-
-			{activeContextLog && (
-				<LogDetail
-					log={activeContextLog}
-					onClose={handleClearActiveContextLog}
-					onAddToQuery={handleAddToQuery}
-					selectedTab={VIEW_TYPES.CONTEXT}
-					handleChangeSelectedView={handleChangeSelectedView}
-				/>
-			)}
-			<LogDetail
-				selectedTab={VIEW_TYPES.OVERVIEW}
-				log={activeLog}
-				onClose={onClearActiveLog}
-				onAddToQuery={onAddToQuery}
-				onClickActionItem={onAddToQuery}
-				handleChangeSelectedView={handleChangeSelectedView}
-			/>
 		</>
 	);
 },
@@ -1,5 +1,7 @@
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
import { UseTableViewProps } from 'components/Logs/TableView/types';
|
||||
import { ChangeViewFunctionType } from 'container/ExplorerOptions/types';
|
||||
import { ILog } from 'types/api/logs/log';
|
||||
|
||||
export type InfinityTableProps = {
|
||||
isLoading?: boolean;
|
||||
@@ -8,4 +10,11 @@ export type InfinityTableProps = {
|
||||
onEndReached: (index: number) => void;
|
||||
};
|
||||
handleChangeSelectedView?: ChangeViewFunctionType;
|
||||
logs?: ILog[];
|
||||
onSetActiveLog?: (
|
||||
log: ILog,
|
||||
selectedTab?: typeof VIEW_TYPES[keyof typeof VIEW_TYPES],
|
||||
) => void;
|
||||
onClearActiveLog?: () => void;
|
||||
activeLog?: ILog | null;
|
||||
};
|
||||
|
||||
@@ -4,7 +4,6 @@ import { Card } from 'antd';
|
||||
import logEvent from 'api/common/logEvent';
|
||||
import ErrorInPlace from 'components/ErrorInPlace/ErrorInPlace';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
// components
|
||||
import ListLogView from 'components/Logs/ListLogView';
|
||||
import RawLogView from 'components/Logs/RawLogView';
|
||||
@@ -16,8 +15,9 @@ import EmptyLogsSearch from 'container/EmptyLogsSearch/EmptyLogsSearch';
|
||||
import { LogsLoading } from 'container/LogsLoading/LogsLoading';
|
||||
import { useOptionsMenu } from 'container/OptionsMenu';
|
||||
import { FontSize } from 'container/OptionsMenu/types';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import { useCopyLogLink } from 'hooks/logs/useCopyLogLink';
|
||||
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
|
||||
import useScrollToLog from 'hooks/logs/useScrollToLog';
|
||||
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
|
||||
import APIError from 'types/api/error';
|
||||
// interfaces
|
||||
@@ -55,10 +55,11 @@ function LogsExplorerList({
|
||||
|
||||
const {
|
||||
activeLog,
|
||||
onClearActiveLog,
|
||||
onAddToQuery,
|
||||
onSetActiveLog,
|
||||
} = useActiveLog();
|
||||
selectedTab,
|
||||
handleSetActiveLog,
|
||||
handleCloseLogDetail,
|
||||
} = useLogDetailHandlers();
|
||||
|
||||
const { options } = useOptionsMenu({
|
||||
storageKey: LOCALSTORAGE.LOGS_LIST_OPTIONS,
|
||||
@@ -82,6 +83,12 @@ function LogsExplorerList({
|
||||
() => convertKeysToColumnFields(options.selectColumns),
|
||||
[options],
|
||||
);
|
||||
|
||||
const handleScrollToLog = useScrollToLog({
|
||||
logs,
|
||||
virtuosoRef: ref,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
if (!isLoading && !isFetching && !isError && logs.length !== 0) {
|
||||
logEvent('Logs Explorer: Data present', {
|
||||
@@ -94,40 +101,48 @@ function LogsExplorerList({
|
||||
(_: number, log: ILog): JSX.Element => {
|
||||
if (options.format === 'raw') {
|
||||
return (
|
||||
<RawLogView
|
||||
key={log.id}
|
||||
data={log}
|
||||
linesPerRow={options.maxLines}
|
||||
selectedFields={selectedFields}
|
||||
fontSize={options.fontSize}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
<div key={log.id}>
|
||||
<RawLogView
|
||||
data={log}
|
||||
isActiveLog={activeLog?.id === log.id}
|
||||
linesPerRow={options.maxLines}
|
||||
selectedFields={selectedFields}
|
||||
fontSize={options.fontSize}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
onSetActiveLog={handleSetActiveLog}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<ListLogView
|
||||
key={log.id}
|
||||
logData={log}
|
||||
selectedFields={selectedFields}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onSetActiveLog={onSetActiveLog}
|
||||
activeLog={activeLog}
|
||||
fontSize={options.fontSize}
|
||||
linesPerRow={options.maxLines}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
<div key={log.id}>
|
||||
<ListLogView
|
||||
logData={log}
|
||||
isActiveLog={activeLog?.id === log.id}
|
||||
selectedFields={selectedFields}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onSetActiveLog={handleSetActiveLog}
|
||||
activeLog={activeLog}
|
||||
fontSize={options.fontSize}
|
||||
linesPerRow={options.maxLines}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
[
|
||||
activeLog,
|
||||
handleChangeSelectedView,
|
||||
onAddToQuery,
|
||||
onSetActiveLog,
|
||||
options.fontSize,
|
||||
options.format,
|
||||
options.fontSize,
|
||||
options.maxLines,
|
||||
activeLog,
|
||||
selectedFields,
|
||||
onAddToQuery,
|
||||
handleSetActiveLog,
|
||||
handleChangeSelectedView,
|
||||
handleCloseLogDetail,
|
||||
],
|
||||
);
|
||||
|
||||
@@ -153,6 +168,10 @@ function LogsExplorerList({
|
||||
}}
|
||||
infitiyTableProps={{ onEndReached }}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={logs}
|
||||
onSetActiveLog={handleSetActiveLog}
|
||||
onClearActiveLog={handleCloseLogDetail}
|
||||
activeLog={activeLog}
|
||||
/>
|
||||
);
|
||||
}
|
||||
@@ -199,6 +218,9 @@ function LogsExplorerList({
|
||||
getItemContent,
|
||||
selectedFields,
|
||||
handleChangeSelectedView,
|
||||
handleSetActiveLog,
|
||||
handleCloseLogDetail,
|
||||
activeLog,
|
||||
]);
|
||||
|
||||
const isTraceToLogsNavigation = useMemo(() => {
|
||||
@@ -278,14 +300,19 @@ function LogsExplorerList({
|
||||
{renderContent}
|
||||
</InfinityWrapperStyled>
|
||||
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
/>
|
||||
{selectedTab && activeLog && (
|
||||
<LogDetail
|
||||
selectedTab={selectedTab}
|
||||
log={activeLog}
|
||||
onClose={handleCloseLogDetail}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
handleChangeSelectedView={handleChangeSelectedView}
|
||||
logs={logs}
|
||||
onNavigateLog={handleSetActiveLog}
|
||||
onScrollToLog={handleScrollToLog}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -466,7 +466,10 @@ function LogsExplorerViewsContainer({
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="logs-explorer-views-type-content">
|
||||
<div
|
||||
className="logs-explorer-views-type-content"
|
||||
data-log-detail-ignore="true"
|
||||
>
|
||||
{showLiveLogs && (
|
||||
<LiveLogs handleChangeSelectedView={handleChangeSelectedView} />
|
||||
)}
|
||||
|
||||
@@ -8,7 +8,6 @@ import {
|
||||
} from 'react';
|
||||
import { UseQueryResult } from 'react-query';
|
||||
import LogDetail from 'components/LogDetail';
|
||||
import { VIEW_TYPES } from 'components/LogDetail/constants';
|
||||
import OverlayScrollbar from 'components/OverlayScrollbar/OverlayScrollbar';
|
||||
import { ResizeTable } from 'components/ResizeTable';
|
||||
import { SOMETHING_WENT_WRONG } from 'constants/api';
|
||||
@@ -16,7 +15,7 @@ import { PANEL_TYPES } from 'constants/queryBuilder';
|
||||
import Controls from 'container/Controls';
|
||||
import { PER_PAGE_OPTIONS } from 'container/TracesExplorer/ListView/configs';
|
||||
import { tableStyles } from 'container/TracesExplorer/ListView/styles';
|
||||
import { useActiveLog } from 'hooks/logs/useActiveLog';
|
||||
import useLogDetailHandlers from 'hooks/logs/useLogDetailHandlers';
|
||||
import { useLogsData } from 'hooks/useLogsData';
|
||||
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
|
||||
import { FlatLogData } from 'lib/logs/flatLogData';
|
||||
@@ -83,24 +82,24 @@ function LogsPanelComponent({
|
||||
() => logs.map((log) => FlatLogData(log) as RowData),
|
||||
[logs],
|
||||
);
|
||||
|
||||
const {
|
||||
activeLog,
|
||||
onSetActiveLog,
|
||||
onClearActiveLog,
|
||||
onAddToQuery,
|
||||
} = useActiveLog();
|
||||
selectedTab,
|
||||
handleSetActiveLog,
|
||||
handleCloseLogDetail,
|
||||
} = useLogDetailHandlers();
|
||||
|
||||
const handleRow = useCallback(
|
||||
(record: RowData): HTMLAttributes<RowData> => ({
|
||||
onClick: (): void => {
|
||||
const log = logs.find((item) => item.id === record.id);
|
||||
if (log) {
|
||||
onSetActiveLog(log);
|
||||
handleSetActiveLog(log);
|
||||
}
|
||||
},
|
||||
}),
|
||||
[logs, onSetActiveLog],
|
||||
[handleSetActiveLog, logs],
|
||||
);
|
||||
|
||||
const handleRequestData = (newOffset: number): void => {
|
||||
@@ -132,7 +131,7 @@ function LogsPanelComponent({
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="logs-table">
|
||||
<div className="logs-table" data-log-detail-ignore="true">
|
||||
<div className="resize-table">
|
||||
<OverlayScrollbar>
|
||||
<ResizeTable
|
||||
@@ -166,15 +165,19 @@ function LogsPanelComponent({
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<LogDetail
|
||||
selectedTab={VIEW_TYPES.OVERVIEW}
|
||||
log={activeLog}
|
||||
onClose={onClearActiveLog}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
isListViewPanel
|
||||
listViewPanelSelectedFields={widget?.selectedLogFields}
|
||||
/>
|
||||
{selectedTab && activeLog && (
|
||||
<LogDetail
|
||||
selectedTab={selectedTab}
|
||||
log={activeLog}
|
||||
onClose={handleCloseLogDetail}
|
||||
onAddToQuery={onAddToQuery}
|
||||
onClickActionItem={onAddToQuery}
|
||||
isListViewPanel
|
||||
listViewPanelSelectedFields={widget?.selectedLogFields}
|
||||
logs={logs}
|
||||
onNavigateLog={handleSetActiveLog}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
59
frontend/src/hooks/logs/useLogDetailHandlers.ts
Normal file
@@ -0,0 +1,59 @@
import { useCallback, useState } from 'react';
import { VIEW_TYPES } from 'components/LogDetail/constants';
import type { UseActiveLog } from 'hooks/logs/types';
import { useActiveLog } from 'hooks/logs/useActiveLog';
import { ILog } from 'types/api/logs/log';

type SelectedTab = typeof VIEW_TYPES[keyof typeof VIEW_TYPES] | undefined;

type UseLogDetailHandlersParams = {
defaultTab?: SelectedTab;
};

type UseLogDetailHandlersResult = {
activeLog: UseActiveLog['activeLog'];
onAddToQuery: UseActiveLog['onAddToQuery'];
selectedTab: SelectedTab;
handleSetActiveLog: (log: ILog, selectedTab?: SelectedTab) => void;
handleCloseLogDetail: () => void;
};

function useLogDetailHandlers({
defaultTab = VIEW_TYPES.OVERVIEW,
}: UseLogDetailHandlersParams = {}): UseLogDetailHandlersResult {
const {
activeLog,
onSetActiveLog,
onClearActiveLog,
onAddToQuery,
} = useActiveLog();
const [selectedTab, setSelectedTab] = useState<SelectedTab>(defaultTab);

const handleSetActiveLog = useCallback(
(log: ILog, nextTab: SelectedTab = defaultTab): void => {
if (activeLog?.id === log.id) {
onClearActiveLog();
setSelectedTab(undefined);
return;
}
onSetActiveLog(log);
setSelectedTab(nextTab ?? defaultTab);
},
[activeLog?.id, defaultTab, onClearActiveLog, onSetActiveLog],
);

const handleCloseLogDetail = useCallback((): void => {
onClearActiveLog();
setSelectedTab(undefined);
}, [onClearActiveLog]);

return {
activeLog,
onAddToQuery,
selectedTab,
handleSetActiveLog,
handleCloseLogDetail,
};
}

export default useLogDetailHandlers;
28
frontend/src/hooks/logs/useScrollToLog.ts
Normal file
@@ -0,0 +1,28 @@
import { useCallback } from 'react';
import type { VirtuosoHandle } from 'react-virtuoso';

type UseScrollToLogParams = {
logs: Array<{ id: string }>;
virtuosoRef: React.RefObject<VirtuosoHandle | null>;
};

function useScrollToLog({
logs,
virtuosoRef,
}: UseScrollToLogParams): (logId: string) => void {
return useCallback(
(logId: string): void => {
const logIndex = logs.findIndex(({ id }) => id === logId);
if (logIndex !== -1 && virtuosoRef.current) {
virtuosoRef.current.scrollToIndex({
index: logIndex,
align: 'center',
behavior: 'smooth',
});
}
},
[logs, virtuosoRef],
);
}

export default useScrollToLog;
@@ -567,6 +567,15 @@ body {
border: 1px solid var(--bg-vanilla-300);
}

.ant-tooltip {
--antd-arrow-background-color: var(--bg-vanilla-100);

.ant-tooltip-inner {
background-color: var(--bg-vanilla-100);
color: var(--bg-ink-500);
}
}

.ant-dropdown-menu {
border: 1px solid var(--bg-vanilla-300);
background: var(--bg-vanilla-100);
@@ -9,10 +9,9 @@ export const getDefaultLogBackground = (
if (isReadOnly) {
return '';
}
// TODO handle the light mode here
return `&:hover {
background-color: ${
isDarkMode ? 'rgba(171, 189, 255, 0.04)' : 'var(--bg-vanilla-200)'
isDarkMode ? 'rgba(171, 189, 255, 0.04)' : 'rgba(0, 0, 0, 0.04)'
};
}`;
};
@@ -28,22 +27,38 @@ export const getActiveLogBackground = (
if (isDarkMode) {
switch (logType) {
case LogType.INFO:
return `background-color: ${Color.BG_ROBIN_500}10 !important;`;
return `background-color: ${Color.BG_ROBIN_500}40 !important;`;
case LogType.WARN:
return `background-color: ${Color.BG_AMBER_500}10 !important;`;
return `background-color: ${Color.BG_AMBER_500}40 !important;`;
case LogType.ERROR:
return `background-color: ${Color.BG_CHERRY_500}10 !important;`;
return `background-color: ${Color.BG_CHERRY_500}40 !important;`;
case LogType.TRACE:
return `background-color: ${Color.BG_FOREST_400}10 !important;`;
return `background-color: ${Color.BG_FOREST_400}40 !important;`;
case LogType.DEBUG:
return `background-color: ${Color.BG_AQUA_500}10 !important;`;
return `background-color: ${Color.BG_AQUA_500}40 !important;`;
case LogType.FATAL:
return `background-color: ${Color.BG_SAKURA_500}10 !important;`;
return `background-color: ${Color.BG_SAKURA_500}40 !important;`;
default:
return `background-color: ${Color.BG_SLATE_200} !important;`;
return `background-color: ${Color.BG_ROBIN_500}40 !important;`;
}
}
return `background-color: ${Color.BG_VANILLA_400}!important; color: ${Color.TEXT_SLATE_400} !important;`;
// Light mode - use lighter background colors
switch (logType) {
case LogType.INFO:
return `background-color: ${Color.BG_ROBIN_100} !important;`;
case LogType.WARN:
return `background-color: ${Color.BG_AMBER_100} !important;`;
case LogType.ERROR:
return `background-color: ${Color.BG_CHERRY_100} !important;`;
case LogType.TRACE:
return `background-color: ${Color.BG_FOREST_200} !important;`;
case LogType.DEBUG:
return `background-color: ${Color.BG_AQUA_100} !important;`;
case LogType.FATAL:
return `background-color: ${Color.BG_SAKURA_100} !important;`;
default:
return `background-color: ${Color.BG_VANILLA_300} !important;`;
}
};

export const getHightLightedLogBackground = (
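The dark-mode change above swaps the trailing suffix on the severity colors from 10 to 40. The trailing two hex digits of an 8-digit #RRGGBBAA color are the alpha channel, so this is an opacity bump. A quick arithmetic sketch (not part of the diff) decoding the two suffixes:

package main

import "fmt"

func main() {
	// Trailing two hex digits of an 8-digit CSS color are the alpha channel.
	for _, suffix := range []string{"10", "40"} {
		var alpha int
		fmt.Sscanf(suffix, "%x", &alpha) // parse hex suffix, e.g. "40" -> 64
		fmt.Printf("suffix %s -> alpha %d/255 (~%.0f%% opacity)\n",
			suffix, alpha, float64(alpha)/255*100)
	}
	// Output:
	// suffix 10 -> alpha 16/255 (~6% opacity)
	// suffix 40 -> alpha 64/255 (~25% opacity)
}

In other words, the active-log highlight moves from a barely visible ~6% tint of the severity color to a clearly visible ~25% tint.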
287
go.mod
@@ -3,22 +3,23 @@ module github.com/SigNoz/signoz
go 1.24.0

require (
dario.cat/mergo v1.0.2
dario.cat/mergo v1.0.1
github.com/AfterShip/clickhouse-sql-parser v0.4.16
github.com/ClickHouse/clickhouse-go/v2 v2.40.1
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
github.com/SigNoz/signoz-otel-collector v0.142.1-rc.1
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/antonmedv/expr v1.15.3
github.com/bytedance/sonic v1.14.1
github.com/cespare/xxhash/v2 v2.3.0
github.com/coreos/go-oidc/v3 v3.16.0
github.com/coreos/go-oidc/v3 v3.14.1
github.com/dgraph-io/ristretto/v2 v2.3.0
github.com/dustin/go-humanize v1.0.1
github.com/gin-gonic/gin v1.11.0
github.com/go-co-op/gocron v1.30.1
github.com/go-openapi/runtime v0.28.0
github.com/go-openapi/strfmt v0.24.0
github.com/go-openapi/strfmt v0.23.0
github.com/go-redis/redismock/v9 v9.2.0
github.com/go-viper/mapstructure/v2 v2.4.0
github.com/gojek/heimdall/v7 v7.0.3
@@ -29,22 +30,22 @@ require (
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
github.com/huandu/go-sqlbuilder v1.35.0
github.com/jackc/pgx/v5 v5.7.6
github.com/json-iterator/go v1.1.13-0.20220915233716-71ac16282d12
github.com/json-iterator/go v1.1.12
github.com/knadh/koanf v1.5.0
github.com/knadh/koanf/v2 v2.3.0
github.com/mailru/easyjson v0.9.0
github.com/open-telemetry/opamp-go v0.22.0
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.142.0
github.com/knadh/koanf/v2 v2.2.0
github.com/mailru/easyjson v0.7.7
github.com/open-telemetry/opamp-go v0.19.0
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
github.com/opentracing/opentracing-go v1.2.0
github.com/pkg/errors v0.9.1
github.com/prometheus/alertmanager v0.28.1
github.com/prometheus/client_golang v1.23.2
github.com/prometheus/common v0.67.4
github.com/prometheus/prometheus v0.308.0
github.com/prometheus/common v0.66.1
github.com/prometheus/prometheus v0.304.1
github.com/redis/go-redis/extra/redisotel/v9 v9.15.1
github.com/redis/go-redis/v9 v9.17.2
github.com/redis/go-redis/v9 v9.15.1
github.com/rs/cors v1.11.1
github.com/russellhaering/gosaml2 v0.9.0
github.com/russellhaering/goxmldsig v1.2.0
@@ -53,7 +54,7 @@ require (
github.com/sethvargo/go-password v0.2.0
github.com/smartystreets/goconvey v1.8.1
github.com/soheilhy/cmux v0.1.5
github.com/spf13/cobra v1.10.2
github.com/spf13/cobra v1.10.1
github.com/srikanthccv/ClickHouse-go-mock v0.13.0
github.com/stretchr/testify v1.11.1
github.com/swaggest/jsonschema-go v0.3.78
@@ -63,59 +64,43 @@ require (
github.com/uptrace/bun/dialect/pgdialect v1.2.9
github.com/uptrace/bun/dialect/sqlitedialect v1.2.9
github.com/uptrace/bun/extra/bunotel v1.2.9
go.opentelemetry.io/collector/confmap v1.48.0
go.opentelemetry.io/collector/otelcol v0.142.0
go.opentelemetry.io/collector/pdata v1.48.0
go.opentelemetry.io/collector/confmap v1.34.0
go.opentelemetry.io/collector/otelcol v0.128.0
go.opentelemetry.io/collector/pdata v1.34.0
go.opentelemetry.io/contrib/config v0.10.0
go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0
go.opentelemetry.io/otel v1.39.0
go.opentelemetry.io/otel/metric v1.39.0
go.opentelemetry.io/otel/sdk v1.39.0
go.opentelemetry.io/otel/trace v1.39.0
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
go.opentelemetry.io/otel v1.38.0
go.opentelemetry.io/otel/metric v1.38.0
go.opentelemetry.io/otel/sdk v1.38.0
go.opentelemetry.io/otel/trace v1.38.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.27.1
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.46.0
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
golang.org/x/net v0.48.0
golang.org/x/oauth2 v0.33.0
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
golang.org/x/net v0.47.0
golang.org/x/oauth2 v0.30.0
golang.org/x/sync v0.19.0
golang.org/x/text v0.32.0
google.golang.org/protobuf v1.36.10
google.golang.org/protobuf v1.36.9
gopkg.in/yaml.v2 v2.4.0
gopkg.in/yaml.v3 v3.0.1
k8s.io/apimachinery v0.35.0-alpha.0
k8s.io/apimachinery v0.34.0
modernc.org/sqlite v1.39.1
)

require (
github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
github.com/aws/aws-sdk-go-v2/config v1.32.1 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.19.1 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.14.1 // indirect
github.com/bytedance/sonic/loader v0.3.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.27.0 // indirect
github.com/goccy/go-yaml v1.19.0 // indirect
github.com/goccy/go-yaml v1.18.0 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/prometheus/client_golang/exp v0.0.0-20250914183048-a974e0d45e0a // indirect
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/swaggest/refl v1.4.0 // indirect
@@ -123,27 +108,24 @@ require (
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.3.0 // indirect
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
go.opentelemetry.io/collector/client v1.48.0 // indirect
go.opentelemetry.io/collector/config/configoptional v1.48.0 // indirect
go.opentelemetry.io/collector/config/configretry v1.48.0 // indirect
go.opentelemetry.io/collector/exporter/exporterhelper v0.142.0 // indirect
go.opentelemetry.io/collector/pdata/xpdata v0.142.0 // indirect
go.yaml.in/yaml/v2 v2.4.3 // indirect
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
go.yaml.in/yaml/v2 v2.4.2 // indirect
golang.org/x/arch v0.20.0 // indirect
golang.org/x/tools/godoc v0.1.0-deprecated // indirect
modernc.org/libc v1.66.10 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
)

require (
cel.dev/expr v0.25.1 // indirect
cloud.google.com/go/auth v0.17.0 // indirect
cel.dev/expr v0.24.0 // indirect
cloud.google.com/go/auth v0.16.1 // indirect
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
cloud.google.com/go/compute/metadata v0.9.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect
cloud.google.com/go/compute/metadata v0.8.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
github.com/ClickHouse/ch-go v0.67.0 // indirect
github.com/Masterminds/squirrel v1.5.4 // indirect
github.com/Yiling-J/theine-go v0.6.2 // indirect
@@ -151,35 +133,35 @@ require (
github.com/andybalholm/brotli v1.2.0 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/aws/aws-sdk-go v1.55.8 // indirect
github.com/aws/aws-sdk-go v1.55.7 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
github.com/coder/quartz v0.1.2 // indirect
github.com/coreos/go-systemd/v22 v22.6.0 // indirect
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dennwc/varint v1.0.0 // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/ebitengine/purego v0.9.1 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/edsrzf/mmap-go v1.2.0 // indirect
github.com/elastic/lunes v0.2.0 // indirect
github.com/elastic/lunes v0.1.0 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
github.com/expr-lang/expr v1.17.7
github.com/expr-lang/expr v1.17.5
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-faster/city v1.0.1 // indirect
github.com/go-faster/errors v0.7.1 // indirect
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
github.com/go-jose/go-jose/v4 v4.1.1 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-openapi/analysis v0.23.0 // indirect
github.com/go-openapi/errors v0.22.3 // indirect
github.com/go-openapi/errors v0.22.0 // indirect
github.com/go-openapi/jsonpointer v0.21.0 // indirect
github.com/go-openapi/jsonreference v0.21.0 // indirect
github.com/go-openapi/loads v0.22.0 // indirect
@@ -196,19 +178,19 @@ require (
github.com/google/btree v1.1.3 // indirect
github.com/google/cel-go v0.26.1 // indirect
github.com/google/s2a-go v0.1.9 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/grafana/regexp v0.0.0-20250905093917-f7b3be9d1853 // indirect
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-sockaddr v1.0.7 // indirect
github.com/hashicorp/go-version v1.8.0 // indirect
github.com/hashicorp/go-version v1.7.0 // indirect
github.com/hashicorp/golang-lru v1.0.2 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/hashicorp/memberlist v0.5.1 // indirect
@@ -224,21 +206,21 @@ require (
github.com/josharian/intern v1.0.0 // indirect
github.com/jpillora/backoff v1.0.0 // indirect
github.com/jtolds/gls v4.20.0+incompatible // indirect
github.com/klauspost/compress v1.18.2 // indirect
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
github.com/leodido/go-syslog/v4 v4.3.0 // indirect
github.com/leodido/go-syslog/v4 v4.2.0 // indirect
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
github.com/magefile/mage v1.15.0 // indirect
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mdlayher/socket v0.5.1 // indirect
github.com/mdlayher/socket v0.4.1 // indirect
github.com/mdlayher/vsock v1.2.1 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/miekg/dns v1.1.68 // indirect
github.com/miekg/dns v1.1.65 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
github.com/mitchellh/reflectwalk v1.0.2 // indirect
@@ -247,14 +229,14 @@ require (
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
github.com/natefinch/wrap v0.2.0 // indirect
github.com/oklog/run v1.2.0 // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/oklog/ulid/v2 v2.1.1
github.com/open-feature/go-sdk v1.17.0
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.142.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
github.com/openfga/openfga v1.10.1
github.com/paulmach/orb v0.11.1 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
@@ -264,10 +246,10 @@ require (
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
github.com/pressly/goose/v3 v3.25.0 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/exporter-toolkit v0.15.0 // indirect
github.com/prometheus/otlptranslator v1.0.0 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
github.com/prometheus/sigv4 v0.3.0 // indirect
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
github.com/prometheus/procfs v0.16.1 // indirect
github.com/prometheus/sigv4 v0.1.2 // indirect
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/sagikazarmark/locafero v0.9.0 // indirect
@@ -275,7 +257,7 @@ require (
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/backo-go v1.0.1 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/shirou/gopsutil/v4 v4.25.11 // indirect
github.com/shirou/gopsutil/v4 v4.25.5 // indirect
github.com/shopspring/decimal v1.4.0 // indirect
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
@@ -290,9 +272,9 @@ require (
github.com/subosito/gotenv v1.6.0 // indirect
github.com/swaggest/openapi-go v0.2.60
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tklauser/go-sysconf v0.3.16 // indirect
github.com/tklauser/numcpus v0.11.0 // indirect
github.com/tidwall/pretty v1.2.0 // indirect
github.com/tklauser/go-sysconf v0.3.15 // indirect
github.com/tklauser/numcpus v0.10.0 // indirect
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
github.com/trivago/tgo v1.0.7 // indirect
github.com/valyala/fastjson v1.6.4 // indirect
@@ -301,82 +283,83 @@ require (
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.mongodb.org/mongo-driver v1.17.4 // indirect
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
go.opentelemetry.io/collector/component v1.48.0 // indirect
go.opentelemetry.io/collector/component/componentstatus v0.142.0 // indirect
go.opentelemetry.io/collector/component/componenttest v0.142.0 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.142.0 // indirect
go.opentelemetry.io/collector/confmap/provider/envprovider v1.48.0 // indirect
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.48.0 // indirect
go.opentelemetry.io/collector/confmap/xconfmap v0.142.0 // indirect
go.opentelemetry.io/collector/connector v0.142.0 // indirect
go.opentelemetry.io/collector/connector/connectortest v0.142.0 // indirect
go.opentelemetry.io/collector/connector/xconnector v0.142.0 // indirect
go.opentelemetry.io/collector/consumer v1.48.0 // indirect
go.opentelemetry.io/collector/consumer/consumererror v0.142.0 // indirect
go.opentelemetry.io/collector/consumer/consumertest v0.142.0 // indirect
go.opentelemetry.io/collector/consumer/xconsumer v0.142.0 // indirect
go.opentelemetry.io/collector/exporter v1.48.0 // indirect
go.opentelemetry.io/collector/exporter/exportertest v0.142.0 // indirect
go.opentelemetry.io/collector/exporter/xexporter v0.142.0 // indirect
go.opentelemetry.io/collector/extension v1.48.0 // indirect
go.opentelemetry.io/collector/extension/extensioncapabilities v0.142.0 // indirect
go.opentelemetry.io/collector/extension/extensiontest v0.142.0 // indirect
go.opentelemetry.io/collector/extension/xextension v0.142.0 // indirect
go.opentelemetry.io/collector/featuregate v1.48.0 // indirect
go.opentelemetry.io/collector/internal/fanoutconsumer v0.142.0 // indirect
go.opentelemetry.io/collector/internal/telemetry v0.142.0 // indirect
go.opentelemetry.io/collector/pdata/pprofile v0.142.0 // indirect
go.opentelemetry.io/collector/pdata/testdata v0.142.0 // indirect
go.opentelemetry.io/collector/pipeline v1.48.0 // indirect
go.opentelemetry.io/collector/pipeline/xpipeline v0.142.0 // indirect
go.opentelemetry.io/collector/processor v1.48.0 // indirect
go.opentelemetry.io/collector/processor/processorhelper v0.142.0 // indirect
go.opentelemetry.io/collector/processor/processortest v0.142.0 // indirect
go.opentelemetry.io/collector/processor/xprocessor v0.142.0 // indirect
go.opentelemetry.io/collector/receiver v1.48.0 // indirect
go.opentelemetry.io/collector/receiver/receiverhelper v0.142.0 // indirect
go.opentelemetry.io/collector/receiver/receivertest v0.142.0 // indirect
go.opentelemetry.io/collector/receiver/xreceiver v0.142.0 // indirect
go.opentelemetry.io/collector/semconv v0.128.1-0.20250610090210-188191247685
go.opentelemetry.io/collector/service v0.142.0 // indirect
go.opentelemetry.io/collector/service/hostcapabilities v0.142.0 // indirect
go.opentelemetry.io/contrib/bridges/otelzap v0.13.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect
go.opentelemetry.io/contrib/otelconf v0.18.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.60.0
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.39.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.39.0 // indirect
go.opentelemetry.io/otel/log v0.15.0 // indirect
go.opentelemetry.io/otel/sdk/log v0.14.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.39.0
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
go.mongodb.org/mongo-driver v1.17.1 // indirect
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
go.opentelemetry.io/collector/component v1.34.0 // indirect
go.opentelemetry.io/collector/component/componentstatus v0.128.0 // indirect
go.opentelemetry.io/collector/component/componenttest v0.128.0 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.128.0 // indirect
go.opentelemetry.io/collector/confmap/provider/envprovider v1.34.0 // indirect
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.34.0 // indirect
go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect
go.opentelemetry.io/collector/connector v0.128.0 // indirect
go.opentelemetry.io/collector/connector/connectortest v0.128.0 // indirect
go.opentelemetry.io/collector/connector/xconnector v0.128.0 // indirect
go.opentelemetry.io/collector/consumer v1.34.0 // indirect
go.opentelemetry.io/collector/consumer/consumererror v0.128.0 // indirect
go.opentelemetry.io/collector/consumer/consumertest v0.128.0 // indirect
go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 // indirect
go.opentelemetry.io/collector/exporter v0.128.0 // indirect
go.opentelemetry.io/collector/exporter/exportertest v0.128.0 // indirect
go.opentelemetry.io/collector/exporter/xexporter v0.128.0 // indirect
go.opentelemetry.io/collector/extension v1.34.0 // indirect
go.opentelemetry.io/collector/extension/extensioncapabilities v0.128.0 // indirect
go.opentelemetry.io/collector/extension/extensiontest v0.128.0 // indirect
go.opentelemetry.io/collector/extension/xextension v0.128.0 // indirect
go.opentelemetry.io/collector/featuregate v1.34.0 // indirect
go.opentelemetry.io/collector/internal/fanoutconsumer v0.128.0 // indirect
go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect
go.opentelemetry.io/collector/pdata/pprofile v0.128.0 // indirect
go.opentelemetry.io/collector/pdata/testdata v0.128.0 // indirect
go.opentelemetry.io/collector/pipeline v0.128.0 // indirect
go.opentelemetry.io/collector/pipeline/xpipeline v0.128.0 // indirect
go.opentelemetry.io/collector/processor v1.34.0 // indirect
go.opentelemetry.io/collector/processor/processorhelper v0.128.0 // indirect
go.opentelemetry.io/collector/processor/processortest v0.128.0 // indirect
go.opentelemetry.io/collector/processor/xprocessor v0.128.0 // indirect
go.opentelemetry.io/collector/receiver v1.34.0 // indirect
go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect
go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect
go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect
go.opentelemetry.io/collector/semconv v0.128.0
go.opentelemetry.io/collector/service v0.128.0 // indirect
go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect
go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 // indirect
go.opentelemetry.io/contrib/otelconf v0.16.0 // indirect
go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/prometheus v0.58.0
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
go.opentelemetry.io/otel/log v0.12.2 // indirect
go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect
go.opentelemetry.io/otel/sdk/metric v1.38.0
go.opentelemetry.io/proto/otlp v1.8.0 // indirect
go.uber.org/atomic v1.11.0 // indirect
go.uber.org/mock v0.6.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/mod v0.30.0 // indirect
golang.org/x/sys v0.39.0 // indirect
golang.org/x/time v0.14.0 // indirect
golang.org/x/time v0.11.0 // indirect
golang.org/x/tools v0.39.0 // indirect
gonum.org/v1/gonum v0.16.0 // indirect
google.golang.org/api v0.257.0
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect
google.golang.org/grpc v1.77.0 // indirect
google.golang.org/api v0.236.0
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
google.golang.org/grpc v1.75.1 // indirect
gopkg.in/telebot.v3 v3.3.8 // indirect
k8s.io/client-go v0.34.2 // indirect
k8s.io/client-go v0.34.0 // indirect
k8s.io/klog/v2 v2.130.1 // indirect
k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect
sigs.k8s.io/yaml v1.6.0 // indirect
)

replace github.com/expr-lang/expr => github.com/SigNoz/expr v1.17.7-beta
@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/http/render"
"github.com/swaggest/openapi-go"
"github.com/swaggest/openapi-go/openapi3"
)

type ServeOpenAPIFunc func(openapi.OperationContext)
@@ -59,7 +60,39 @@ func (handler *handler) ServeOpenAPI(opCtx openapi.OperationContext) {
}

// Add request structure
opCtx.AddReqStructure(handler.openAPIDef.Request, openapi.WithContentType(handler.openAPIDef.RequestContentType))
reqOpts := []openapi.ContentOption{openapi.WithContentType(handler.openAPIDef.RequestContentType)}
if len(handler.openAPIDef.RequestExamples) > 0 {
reqOpts = append(reqOpts, openapi.WithCustomize(func(cor openapi.ContentOrReference) {
rbOrRef, ok := cor.(*openapi3.RequestBodyOrRef)
if !ok || rbOrRef.RequestBody == nil {
return
}
ct := handler.openAPIDef.RequestContentType
if ct == "" {
ct = "application/json"
}
mt, exists := rbOrRef.RequestBody.Content[ct]
if !exists {
return
}
if mt.Examples == nil {
mt.Examples = make(map[string]openapi3.ExampleOrRef)
}
for _, ex := range handler.openAPIDef.RequestExamples {
val := ex.Value
oaExample := openapi3.Example{Value: &val}
if ex.Summary != "" {
oaExample.WithSummary(ex.Summary)
}
if ex.Description != "" {
oaExample.WithDescription(ex.Description)
}
mt.Examples[ex.Name] = openapi3.ExampleOrRef{Example: &oaExample}
}
rbOrRef.RequestBody.Content[ct] = mt
}))
}
opCtx.AddReqStructure(handler.openAPIDef.Request, reqOpts...)

// Add request query structure
opCtx.AddReqStructure(handler.openAPIDef.RequestQuery)
@@ -9,6 +9,14 @@ import (
"github.com/swaggest/rest/openapi"
)

// OpenAPIExample is a named example for an OpenAPI operation.
type OpenAPIExample struct {
Name string
Summary string
Description string
Value any
}

// OpenAPIDef is the definition of an OpenAPI operation.
type OpenAPIDef struct {
ID string
@@ -18,6 +26,7 @@ type OpenAPIDef struct {
Request any
RequestQuery any
RequestContentType string
RequestExamples []OpenAPIExample
Response any
ResponseContentType string
SuccessStatusCode int
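To make the new field concrete, here is a minimal sketch of a route attaching a named request example through OpenAPIDef. The widgets package, operation, and payload are hypothetical, invented for illustration; only the OpenAPIDef and OpenAPIExample fields come from the types above. The real usage for the /api/v5/query_range endpoint is QueryRangeV5OpenAPIDef in pkg/querier/openapi.go further down.

package widgets // hypothetical package, for illustration only

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
)

// A hedged sketch: ServeOpenAPI above will surface the example under
// requestBody.content["application/json"].examples["basic_widget"].
var createWidgetOpenAPIDef = handler.OpenAPIDef{
	ID:                 "CreateWidget", // hypothetical operation
	Request:            new(map[string]any),
	RequestContentType: "application/json",
	RequestExamples: []handler.OpenAPIExample{
		{
			Name:    "basic_widget",
			Summary: "Create a widget with only a name",
			Value:   map[string]any{"name": "cpu-usage"},
		},
	},
	Response:            new(map[string]any),
	ResponseContentType: "application/json",
	SuccessStatusCode:   http.StatusOK,
}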
@@ -76,7 +76,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype

// add the paths that are not promoted but have indexes
for path, indexes := range aggr {
path := strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
path = telemetrytypes.BodyJSONStringSearchPrefix + path
response = append(response, promotetypes.PromotePath{
Path: path,
@@ -156,7 +156,7 @@ func (m *module) PromoteAndIndexPaths(
}
}
if len(it.Indexes) > 0 {
parentColumn := telemetrylogs.LogsV2BodyV2Column
parentColumn := telemetrylogs.LogsV2BodyJSONColumn
// if the path is already promoted or is being promoted, add it to the promoted column
if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
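The rename above also covers how a raw storage-column path is rewritten into a user-facing search path. A standalone sketch of that rewrite follows; the prefix values are placeholders and an assumption here, since the real BodyJSONColumnPrefix and BodyJSONStringSearchPrefix constants live in the telemetrylogs and telemetrytypes packages.

package main

import (
	"fmt"
	"strings"
)

// Assumed placeholder values; the real constants are defined in
// pkg/telemetrylogs and pkg/types/telemetrytypes.
const (
	bodyJSONColumnPrefix       = "body_json."
	bodyJSONStringSearchPrefix = "body."
)

func main() {
	raw := "body_json.user.id"
	// Same shape as ListPromotedAndIndexedPaths: strip the storage-column
	// prefix, then prepend the search prefix shown to users.
	path := strings.TrimPrefix(raw, bodyJSONColumnPrefix)
	path = bodyJSONStringSearchPrefix + path
	fmt.Println(path) // body.user.id
}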
@@ -87,24 +87,6 @@ func (client *client) Read(ctx context.Context, query *prompb.Query, sortSeries
return remote.FromQueryResult(sortSeries, res), nil
}

func (c *client) ReadMultiple(ctx context.Context, queries []*prompb.Query, sortSeries bool) (storage.SeriesSet, error) {
if len(queries) == 0 {
return storage.EmptySeriesSet(), nil
}
if len(queries) == 1 {
return c.Read(ctx, queries[0], sortSeries)
}
sets := make([]storage.SeriesSet, 0, len(queries))
for _, q := range queries {
ss, err := c.Read(ctx, q, sortSeries)
if err != nil {
return nil, err
}
sets = append(sets, ss)
}
return storage.NewMergeSeriesSet(sets, 0, storage.ChainedSeriesMerge), nil
}

func (client *client) queryToClickhouseQuery(_ context.Context, query *prompb.Query, metricName string, subQuery bool) (string, []any, error) {
var clickHouseQuery string
var conditions []string
@@ -2,7 +2,6 @@ package prometheus

import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql"
)

@@ -16,24 +15,30 @@ func RemoveExtraLabels(res *promql.Result, labelsToRemove ...string) error {
toRemove[l] = struct{}{}
}

dropLabels := func(metric labels.Labels) labels.Labels {
b := labels.NewBuilder(metric)
for name := range toRemove {
b.Del(name)
}
return b.Labels()
}

switch res.Value.(type) {
case promql.Vector:
value := res.Value.(promql.Vector)
for i := range value {
(value)[i].Metric = dropLabels((value)[i].Metric)
series := &(value)[i]
dst := series.Metric[:0]
for _, lbl := range series.Metric {
if _, drop := toRemove[lbl.Name]; !drop {
dst = append(dst, lbl)
}
}
series.Metric = dst
}
case promql.Matrix:
value := res.Value.(promql.Matrix)
for i := range value {
(value)[i].Metric = dropLabels((value)[i].Metric)
series := &(value)[i]
dst := series.Metric[:0]
for _, lbl := range series.Metric {
if _, drop := toRemove[lbl.Name]; !drop {
dst = append(dst, lbl)
}
}
series.Metric = dst
}
case promql.Scalar:
return nil
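The rewrite above replaces the labels.NewBuilder helper with the classic in-place slice filter, reusing the backing array instead of allocating a fresh label set for every series. A standalone sketch of the idiom on plain strings (not SigNoz code, just the pattern):

package main

import "fmt"

// filterInPlace keeps only elements not present in drop, reusing the
// original slice's backing array, so it allocates nothing.
func filterInPlace(s []string, drop map[string]struct{}) []string {
	dst := s[:0]
	for _, v := range s {
		if _, skip := drop[v]; !skip {
			dst = append(dst, v)
		}
	}
	return dst
}

func main() {
	labels := []string{"__name__", "le", "service.name"}
	drop := map[string]struct{}{"le": {}}
	fmt.Println(filterInPlace(labels, drop)) // [__name__ service.name]
}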
@@ -10,9 +10,11 @@ import (

"github.com/ClickHouse/clickhouse-go/v2"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)

type builderQuery[T any] struct {
@@ -78,11 +80,12 @@ func (q *builderQuery[T]) Fingerprint() string {
case qbtypes.LogAggregation:
aggParts = append(aggParts, a.Expression)
case qbtypes.MetricAggregation:
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s",
aggParts = append(aggParts, fmt.Sprintf("%s:%s:%s:%s:%s",
a.MetricName,
a.Temporality.StringValue(),
a.TimeAggregation.StringValue(),
a.SpaceAggregation.StringValue(),
a.SpaceAggregationParam.StringValue(),
))
}
}
@@ -248,6 +251,40 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
return nil, err
}

// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
switch typedPayload := payload.(type) {
case *qbtypes.RawData:
for _, rr := range typedPayload.Rows {
seeder := func() error {
body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
if !ok {
return nil
}
promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
if !ok {
return nil
}
seed(promoted, body)
str, err := sonic.MarshalString(body)
if err != nil {
return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
}
rr.Data["body"] = str
return nil
}
err := seeder()
if err != nil {
return nil, err
}

delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
}
payload = typedPayload
}
}

return &qbtypes.Result{
Type: q.kind,
Value: payload,
@@ -375,3 +412,18 @@ func decodeCursor(cur string) (int64, error) {
}
return strconv.ParseInt(string(b), 10, 64)
}

func seed(promoted map[string]any, body map[string]any) {
for key, fromValue := range promoted {
if toValue, ok := body[key]; !ok {
body[key] = fromValue
} else {
if fromValue, ok := fromValue.(map[string]any); ok {
if toValue, ok := toValue.(map[string]any); ok {
seed(fromValue, toValue)
body[key] = toValue
}
}
}
}
}
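A minimal runnable sketch of the merge semantics of seed, with invented input maps: promoted-only keys are folded into body, nested maps are merged recursively, and body's existing scalar values win on conflict. The function body is copied verbatim from the diff above so the example is self-contained.

package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

// seed copies keys from promoted into body, recursing into nested maps;
// existing non-map values in body are left untouched.
func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}

func main() {
	body := map[string]any{"level": "info", "user": map[string]any{"id": "42"}}
	promoted := map[string]any{"trace_id": "abc", "user": map[string]any{"name": "alice"}}

	seed(promoted, body)

	out, _ := sonic.MarshalString(body)
	fmt.Println(out)
	// Key order may vary, but the merged content is:
	// {"level":"info","trace_id":"abc","user":{"id":"42","name":"alice"}}
}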
@@ -14,6 +14,7 @@ import (
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/bytedance/sonic"
)

var (
@@ -393,11 +394,17 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {

// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
// Post-process JSON columns: normalize into String value

// Post-process JSON columns: normalize into structured values
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
val = string(x)
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
default:
// already a structured type (map[string]any, []any, etc.)
}
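The change above prefers a structured value over the old raw-string passthrough, falling back silently when the bytes do not parse. A small standalone sketch of that decode-with-fallback pattern, with invented sample bytes:

package main

import (
	"fmt"

	"github.com/bytedance/sonic"
)

// normalizeJSONCell mirrors the pattern above: try to decode the raw bytes
// into a structured value, and leave the cell as raw bytes on malformed input.
func normalizeJSONCell(raw []byte) any {
	var val any = raw
	if len(raw) > 0 {
		var v any
		if err := sonic.Unmarshal(raw, &v); err == nil {
			val = v
		}
	}
	return val
}

func main() {
	structured := normalizeJSONCell([]byte(`{"user":{"id":"42"}}`))
	fmt.Printf("%T\n", structured) // map[string]interface {}

	broken := normalizeJSONCell([]byte(`{not json`))
	fmt.Printf("%T\n", broken) // []uint8 (left as raw bytes)
}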
454
pkg/querier/openapi.go
Normal file
@@ -0,0 +1,454 @@
|
||||
package querier

import (
	"net/http"

	"github.com/SigNoz/signoz/pkg/http/handler"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
)

// QueryRangeV5OpenAPIDef is the OpenAPI definition for the /api/v5/query_range endpoint.
var QueryRangeV5OpenAPIDef = handler.OpenAPIDef{
	ID:                  "QueryRangeV5",
	Tags:                []string{"query"},
	Summary:             "Query range",
	Description:         "Execute a composite query over a time range. Supports builder queries (traces, logs, metrics), formulas, trace operators, PromQL, and ClickHouse SQL.",
	Request:             new(qbtypes.QueryRangeRequest),
	RequestContentType:  "application/json",
	RequestExamples:     queryRangeV5Examples,
	Response:            new(qbtypes.QueryRangeResponse),
	ResponseContentType: "application/json",
	SuccessStatusCode:   http.StatusOK,
	ErrorStatusCodes:    []int{http.StatusBadRequest},
	SecuritySchemes: []handler.OpenAPISecurityScheme{
		{Name: ctxtypes.AuthTypeAPIKey.StringValue(), Scopes: []string{"VIEWER"}},
		{Name: ctxtypes.AuthTypeTokenizer.StringValue(), Scopes: []string{"VIEWER"}},
	},
}

var queryRangeV5Examples = []handler.OpenAPIExample{
	{
		Name:    "traces_time_series",
		Summary: "Time series: count spans grouped by service",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{"expression": "count()", "alias": "span_count"},
							},
							"stepInterval": "60s",
							"filter":       map[string]any{"expression": "service.name = 'frontend'"},
							"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
							"order":        []any{map[string]any{"key": map[string]any{"name": "span_count"}, "direction": "desc"}},
							"limit":        10,
						},
					},
				},
			},
		},
	},
	{
		Name:    "logs_time_series",
		Summary: "Time series: count error logs grouped by service",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "logs",
							"aggregations": []any{
								map[string]any{"expression": "count()", "alias": "log_count"},
							},
							"stepInterval": "60s",
							"filter":       map[string]any{"expression": "severity_text = 'ERROR'"},
							"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
							"order":        []any{map[string]any{"key": map[string]any{"name": "log_count"}, "direction": "desc"}},
							"limit":        10,
						},
					},
				},
			},
		},
	},
	{
		Name:    "metrics_gauge_time_series",
		Summary: "Time series: latest gauge value averaged across series",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{"metricName": "system.cpu.utilization", "timeAggregation": "latest", "spaceAggregation": "avg"},
							},
							"stepInterval": "60s",
							"groupBy":      []any{map[string]any{"name": "host.name", "fieldContext": "resource"}},
						},
					},
				},
			},
		},
	},
	{
		Name:    "metrics_rate_time_series",
		Summary: "Time series: rate of cumulative counter",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum"},
							},
							"stepInterval": 120,
							"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
						},
					},
				},
			},
		},
	},
	{
		Name:    "metrics_histogram_time_series",
		Summary: "Time series: p99 latency from histogram",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{"metricName": "http.server.duration.bucket", "spaceAggregation": "p99"},
							},
							"stepInterval": "60s",
							"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
						},
					},
				},
			},
		},
	},
	{
		Name:    "logs_raw",
		Summary: "Raw: fetch raw log records",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "logs",
							"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
							"selectFields": []any{
								map[string]any{"name": "body", "fieldContext": "log"},
								map[string]any{"name": "service.name", "fieldContext": "resource"},
							},
							"order": []any{
								map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "log"}, "direction": "desc"},
								map[string]any{"key": map[string]any{"name": "id"}, "direction": "desc"},
							},
							"limit":  50,
							"offset": 0,
						},
					},
				},
			},
		},
	},
	{
		Name:    "traces_raw",
		Summary: "Raw: fetch raw span records",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"filter": map[string]any{"expression": "service.name = 'frontend' AND has_error = true"},
							"selectFields": []any{
								map[string]any{"name": "name", "fieldContext": "span"},
								map[string]any{"name": "duration_nano", "fieldContext": "span"},
							},
							"order": []any{
								map[string]any{"key": map[string]any{"name": "timestamp", "fieldContext": "span"}, "direction": "desc"},
							},
							"limit": 100,
						},
					},
				},
			},
		},
	},
	{
		Name:    "traces_scalar",
		Summary: "Scalar: total span count",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "traces",
							"aggregations": []any{
								map[string]any{"expression": "count()", "alias": "span_count"},
							},
							"filter": map[string]any{"expression": "service.name = 'frontend'"},
						},
					},
				},
			},
		},
	},
	{
		Name:    "logs_scalar",
		Summary: "Scalar: total error log count",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "logs",
							"aggregations": []any{
								map[string]any{"expression": "count()", "alias": "error_count"},
							},
							"filter": map[string]any{"expression": "severity_text = 'ERROR'"},
						},
					},
				},
			},
		},
	},
	{
		Name:    "metrics_scalar",
		Summary: "Scalar: single reduced metric value",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":   "A",
							"signal": "metrics",
							"aggregations": []any{
								map[string]any{"metricName": "http.server.duration.count", "timeAggregation": "rate", "spaceAggregation": "sum", "reduceTo": "sum"},
							},
							"stepInterval": "60s",
						},
					},
				},
			},
		},
	},
	{
		Name:    "formula",
		Summary: "Formula: error rate from two trace queries",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":         "A",
							"signal":       "traces",
							"aggregations": []any{map[string]any{"expression": "countIf(has_error = true)"}},
							"stepInterval": "60s",
							"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
						},
					},
					map[string]any{
						"type": "builder_query",
						"spec": map[string]any{
							"name":         "B",
							"signal":       "traces",
							"aggregations": []any{map[string]any{"expression": "count()"}},
							"stepInterval": "60s",
							"groupBy":      []any{map[string]any{"name": "service.name", "fieldContext": "resource"}},
						},
					},
					map[string]any{
						"type": "builder_formula",
						"spec": map[string]any{
							"name":       "error_rate",
							"expression": "A / B * 100",
						},
					},
				},
			},
		},
	},
	{
		Name:    "promql",
		Summary: "PromQL: request rate with UTF-8 metric name",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "promql",
						"spec": map[string]any{
							"name":  "request_rate",
							"query": "sum(rate({\"http.server.duration.count\"}[5m])) by (\"service.name\")",
							"step":  60,
						},
					},
				},
			},
		},
	},
	{
		Name:    "clickhouse_sql_traces_time_series",
		Summary: "ClickHouse SQL: traces time series with resource filter",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "time_series",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name": "span_rate",
							"query": "WITH __resource_filter AS (" +
								" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
								" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
								" ) SELECT toStartOfInterval(timestamp, INTERVAL 60 SECOND) AS ts, count() AS value" +
								" FROM signoz_traces.distributed_signoz_index_v3" +
								" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
								" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
								" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
								" GROUP BY ts ORDER BY ts",
						},
					},
				},
			},
		},
	},
	{
		Name:    "clickhouse_sql_logs_raw",
		Summary: "ClickHouse SQL: raw logs with resource filter",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "raw",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name": "recent_errors",
							"query": "WITH __resource_filter AS (" +
								" SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource" +
								" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
								" ) SELECT timestamp, body" +
								" FROM signoz_logs.distributed_logs_v2" +
								" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
								" AND timestamp >= $start_timestamp_nano AND timestamp <= $end_timestamp_nano" +
								" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp" +
								" AND severity_text = 'ERROR'" +
								" ORDER BY timestamp DESC LIMIT 100",
						},
					},
				},
			},
		},
	},
	{
		Name:    "clickhouse_sql_traces_scalar",
		Summary: "ClickHouse SQL: scalar aggregate with resource filter",
		Value: map[string]any{
			"schemaVersion": "v1",
			"start":         1640995200000,
			"end":           1640998800000,
			"requestType":   "scalar",
			"compositeQuery": map[string]any{
				"queries": []any{
					map[string]any{
						"type": "clickhouse_sql",
						"spec": map[string]any{
							"name": "total_spans",
							"query": "WITH __resource_filter AS (" +
								" SELECT fingerprint FROM signoz_traces.distributed_traces_v3_resource" +
								" WHERE seen_at_ts_bucket_start >= $start_timestamp - 1800 AND seen_at_ts_bucket_start <= $end_timestamp" +
								" ) SELECT count() AS value" +
								" FROM signoz_traces.distributed_signoz_index_v3" +
								" WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)" +
								" AND timestamp >= $start_datetime AND timestamp <= $end_datetime" +
								" AND ts_bucket_start >= $start_timestamp - 1800 AND ts_bucket_start <= $end_timestamp",
						},
					},
				},
			},
		},
	},
}
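The examples above are request bodies; to exercise the endpoint end to end, a minimal client sketch can help. The base URL, port, and the SIGNOZ-API-KEY header name below are assumptions for illustration, not part of this change; adjust them to your deployment.

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Minimal scalar query, mirroring the "traces_scalar" example above.
	body := []byte(`{
	  "schemaVersion": "v1",
	  "start": 1640995200000,
	  "end": 1640998800000,
	  "requestType": "scalar",
	  "compositeQuery": {"queries": [{"type": "builder_query", "spec": {
	    "name": "A", "signal": "traces",
	    "aggregations": [{"expression": "count()", "alias": "span_count"}]
	  }}]}
	}`)

	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/api/v5/query_range", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("SIGNOZ-API-KEY", "<your-api-key>") // header name assumed; match your auth setup

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))
}
```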
@@ -16,7 +16,6 @@ import (
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
)
@@ -241,13 +240,13 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
	var series []*qbv5.TimeSeries
	for _, v := range matrix {
		var s qbv5.TimeSeries
		lbls := make([]*qbv5.Label, 0, v.Metric.Len())
		v.Metric.Range(func(l labels.Label) {
		lbls := make([]*qbv5.Label, 0, len(v.Metric))
		for name, value := range v.Metric.Copy().Map() {
			lbls = append(lbls, &qbv5.Label{
				Key:   telemetrytypes.TelemetryFieldKey{Name: l.Name},
				Value: l.Value,
				Key:   telemetrytypes.TelemetryFieldKey{Name: name},
				Value: value,
			})
		})
		}

		s.Labels = lbls

@@ -19,7 +19,6 @@ import (
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/ruletypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

@@ -462,12 +461,12 @@ func toCommonSeries(series promql.Series) v3.Series {
		Points: make([]v3.Point, 0),
	}

	series.Metric.Range(func(l labels.Label) {
		commonSeries.Labels[l.Name] = l.Value
	for _, lbl := range series.Metric {
		commonSeries.Labels[lbl.Name] = lbl.Value
		commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
			l.Name: l.Value,
			lbl.Name: lbl.Value,
		})
	})
	}

	for _, f := range series.Floats {
		commonSeries.Points = append(commonSeries.Points, v3.Point{
@@ -204,10 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
	// While we expect users not to send mixed data types, it inevitably happens
	// So we handle the data type collisions here
	switch key.FieldDataType {
	case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString, telemetrytypes.FieldDataTypeJSON:
		if key.FieldDataType == telemetrytypes.FieldDataTypeJSON {
			tblFieldName = fmt.Sprintf("toString(%s)", tblFieldName)
		}
	case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
		switch v := value.(type) {
		case float64:
			// try to convert the string value to a number
@@ -222,6 +219,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
			// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
			value = fmt.Sprintf("%t", v)
		}

	case telemetrytypes.FieldDataTypeInt64,
		telemetrytypes.FieldDataTypeArrayInt64,
		telemetrytypes.FieldDataTypeNumber,
@@ -4,7 +4,6 @@ import (
	"context"
	"fmt"
	"log/slog"
	"slices"
	"strconv"
	"strings"

@@ -384,7 +383,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
	for _, key := range keys {
		condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
		if err != nil {
			v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
			return ""
		}
		conds = append(conds, condition)
@@ -909,13 +907,6 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
		}
	}

	// TODO(Piyush): Handle this better; Use constants for the strings later
	if BodyJSONQueryEnabled && fieldKey.Name == "body" && slices.ContainsFunc(fieldKeysForName, func(key *telemetrytypes.TelemetryFieldKey) bool {
		return key.Name == "message" && key.FieldDataType == telemetrytypes.FieldDataTypeString && key.FieldContext == telemetrytypes.FieldContextBody
	}) {
		v.warnings = append(v.warnings, "You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search.")
	}

	if len(fieldKeysForName) > 1 {
		warnMsg := fmt.Sprintf(
			"Key `%s` is ambiguous, found %d different combinations of field context / data type: %v.",
@@ -1,7 +1,10 @@
package signoz

import (
	"bytes"
	"context"
	"encoding/json"
	"net/http"
	"os"
	"reflect"

@@ -22,10 +25,13 @@ import (
	"github.com/SigNoz/signoz/pkg/modules/promote"
	"github.com/SigNoz/signoz/pkg/modules/session"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/querier"
	"github.com/SigNoz/signoz/pkg/types/ctxtypes"
	"github.com/gorilla/mux"
	"github.com/swaggest/jsonschema-go"
	"github.com/swaggest/openapi-go"
	"github.com/swaggest/openapi-go/openapi3"
	"gopkg.in/yaml.v2"
)

type OpenAPI struct {
@@ -57,6 +63,10 @@ func NewOpenAPI(ctx context.Context, instrumentation instrumentation.Instrumenta
		return nil, err
	}

	// Register routes that live outside the APIServer modules
	// so they are discovered by the OpenAPI walker.
	registerQueryRoutes(apiserver.Router())

	reflector := openapi3.NewReflector()
	reflector.JSONSchemaReflector().DefaultOptions = append(reflector.JSONSchemaReflector().DefaultOptions, jsonschema.InterceptDefName(func(t reflect.Type, defaultDefName string) string {
		if defaultDefName == "RenderSuccessResponse" {
@@ -90,10 +100,67 @@ func (openapi *OpenAPI) CreateAndWrite(path string) error {
		return err
	}

	spec, err := openapi.reflector.Spec.MarshalYAML()
	// The library's MarshalYAML does a JSON round-trip that converts all numbers
	// to float64, causing large integers (e.g. epoch millisecond timestamps) to
	// render in scientific notation (1.6409952e+12).
	jsonData, err := openapi.reflector.Spec.MarshalJSON()
	if err != nil {
		return err
	}

	dec := json.NewDecoder(bytes.NewReader(jsonData))
	dec.UseNumber()

	var v any
	if err := dec.Decode(&v); err != nil {
		return err
	}

	convertJSONNumbers(v)

	spec, err := yaml.Marshal(v)
	if err != nil {
		return err
	}

	return os.WriteFile(path, spec, 0o600)
}

// convertJSONNumbers recursively walks a decoded JSON structure and converts
// json.Number values to int64 (preferred) or float64 so that YAML marshaling
// renders them as plain numbers instead of quoted strings.
func convertJSONNumbers(v interface{}) {
	switch val := v.(type) {
	case map[string]interface{}:
		for k, elem := range val {
			if n, ok := elem.(json.Number); ok {
				if i, err := n.Int64(); err == nil {
					val[k] = i
				} else if f, err := n.Float64(); err == nil {
					val[k] = f
				}
			} else {
				convertJSONNumbers(elem)
			}
		}
	case []interface{}:
		for i, elem := range val {
			if n, ok := elem.(json.Number); ok {
				if i64, err := n.Int64(); err == nil {
					val[i] = i64
				} else if f, err := n.Float64(); err == nil {
					val[i] = f
				}
			} else {
				convertJSONNumbers(elem)
			}
		}
	}
}
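A standalone illustration of why the decoder switches to UseNumber, independent of the reflector: default decoding turns every JSON number into float64, which renders large epoch timestamps in scientific notation, while json.Number preserves the literal so it can be converted back to int64.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

func main() {
	raw := []byte(`{"start": 1640995200000}`)

	// Default decoding: numbers become float64, which prints (and later
	// marshals) in scientific notation for large values.
	var asFloat map[string]any
	_ = json.Unmarshal(raw, &asFloat)
	fmt.Printf("%v\n", asFloat["start"]) // 1.6409952e+12

	// UseNumber keeps the literal as json.Number, so it can be converted
	// to int64 and rendered as a plain integer.
	dec := json.NewDecoder(bytes.NewReader(raw))
	dec.UseNumber()
	var asNumber map[string]any
	_ = dec.Decode(&asNumber)
	n := asNumber["start"].(json.Number)
	i, _ := n.Int64()
	fmt.Printf("%d\n", i) // 1640995200000
}
```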
func registerQueryRoutes(router *mux.Router) {
	router.Handle("/api/v5/query_range", handler.New(
		func(http.ResponseWriter, *http.Request) {},
		querier.QueryRangeV5OpenAPIDef,
	)).Methods(http.MethodPost)
}
@@ -35,19 +35,13 @@ func (c *conditionBuilder) conditionFor(
		return "", err
	}

	if column.Type.GetType() == schema.ColumnTypeEnumJSON {
		// If the field data is not the JSON column itself, we need to build a JSON condition
		if querybuilder.BodyJSONQueryEnabled && key.MustBuildJSONCondition() {
			valueType, value := InferDataType(value, operator, key)
			cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
			if err != nil {
				return "", err
			}
			return cond, nil
		} else if key.FieldDataType == telemetrytypes.FieldDataTypeJSON && !operator.IsOpValidForJSON() {
			// Skip building condition for invalid operators on JSON columns
			return "", nil
	if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
		valueType, value := InferDataType(value, operator, key)
		cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
		if err != nil {
			return "", err
		}
		return cond, nil
	}

	if operator.IsStringSearchOperator() {
@@ -114,6 +108,7 @@ func (c *conditionBuilder) conditionFor(
		return sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
	case qbtypes.FilterOperatorNotContains:
		return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil

	case qbtypes.FilterOperatorRegexp:
		// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
		// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
@@ -181,16 +176,9 @@ func (c *conditionBuilder) conditionFor(
	var value any
	switch column.Type.GetType() {
	case schema.ColumnTypeEnumJSON:
		switch key.FieldDataType {
		case telemetrytypes.FieldDataTypeJSON:
			if operator == qbtypes.FilterOperatorExists {
				return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), false), nil
			}
			return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), true), nil
		default:
			if operator == qbtypes.FilterOperatorExists {
				return sb.IsNotNull(tblFieldName), nil
			}
		if operator == qbtypes.FilterOperatorExists {
			return sb.IsNotNull(tblFieldName), nil
		} else {
			return sb.IsNull(tblFieldName), nil
		}
	case schema.ColumnTypeEnumLowCardinality:
@@ -1,10 +1,7 @@
package telemetrylogs

import (
	"fmt"

	"github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -20,7 +17,7 @@ const (
	LogsV2TimestampColumn         = "timestamp"
	LogsV2ObservedTimestampColumn = "observed_timestamp"
	LogsV2BodyColumn              = "body"
	LogsV2BodyV2Column            = constants.BodyV2Column
	LogsV2BodyJSONColumn          = constants.BodyJSONColumn
	LogsV2BodyPromotedColumn      = constants.BodyPromotedColumn
	LogsV2TraceIDColumn           = "trace_id"
	LogsV2SpanIDColumn            = "span_id"
@@ -37,20 +34,11 @@ const (
	LogsV2ResourcesStringColumn = "resources_string"
	LogsV2ScopeStringColumn     = "scope_string"

	BodyV2ColumnPrefix       = constants.BodyV2ColumnPrefix
	BodyJSONColumnPrefix     = constants.BodyJSONColumnPrefix
	BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
	MessageSubColumn         = "message"
)

var (
	// mapping for the body logical field to the message sub-column
	BodyLogicalFieldJSONMapping = &telemetrytypes.TelemetryFieldKey{
		Name:          MessageSubColumn,
		Signal:        telemetrytypes.SignalLogs,
		FieldContext:  telemetrytypes.FieldContextBody,
		FieldDataType: telemetrytypes.FieldDataTypeString,
		JSONDataType:  &telemetrytypes.String,
	}
	DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
		Name:   "body",
		Signal: telemetrytypes.SignalLogs,
@@ -130,29 +118,3 @@ var (
		},
	}
)

func bodyAliasExpression() string {
	if !querybuilder.BodyJSONQueryEnabled {
		return LogsV2BodyColumn
	}

	return fmt.Sprintf("%s as body", LogsV2BodyV2Column)
}

func init() {
	// the body logical field is mapped to the message field in the body context, and only with the String data type
	err := BodyLogicalFieldJSONMapping.SetJSONAccessPlan(telemetrytypes.JSONColumnMetadata{
		BaseColumn:     LogsV2BodyV2Column,
		PromotedColumn: LogsV2BodyPromotedColumn,
	}, map[string][]telemetrytypes.JSONDataType{
		MessageSubColumn: {telemetrytypes.String},
	})
	if err != nil {
		panic(err)
	}

	if querybuilder.BodyJSONQueryEnabled {
		DefaultFullTextColumn = BodyLogicalFieldJSONMapping
		IntrinsicFields["body"] = *BodyLogicalFieldJSONMapping
	}
}
@@ -30,7 +30,7 @@ var (
	"severity_text":   {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
	"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
	"body":            {Name: "body", Type: schema.ColumnTypeString},
	LogsV2BodyV2Column: {Name: LogsV2BodyV2Column, Type: schema.JSONColumnType{
	LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
		MaxDynamicTypes: utils.ToPointer(uint(32)),
		MaxDynamicPaths: utils.ToPointer(uint(0)),
	}},
@@ -61,13 +61,11 @@ var (
	}
)

type fieldMapper struct {
}
type fieldMapper struct {}

func NewFieldMapper() qbtypes.FieldMapper {
	return &fieldMapper{}
}

func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) (*schema.Column, error) {
	switch key.FieldContext {
	case telemetrytypes.FieldContextResource:
@@ -91,9 +89,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
	}
	case telemetrytypes.FieldContextBody:
		// Body context is for JSON body fields
		// Use body_v2 if feature flag is enabled
		// Use body_json if feature flag is enabled
		if querybuilder.BodyJSONQueryEnabled {
			return logsV2Columns[LogsV2BodyV2Column], nil
			return logsV2Columns[LogsV2BodyJSONColumn], nil
		}
		// Fall back to legacy body column
		return logsV2Columns["body"], nil
@@ -102,9 +100,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
	if !ok {
		// check if the key has body JSON search
		if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
			// Use body_v2 if feature flag is enabled and we have a body condition builder
			// Use body_json if feature flag is enabled and we have a body condition builder
			if querybuilder.BodyJSONQueryEnabled {
				return logsV2Columns[LogsV2BodyV2Column], nil
				return logsV2Columns[LogsV2BodyJSONColumn], nil
			}
			// Fall back to legacy body column
			return logsV2Columns["body"], nil
@@ -149,9 +147,6 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
	}

	return m.buildFieldForJSON(key)
	case telemetrytypes.FieldContextLog:
		// return the column name as is for log context fields
		return column.Name, nil
	default:
		return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
	}
@@ -257,12 +252,27 @@ func (m *fieldMapper) buildFieldForJSON(key *telemetrytypes.TelemetryFieldKey) (
			"plan length is less than 2 for promoted path: %s", key.Name)
	}

	// promoted column first then body_v2 column
	// TODO(Piyush): Change this in future for better performance
	expr = fmt.Sprintf("coalesce(%s, %s)",
		fmt.Sprintf("dynamicElement(%s, '%s')", plan[1].FieldPath(), plan[1].TerminalConfig.ElemType.StringValue()),
		expr,
	node := plan[1]
	promotedExpr := fmt.Sprintf(
		"dynamicElement(%s, '%s')",
		node.FieldPath(),
		node.TerminalConfig.ElemType.StringValue(),
	)

	// dynamicElement returns NULL for scalar types or an empty array for array types.
	if node.TerminalConfig.ElemType.IsArray {
		expr = fmt.Sprintf(
			"if(length(%s) > 0, %s, %s)",
			promotedExpr,
			promotedExpr,
			expr,
		)
	} else {
		// promoted column first then body_json column
		// TODO(Piyush): Change this in future for better performance
		expr = fmt.Sprintf("coalesce(%s, %s)", promotedExpr, expr)
	}

	}

	return expr, nil
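To make the two branches concrete, a sketch of the expression shapes they produce; the column and path names here are invented for illustration, only the shapes mirror the code above.

```go
package main

import "fmt"

func main() {
	// Hypothetical promoted and base expressions for an array path like user.tags.
	promotedExpr := "dynamicElement(body_promoted.`user`.`tags`, 'Array(String)')"
	baseExpr := "dynamicElement(body_json.`user`.`tags`, 'Array(String)')"

	// Array element types: dynamicElement yields an empty array when the
	// promoted column has no value, so emptiness (not NULL) picks the fallback.
	fmt.Println(fmt.Sprintf("if(length(%s) > 0, %s, %s)", promotedExpr, promotedExpr, baseExpr))

	// Scalar element types: dynamicElement yields NULL when absent, so
	// coalesce prefers the promoted column and falls back to the base column.
	scalarPromoted := "dynamicElement(body_promoted.`user`.`id`, 'Int64')"
	scalarBase := "dynamicElement(body_json.`user`.`id`, 'Int64')"
	fmt.Println(fmt.Sprintf("coalesce(%s, %s)", scalarPromoted, scalarBase))
}
```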
@@ -284,8 +294,7 @@ func (m *fieldMapper) buildArrayConcat(plan telemetrytypes.JSONAccessPlan) (stri
	}

	// Build arrayMap expressions for ALL available branches at the root level.
	// Iterate branches in deterministic order (JSON then Dynamic) so generated SQL
	// is stable across environments; map iteration order is random in Go.
	// Iterate branches in deterministic order (JSON then Dynamic)
	var arrayMapExpressions []string
	for _, node := range plan {
		for _, branchType := range node.BranchesInOrder() {

@@ -30,7 +30,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
	return &jsonConditionBuilder{key: key, valueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType]}
}

// BuildCondition builds the full WHERE condition for body_v2 JSON paths
// BuildCondition builds the full WHERE condition for body_json JSON paths
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
	conditions := []string{}
	for _, node := range c.key.JSONPlan {
@@ -40,7 +40,6 @@ func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperato
		}
		conditions = append(conditions, condition)
	}

	return sb.Or(conditions...), nil
}

@@ -73,9 +72,9 @@ func (c *jsonConditionBuilder) buildTerminalCondition(node *telemetrytypes.JSONA

	// switch operator for array membership checks
	switch operator {
	case qbtypes.FilterOperatorContains, qbtypes.FilterOperatorIn:
	case qbtypes.FilterOperatorContains:
		operator = qbtypes.FilterOperatorEqual
	case qbtypes.FilterOperatorNotContains, qbtypes.FilterOperatorNotIn:
	case qbtypes.FilterOperatorNotContains:
		operator = qbtypes.FilterOperatorNotEqual
	}
	}
@@ -191,13 +190,14 @@ func (c *jsonConditionBuilder) buildArrayMembershipCondition(node *telemetrytype
		arrayExpr = typedArrayExpr()
	}

	fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, "x", operator)
	key := "x"
	fieldExpr, value := querybuilder.DataTypeCollisionHandledFieldName(&localKeyCopy, value, key, operator)
	op, err := c.applyOperator(sb, fieldExpr, operator, value)
	if err != nil {
		return "", err
	}

	return fmt.Sprintf("arrayExists(%s -> %s, %s)", fieldExpr, op, arrayExpr), nil
	return fmt.Sprintf("arrayExists(%s -> %s, %s)", key, op, arrayExpr), nil
}

// recurseArrayHops recursively builds array traversal conditions
@@ -279,27 +279,31 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
	case qbtypes.FilterOperatorNotContains:
		return sb.NotILike(fieldExpr, fmt.Sprintf("%%%v%%", value)), nil
	case qbtypes.FilterOperatorIn, qbtypes.FilterOperatorNotIn:
		// emulate IN/NOT IN using OR/AND over equals to leverage indexes consistently
		values, ok := value.([]any)
		if !ok {
			values = []any{value}
		}
		conds := []string{}
		for _, v := range values {
			if operator == qbtypes.FilterOperatorIn {
				conds = append(conds, sb.E(fieldExpr, v))
			} else {
				conds = append(conds, sb.NE(fieldExpr, v))
			}
		}
		if operator == qbtypes.FilterOperatorIn {
			return sb.Or(conds...), nil
			return sb.In(fieldExpr, values...), nil
		}
		return sb.And(conds...), nil
		return sb.NotIn(fieldExpr, values...), nil
	case qbtypes.FilterOperatorExists:
		return sb.IsNotNull(fieldExpr), nil
		return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
	case qbtypes.FilterOperatorNotExists:
		return sb.IsNull(fieldExpr), nil
		return fmt.Sprintf("%s IS NULL", fieldExpr), nil
	// between and not between
	case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
		values, ok := value.([]any)
		if !ok {
			return "", qbtypes.ErrBetweenValues
		}
		if len(values) != 2 {
			return "", qbtypes.ErrBetweenValues
		}
		if operator == qbtypes.FilterOperatorBetween {
			return sb.Between(fieldExpr, values[0], values[1]), nil
		}
		return sb.NotBetween(fieldExpr, values[0], values[1]), nil
	default:
		return "", qbtypes.ErrUnsupportedOperator
	}
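The IN/NOT IN rewrite above replaces a hand-rolled OR/AND chain of equality checks with the builder's native IN/NOT IN. A sketch of the difference in generated SQL, assuming the sqlbuilder in use is huandu/go-sqlbuilder (which matches the API seen here); table and column names are placeholders.

```go
package main

import (
	"fmt"

	"github.com/huandu/go-sqlbuilder"
)

func main() {
	vals := []any{"a", "b", "c"}

	// Old shape: OR over per-value equality conditions.
	sb1 := sqlbuilder.NewSelectBuilder()
	conds := []string{}
	for _, v := range vals {
		conds = append(conds, sb1.E("field", v))
	}
	sb1.Select("count()").From("t").Where(sb1.Or(conds...))
	q1, args1 := sb1.Build()
	fmt.Println(q1, args1) // ... WHERE (field = ? OR field = ? OR field = ?)

	// New shape: a single IN condition.
	sb2 := sqlbuilder.NewSelectBuilder()
	sb2.Select("count()").From("t").Where(sb2.In("field", vals...))
	q2, args2 := sb2.Build()
	fmt.Println(q2, args2) // ... WHERE field IN (?, ?, ?)
}
```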
File diff suppressed because one or more lines are too long
@@ -3,12 +3,12 @@ package telemetrylogs
import (
	"context"
	"fmt"
	"reflect"
	"strconv"
	"strings"

	qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
)

func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetrytypes.FieldDataType, any) {
@@ -41,31 +41,55 @@ func parseStrValue(valueStr string, operator qbtypes.FilterOperator) (telemetryt
}

func InferDataType(value any, operator qbtypes.FilterOperator, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
	// check if the value is an int, float, string, or bool
	valueType := telemetrytypes.FieldDataTypeUnspecified
	switch v := value.(type) {
	case []any:
		// take the first element and infer the type
		if len(v) > 0 {
			valueType, _ = InferDataType(v[0], operator, key)
	if operator.IsArrayOperator() && reflect.ValueOf(value).Kind() != reflect.Slice {
		value = []any{value}
	}

	// closure to calculate the data type of the value
	var closure func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any)
	closure = func(value any, key *telemetrytypes.TelemetryFieldKey) (telemetrytypes.FieldDataType, any) {
		// check if the value is an int, float, string, or bool
		valueType := telemetrytypes.FieldDataTypeUnspecified
		switch v := value.(type) {
		case []any:
			// take the first element and infer the type
			var scalerType telemetrytypes.FieldDataType
			if len(v) > 0 {
				// Note: nested slices ([[...]]) are not handled yet
				if reflect.ValueOf(v[0]).Kind() == reflect.Slice {
					return telemetrytypes.FieldDataTypeUnspecified, value
				}

				scalerType, _ = closure(v[0], key)
			}

			arrayType := telemetrytypes.ScalerFieldTypeToArrayFieldType[scalerType]
			switch {
			// decide on the field data type based on the key
			case key.FieldDataType.IsArray():
				return arrayType, v
			default:
				// TODO(Piyush): backward compatibility for the old String based JSON QB queries
				if strings.HasSuffix(key.Name, telemetrytypes.ArrayAnyIndexSuffix) {
					return arrayType, v
				}
				return scalerType, v
			}
		case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
			valueType = telemetrytypes.FieldDataTypeInt64
		case float32, float64:
			valueType = telemetrytypes.FieldDataTypeFloat64
		case string:
			valueType, value = parseStrValue(v, operator)
		case bool:
			valueType = telemetrytypes.FieldDataTypeBool
		}
		return valueType, v
	case uint8, uint16, uint32, uint64, int, int8, int16, int32, int64:
		valueType = telemetrytypes.FieldDataTypeInt64
	case float32, float64:
		valueType = telemetrytypes.FieldDataTypeFloat64
	case string:
		valueType, value = parseStrValue(v, operator)
	case bool:
		valueType = telemetrytypes.FieldDataTypeBool

		return valueType, value
	}

	// check if it is array
	if strings.HasSuffix(key.Name, "[*]") || strings.HasSuffix(key.Name, "[]") {
		valueType = telemetrytypes.FieldDataType{String: valuer.NewString(fmt.Sprintf("[]%s", valueType.StringValue()))}
	}

	return valueType, value
	// calculate the data type of the value
	return closure(value, key)
}

func getBodyJSONPath(key *telemetrytypes.TelemetryFieldKey) string {
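A small runnable sketch of the array-operator guard added at the top of InferDataType; the helper name is made up, only the wrapping logic mirrors the change: array operators such as IN/NOT IN always receive a slice, so a lone scalar is wrapped first.

```go
package main

import (
	"fmt"
	"reflect"
)

// wrapForArrayOperator mirrors the guard: a scalar value used with an
// array operator is wrapped into a one-element slice before inference.
func wrapForArrayOperator(value any, isArrayOperator bool) any {
	if isArrayOperator && reflect.ValueOf(value).Kind() != reflect.Slice {
		return []any{value}
	}
	return value
}

func main() {
	fmt.Println(wrapForArrayOperator("john_doe", true))            // [john_doe]
	fmt.Println(wrapForArrayOperator([]any{"a", "b"}, true))       // [a b]
	fmt.Println(wrapForArrayOperator("plain-equals-value", false)) // plain-equals-value
}
```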
@@ -203,6 +203,7 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
}

func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {

	// First check if it matches with any intrinsic fields
	var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
	if _, ok := IntrinsicFields[key.Name]; ok {
@@ -211,6 +212,7 @@ func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldK
	}

	return querybuilder.AdjustKey(key, keys, nil)

}

// buildListQuery builds a query for list panel type
@@ -247,7 +249,11 @@ func (b *logQueryStatementBuilder) buildListQuery(
	sb.SelectMore(LogsV2SeverityNumberColumn)
	sb.SelectMore(LogsV2ScopeNameColumn)
	sb.SelectMore(LogsV2ScopeVersionColumn)
	sb.SelectMore(bodyAliasExpression())
	sb.SelectMore(LogsV2BodyColumn)
	if querybuilder.BodyJSONQueryEnabled {
		sb.SelectMore(LogsV2BodyJSONColumn)
		sb.SelectMore(LogsV2BodyPromotedColumn)
	}
	sb.SelectMore(LogsV2AttributesStringColumn)
	sb.SelectMore(LogsV2AttributesNumberColumn)
	sb.SelectMore(LogsV2AttributesBoolColumn)
@@ -5,7 +5,6 @@ import (
	"testing"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
	"github.com/SigNoz/signoz/pkg/querybuilder"
	"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
@@ -422,6 +421,38 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
		},
		expectedErr: nil,
	},
	{
		name:        "IN operator with json search",
		requestType: qbtypes.RequestTypeRaw,
		query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
			Signal: telemetrytypes.SignalLogs,
			Filter: &qbtypes.Filter{
				Expression: "body.user_names[*] IN 'john_doe'",
			},
			Limit: 10,
		},
		expected: qbtypes.Statement{
			Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
			Args:  []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
		},
		expectedErr: nil,
	},
	{
		name:        "has with json search",
		requestType: qbtypes.RequestTypeRaw,
		query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
			Signal: telemetrytypes.SignalLogs,
			Filter: &qbtypes.Filter{
				Expression: "has(body.user_names[*], 'john_doe')",
			},
			Limit: 10,
		},
		expected: qbtypes.Statement{
			Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
			Args:  []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
		},
		expectedErr: nil,
	},
}

mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
@@ -855,154 +886,3 @@ func TestAdjustKey(t *testing.T) {
		})
	}
}

func TestStmtBuilderBodyField(t *testing.T) {
	cases := []struct {
		name                string
		requestType         qbtypes.RequestType
		query               qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
		enableBodyJSONQuery bool
		expected            qbtypes.Statement
		expectedErr         error
	}{
		{
			name:        "body_exists",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body Exists"},
				Limit:  10,
			},
			enableBodyJSONQuery: true,
			expected: qbtypes.Statement{
				Query:    "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') IS NOT NULL) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:     []any{uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
				Warnings: []string{"You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search."},
			},
			expectedErr: nil,
		},
		{
			name:        "body_exists_disabled",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body Exists"},
				Limit:  10,
			},
			enableBodyJSONQuery: false,
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
			},
			expectedErr: nil,
		},
		{
			name:        "body_empty",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body == ''"},
				Limit:  10,
			},
			enableBodyJSONQuery: true,
			expected: qbtypes.Statement{
				Query:    "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:     []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
				Warnings: []string{"You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search."},
			},
			expectedErr: nil,
		},
		{
			name:        "body_empty_disabled",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body == ''"},
				Limit:  10,
			},
			enableBodyJSONQuery: false,
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
			},
			expectedErr: nil,
		},
		{
			name:        "body_contains",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
				Limit:  10,
			},
			enableBodyJSONQuery: true,
			expected: qbtypes.Statement{
				Query:    "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (LOWER(dynamicElement(body_v2.`message`, 'String')) LIKE LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:     []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
				Warnings: []string{"You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search."},
			},
			expectedErr: nil,
		},
		{
			name:        "body_contains_disabled",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
				Limit:  10,
			},
			enableBodyJSONQuery: false,
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
			},
			expectedErr: nil,
		},
	}

	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)

	enable, disable := jsonQueryTestUtil(t)
	defer disable()
	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			if c.enableBodyJSONQuery {
				enable()
			} else {
				disable()
			}
			// build the key map after enabling/disabling body JSON query
			mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
			mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()

			aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
			resourceFilterStmtBuilder := resourceFilterStmtBuilder()
			statementBuilder := NewLogQueryStatementBuilder(
				instrumentationtest.New().ToProviderSettings(),
				mockMetadataStore,
				fm,
				cb,
				resourceFilterStmtBuilder,
				aggExprRewriter,
				DefaultFullTextColumn,
				GetBodyJSONKey,
			)

			q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
			if c.expectedErr != nil {
				require.Error(t, err)
				require.Contains(t, err.Error(), c.expectedErr.Error())
			} else {
				if err != nil {
					_, _, _, _, _, add := errors.Unwrapb(err)
					t.Logf("error additionals: %v", add)
				}
				require.NoError(t, err)
				require.Equal(t, c.expected.Query, q.Query)
				require.Equal(t, c.expected.Args, q.Args)
				require.Equal(t, c.expected.Warnings, q.Warnings)
			}
		})
	}
}
@@ -27,6 +27,13 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
			FieldDataType: telemetrytypes.FieldDataTypeString,
		},
	},
	"body": {
		{
			Name:          "body",
			FieldContext:  telemetrytypes.FieldContextLog,
			FieldDataType: telemetrytypes.FieldDataTypeString,
		},
	},
	"http.status_code": {
		{
			Name: "http.status_code",
@@ -938,11 +945,6 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
			key.Signal = telemetrytypes.SignalLogs
		}
	}

	// add intrinsic fields to the map
	for fieldName, key := range IntrinsicFields {
		keysMap[fieldName] = append(keysMap[fieldName], &key)
	}
	return keysMap
}
@@ -112,7 +112,8 @@ func (t *telemetryMetaStore) buildBodyJSONPaths(ctx context.Context,
	}

	for _, fieldKey := range fieldKeys {
		fieldKey.Materialized = promoted.Contains(fieldKey.Name)
		promotedKey := strings.Split(fieldKey.Name, telemetrytypes.ArraySep)[0]
		fieldKey.Materialized = promoted.Contains(promotedKey)
		fieldKey.Indexes = indexes[fieldKey.Name]
	}

@@ -253,7 +254,7 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
	sb.Where(sb.Equal("database", telemetrylogs.DBName))
	sb.Where(sb.Equal("table", telemetrylogs.LogsV2LocalTableName))
	sb.Where(sb.Or(
		sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix))),
		sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix))),
		sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix))),
	))

@@ -337,7 +338,7 @@ func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, li
	if promoted {
		path = telemetrylogs.BodyPromotedColumnPrefix + path
	} else {
		path = telemetrylogs.BodyV2ColumnPrefix + path
		path = telemetrylogs.BodyJSONColumnPrefix + path
	}

	from := fmt.Sprintf("%s.%s", telemetrylogs.DBName, telemetrylogs.LogsV2TableName)
@@ -501,7 +502,8 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
	sb := sqlbuilder.Select("path").From(fmt.Sprintf("%s.%s", DBName, PromotedPathsTableName))
	pathConditions := []string{}
	for _, path := range paths {
		pathConditions = append(pathConditions, sb.Equal("path", path))
		split := strings.Split(path, telemetrytypes.ArraySep)
		pathConditions = append(pathConditions, sb.Equal("path", split[0]))
	}
	sb.Where(sb.Or(pathConditions...))

@@ -527,7 +529,7 @@
// TODO(Piyush): Remove this function
func CleanPathPrefixes(path string) string {
	path = strings.TrimPrefix(path, telemetrytypes.BodyJSONStringSearchPrefix)
	path = strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
	path = strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
	path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
	return path
}

@@ -117,7 +117,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
	expectedArgs: []any{
		telemetrylogs.DBName,
		telemetrylogs.LogsV2LocalTableName,
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
	},
},
@@ -130,7 +130,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
	expectedArgs: []any{
		telemetrylogs.DBName,
		telemetrylogs.LogsV2LocalTableName,
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("foo")),
		fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("bar")),
@@ -100,7 +100,7 @@ func NewTelemetryMetaStore(
	jsonColumnMetadata: map[telemetrytypes.Signal]map[telemetrytypes.FieldContext]telemetrytypes.JSONColumnMetadata{
		telemetrytypes.SignalLogs: {
			telemetrytypes.FieldContextBody: telemetrytypes.JSONColumnMetadata{
				BaseColumn: telemetrylogs.LogsV2BodyV2Column,
				BaseColumn: telemetrylogs.LogsV2BodyJSONColumn,
				PromotedColumn: telemetrylogs.LogsV2BodyPromotedColumn,
			},
		},

@@ -7,6 +7,6 @@ const (
	AttributesMetadataTableName = "distributed_attributes_metadata"
	AttributesMetadataLocalTableName = "attributes_metadata"
	PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
	PromotedPathsTableName = "distributed_json_promoted_paths"
	PromotedPathsTableName = otelcollectorconst.DistributedPromotedPathsTable
	SkipIndexTableName = "system.data_skipping_indices"
)

@@ -123,7 +123,7 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
	origTimeAgg := query.Aggregations[0].TimeAggregation
	origGroupBy := slices.Clone(query.GroupBy)

	if query.Aggregations[0].SpaceAggregation.IsPercentile() &&
	if (query.Aggregations[0].SpaceAggregation.IsPercentile() || query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount) &&
		query.Aggregations[0].Type != metrictypes.ExpHistogramType {
		// add le in the group by if doesn't exist
		leExists := false
@@ -154,7 +154,11 @@ func (b *MetricQueryStatementBuilder) buildPipelineStatement(
		}

		// make the time aggregation rate and space aggregation sum
		query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
		if query.Aggregations[0].SpaceAggregation.IsPercentile() {
			query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationRate
		} else {
			query.Aggregations[0].TimeAggregation = metrictypes.TimeAggregationIncrease
		}
		query.Aggregations[0].SpaceAggregation = metrictypes.SpaceAggregationSum
	}

@@ -524,7 +528,7 @@ func (b *MetricQueryStatementBuilder) buildSpatialAggregationCTE(
	return "", nil, errors.Newf(
		errors.TypeInvalidInput,
		errors.CodeInvalidInput,
		"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`]",
		"invalid space aggregation, should be one of the following: [`sum`, `avg`, `min`, `max`, `count`, `p50`, `p75`, `p90`, `p95`, `p99`, `histogram_count`]",
	)
}
sb := sqlbuilder.NewSelectBuilder()
@@ -577,6 +581,34 @@ func (b *MetricQueryStatementBuilder) BuildFinalSelect(
	sb.From("__spatial_aggregation_cte")
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
	sb.GroupBy("ts")
	if query.Having != nil && query.Having.Expression != "" {
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)
		sb.Having(rewrittenExpr)
	}
} else if query.Aggregations[0].SpaceAggregation == metrictypes.SpaceAggregationHistogramCount {
	sb.Select("ts")

	for _, g := range query.GroupBy {
		sb.SelectMore(fmt.Sprintf("`%s`", g.TelemetryFieldKey.Name))
	}

	switch query.Aggregations[0].SpaceAggregationParam.(type) {
	case metrictypes.ComparisonSpaceAggregationParam:
		aggQuery, err := AggregationQueryForHistogramCount(query.Aggregations[0].SpaceAggregationParam.(metrictypes.ComparisonSpaceAggregationParam))
		if err != nil {
			return nil, err
		}
		sb.SelectMore(aggQuery)
	default:
		return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "no aggregation param provided for histogram count")
	}

	sb.From("__spatial_aggregation_cte")

	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
	sb.GroupBy("ts")

	if query.Having != nil && query.Having.Expression != "" {
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr := rewriter.RewriteForMetrics(query.Having.Expression, query.Aggregations)

@@ -1,6 +1,7 @@
package telemetrymetrics

import (
	"fmt"
	"time"

	"github.com/SigNoz/signoz/pkg/errors"
@@ -308,3 +309,17 @@ func AggregationColumnForSamplesTable(
	}
	return aggregationColumn, nil
}

func AggregationQueryForHistogramCount(params metrictypes.ComparisonSpaceAggregationParam) (string, error) {
	histogramCountLimit := params.Limit

	switch params.Operater {
	case "<=":
		return fmt.Sprintf("argMaxIf(value, toFloat64(le), toFloat64(le) <= %f) + (argMinIf(value, toFloat64(le), toFloat64(le) > %f) - argMaxIf(value, toFloat64(le), toFloat64(le) <= %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) <= %f)) / (minIf(toFloat64(le), toFloat64(le) > %f) - maxIf(toFloat64(le), toFloat64(le) <= %f)) AS value", histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit), nil
	case ">":
		return fmt.Sprintf("argMax(value, toFloat64(le)) - (argMaxIf(value, toFloat64(le), toFloat64(le) < %f) + (argMinIf(value, toFloat64(le), toFloat64(le) >= %f) - argMaxIf(value, toFloat64(le), toFloat64(le) < %f)) * (%f - maxIf(toFloat64(le), toFloat64(le) < %f)) / (minIf(toFloat64(le), toFloat64(le) >= %f) - maxIf(toFloat64(le), toFloat64(le) <= %f))) AS value", histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit, histogramCountLimit), nil
	default:
		return "", errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "invalid space aggregation operator, should be one of the following: [`<=`, `>`]")
	}

}

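A quick reading of the generated expression: it linearly interpolates the cumulative bucket counts around the limit. A minimal usage sketch within the telemetrymetrics package (values illustrative; note the field is spelled `Operater` in this code):

	// count(value <= 0.5) ~= C(le_lo) + (C(le_hi) - C(le_lo)) * (0.5 - le_lo) / (le_hi - le_lo),
	// where le_lo is the largest bucket bound <= 0.5 and le_hi the smallest bound > 0.5.
	expr, err := AggregationQueryForHistogramCount(metrictypes.ComparisonSpaceAggregationParam{
		Operater: "<=",
		Limit:    0.5,
	})
	if err != nil {
		// only "<=" and ">" are accepted operators
	}
	_ = expr // appended to the final SELECT via sb.SelectMore(expr) above
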
@@ -2,6 +2,7 @@ package metrictypes

import (
	"database/sql/driver"
	"fmt"
	"strings"

	"github.com/SigNoz/signoz/pkg/errors"
@@ -172,7 +173,6 @@ var (

func (TimeAggregation) Enum() []any {
	return []any{
		TimeAggregationUnspecified,
		TimeAggregationLatest,
		TimeAggregationSum,
		TimeAggregationAvg,
@@ -190,22 +190,22 @@ type SpaceAggregation struct {
}

var (
	SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
	SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
	SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
	SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
	SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
	SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
	SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
	SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
	SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
	SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
	SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
	SpaceAggregationUnspecified = SpaceAggregation{valuer.NewString("")}
	SpaceAggregationSum = SpaceAggregation{valuer.NewString("sum")}
	SpaceAggregationAvg = SpaceAggregation{valuer.NewString("avg")}
	SpaceAggregationMin = SpaceAggregation{valuer.NewString("min")}
	SpaceAggregationMax = SpaceAggregation{valuer.NewString("max")}
	SpaceAggregationCount = SpaceAggregation{valuer.NewString("count")}
	SpaceAggregationPercentile50 = SpaceAggregation{valuer.NewString("p50")}
	SpaceAggregationPercentile75 = SpaceAggregation{valuer.NewString("p75")}
	SpaceAggregationPercentile90 = SpaceAggregation{valuer.NewString("p90")}
	SpaceAggregationPercentile95 = SpaceAggregation{valuer.NewString("p95")}
	SpaceAggregationPercentile99 = SpaceAggregation{valuer.NewString("p99")}
	SpaceAggregationHistogramCount = SpaceAggregation{valuer.NewString("histogram_count")}
)

func (SpaceAggregation) Enum() []any {
	return []any{
		SpaceAggregationUnspecified,
		SpaceAggregationSum,
		SpaceAggregationAvg,
		SpaceAggregationMin,
@@ -216,6 +216,7 @@ func (SpaceAggregation) Enum() []any {
		SpaceAggregationPercentile90,
		SpaceAggregationPercentile95,
		SpaceAggregationPercentile99,
		SpaceAggregationHistogramCount,
	}
}

@@ -258,3 +259,22 @@ type MetricTableHints struct {
type MetricValueFilter struct {
	Value float64
}

type SpaceAggregationParam interface {
	StringValue() string
}

type NoSpaceAggregationParam struct{}

func (_ NoSpaceAggregationParam) StringValue() string {
	return "{}"
}

type ComparisonSpaceAggregationParam struct {
	Operater string `json:"operator"`
	Limit float64 `json:"limit"`
}

func (cso ComparisonSpaceAggregationParam) StringValue() string {
	return fmt.Sprintf("{\"operator\": \"%s\", \"limit\": \"%f\"}", cso.Operater, cso.Limit)
}

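For a concrete sense of the serialized form, a small sketch (values illustrative):

	p := ComparisonSpaceAggregationParam{Operater: "<=", Limit: 0.5}
	_ = p.StringValue() // {"operator": "<=", "limit": "0.500000"}; %f prints six decimal places
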
@@ -36,8 +36,8 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
}

if strings.HasPrefix(i.Path, constants.BodyV2ColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyV2ColumnPrefix, constants.BodyPromotedColumnPrefix)
if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
	return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
}

if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {

@@ -10,10 +10,35 @@ import (
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/swaggest/jsonschema-go"
)

type Step struct{ time.Duration }

var _ jsonschema.Exposer = Step{}

// JSONSchema returns a custom schema for Step that accepts either a duration string or a number (seconds).
func (Step) JSONSchema() (jsonschema.Schema, error) {
	s := jsonschema.Schema{}
	s.WithDescription("Step interval. Accepts a Go duration string (e.g., \"60s\", \"1m\", \"1h\") or a number representing seconds (e.g., 60).")

	strSchema := jsonschema.Schema{}
	strSchema.WithType(jsonschema.String.Type())
	strSchema.WithExamples("60s", "5m", "1h")
	strSchema.WithDescription("Duration string (e.g., \"60s\", \"5m\", \"1h\").")

	numSchema := jsonschema.Schema{}
	numSchema.WithType(jsonschema.Number.Type())
	numSchema.WithExamples(60, 300, 3600)
	numSchema.WithDescription("Duration in seconds.")

	s.OneOf = []jsonschema.SchemaOrBool{
		strSchema.ToSchemaOrBool(),
		numSchema.ToSchemaOrBool(),
	}
	return s, nil
}

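Assuming the UnmarshalJSON below accepts both forms the schema documents, these two payloads should decode to the same Step (a sketch):

	var a, b Step
	_ = json.Unmarshal([]byte(`"5m"`), &a) // duration-string form
	_ = json.Unmarshal([]byte(`300`), &b)  // numeric seconds form
	// a.Duration == b.Duration == 5 * time.Minute
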
func (s *Step) UnmarshalJSON(b []byte) error {
	if len(b) == 0 {
		return nil
@@ -161,9 +186,11 @@ func (f FilterOperator) IsStringSearchOperator() bool {
	}
}

func (f FilterOperator) IsOpValidForJSON() bool {
// IsArrayOperator returns true if the operator works with array values only
func (f FilterOperator) IsArrayOperator() bool {
	switch f {
	case FilterOperatorExists, FilterOperatorNotExists, FilterOperatorContains, FilterOperatorNotContains:
	case FilterOperatorIn, FilterOperatorNotIn,
		FilterOperatorBetween, FilterOperatorNotBetween:
		return true
	default:
		return false
@@ -179,6 +206,14 @@ var (
	OrderDirectionDesc = OrderDirection{valuer.NewString("desc")}
)

// Enum returns the acceptable values for OrderDirection.
func (OrderDirection) Enum() []any {
	return []any{
		OrderDirectionAsc,
		OrderDirectionDesc,
	}
}

var (
	OrderDirectionMap = map[string]OrderDirection{
		"asc": OrderDirectionAsc,
@@ -201,6 +236,19 @@ var (
	ReduceToMedian = ReduceTo{valuer.NewString("median")}
)

// Enum returns the acceptable values for ReduceTo.
func (ReduceTo) Enum() []any {
	return []any{
		ReduceToSum,
		ReduceToCount,
		ReduceToAvg,
		ReduceToMin,
		ReduceToMax,
		ReduceToLast,
		ReduceToMedian,
	}
}

// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
// reduceTo can be one of: last, sum, avg, min, max, count, median
// if reduceTo is not recognized, the function returns the original series
@@ -398,6 +446,8 @@ type MetricAggregation struct {
	TimeAggregation metrictypes.TimeAggregation `json:"timeAggregation"`
	// space aggregation to apply to the query
	SpaceAggregation metrictypes.SpaceAggregation `json:"spaceAggregation"`
	// param for space aggregation if needed
	SpaceAggregationParam metrictypes.SpaceAggregationParam `json:"spaceAggregationParam"`
	// table hints to use for the query
	TableHints *metrictypes.MetricTableHints `json:"-"`
	// value filter to apply to the query
@@ -406,6 +456,40 @@ type MetricAggregation struct {
	ReduceTo ReduceTo `json:"reduceTo,omitempty"`
}

func (m *MetricAggregation) UnmarshalJSON(data []byte) error {
	type Alias MetricAggregation
	aux := &struct {
		SpaceAggregationParam json.RawMessage `json:"spaceAggregationParam"`
		*Alias
	}{
		Alias: (*Alias)(m),
	}

	if err := json.Unmarshal(data, &aux); err != nil {
		return err
	}

	// If no param provided
	if len(aux.SpaceAggregationParam) == 0 || string(aux.SpaceAggregationParam) == "null" {
		m.SpaceAggregationParam = metrictypes.NoSpaceAggregationParam{}
		return nil
	}

	switch m.SpaceAggregation {
	case metrictypes.SpaceAggregationHistogramCount:
		var p metrictypes.ComparisonSpaceAggregationParam
		if err := json.Unmarshal(aux.SpaceAggregationParam, &p); err != nil {
			return err
		}
		m.SpaceAggregationParam = p

	default:
		m.SpaceAggregationParam = metrictypes.NoSpaceAggregationParam{}
	}

	return nil
}

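A hedged example of the JSON this unmarshaler targets (only the fields shown above; the remaining aggregation fields are omitted):

	raw := []byte(`{
		"spaceAggregation": "histogram_count",
		"spaceAggregationParam": {"operator": "<=", "limit": 0.5}
	}`)
	var agg MetricAggregation
	// histogram_count decodes the raw param into ComparisonSpaceAggregationParam;
	// any other space aggregation falls back to NoSpaceAggregationParam{}.
	_ = json.Unmarshal(raw, &agg)
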
// Copy creates a deep copy of MetricAggregation
func (m MetricAggregation) Copy() MetricAggregation {
	c := m

@@ -36,6 +36,30 @@ var (
	FunctionNameFillZero = FunctionName{valuer.NewString("fillZero")}
)

// Enum returns the acceptable values for FunctionName.
func (FunctionName) Enum() []any {
	return []any{
		FunctionNameCutOffMin,
		FunctionNameCutOffMax,
		FunctionNameClampMin,
		FunctionNameClampMax,
		FunctionNameAbsolute,
		FunctionNameRunningDiff,
		FunctionNameLog2,
		FunctionNameLog10,
		FunctionNameCumulativeSum,
		FunctionNameEWMA3,
		FunctionNameEWMA5,
		FunctionNameEWMA7,
		FunctionNameMedian3,
		FunctionNameMedian5,
		FunctionNameMedian7,
		FunctionNameTimeShift,
		FunctionNameAnomaly,
		FunctionNameFillZero,
	}
}

// Validate checks if the FunctionName is valid and one of the known types
func (fn FunctionName) Validate() error {
	validFunctions := []FunctionName{

@@ -16,6 +16,17 @@ var (
	JoinTypeCross = JoinType{valuer.NewString("cross")}
)

// Enum returns the acceptable values for JoinType.
func (JoinType) Enum() []any {
	return []any{
		JoinTypeInner,
		JoinTypeLeft,
		JoinTypeRight,
		JoinTypeFull,
		JoinTypeCross,
	}
}

type QueryRef struct {
	Name string `json:"name"`
}

@@ -2,6 +2,8 @@ package querybuildertypesv5

import (
	"context"

	"github.com/swaggest/jsonschema-go"
)

type Query interface {
@@ -29,4 +31,12 @@ type ExecStats struct {
	StepIntervals map[string]uint64 `json:"stepIntervals,omitempty"`
}

var _ jsonschema.Preparer = &ExecStats{}

// PrepareJSONSchema adds description to the ExecStats schema.
func (e *ExecStats) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Execution statistics for the query, including rows scanned, bytes scanned, and duration.")
	return nil
}

type TimeRange struct{ From, To uint64 } // ms since epoch

@@ -16,3 +16,17 @@ var (
	QueryTypeClickHouseSQL = QueryType{valuer.NewString("clickhouse_sql")}
	QueryTypePromQL = QueryType{valuer.NewString("promql")}
)

// Enum returns the acceptable values for QueryType.
func (QueryType) Enum() []any {
	return []any{
		QueryTypeBuilder,
		QueryTypeFormula,
		// Not yet supported.
		// QueryTypeSubQuery,
		// QueryTypeJoin,
		QueryTypeTraceOperator,
		QueryTypeClickHouseSQL,
		QueryTypePromQL,
	}
}

@@ -9,6 +9,7 @@ import (
	"github.com/SigNoz/signoz/pkg/types/metrictypes"
	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/swaggest/jsonschema-go"
)

type QueryEnvelope struct {
@@ -18,6 +19,71 @@ type QueryEnvelope struct {
	Spec any `json:"spec"`
}

// queryEnvelopeBuilderTrace is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=traces.
type queryEnvelopeBuilderTrace struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec QueryBuilderQuery[TraceAggregation] `json:"spec" description:"The trace builder query specification."`
}

// queryEnvelopeBuilderLog is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=logs.
type queryEnvelopeBuilderLog struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec QueryBuilderQuery[LogAggregation] `json:"spec" description:"The log builder query specification."`
}

// queryEnvelopeBuilderMetric is the OpenAPI schema for a QueryEnvelope with type=builder_query and signal=metrics.
type queryEnvelopeBuilderMetric struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec QueryBuilderQuery[MetricAggregation] `json:"spec" description:"The metric builder query specification."`
}

// queryEnvelopeFormula is the OpenAPI schema for a QueryEnvelope with type=builder_formula.
type queryEnvelopeFormula struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec QueryBuilderFormula `json:"spec" description:"The formula specification."`
}

// queryEnvelopeJoin is the OpenAPI schema for a QueryEnvelope with type=builder_join.
// type queryEnvelopeJoin struct {
//	Type QueryType `json:"type" description:"The type of the query."`
//	Spec QueryBuilderJoin `json:"spec" description:"The join specification."`
// }

// queryEnvelopeTraceOperator is the OpenAPI schema for a QueryEnvelope with type=builder_trace_operator.
type queryEnvelopeTraceOperator struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec QueryBuilderTraceOperator `json:"spec" description:"The trace operator specification."`
}

// queryEnvelopePromQL is the OpenAPI schema for a QueryEnvelope with type=promql.
type queryEnvelopePromQL struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec PromQuery `json:"spec" description:"The PromQL query specification."`
}

// queryEnvelopeClickHouseSQL is the OpenAPI schema for a QueryEnvelope with type=clickhouse_sql.
type queryEnvelopeClickHouseSQL struct {
	Type QueryType `json:"type" description:"The type of the query."`
	Spec ClickHouseQuery `json:"spec" description:"The ClickHouse SQL query specification."`
}

var _ jsonschema.OneOfExposer = QueryEnvelope{}

// JSONSchemaOneOf returns the oneOf variants for the QueryEnvelope discriminated union.
// Each variant represents a different query type with its corresponding spec schema.
func (QueryEnvelope) JSONSchemaOneOf() []any {
	return []any{
		queryEnvelopeBuilderTrace{},
		queryEnvelopeBuilderLog{},
		queryEnvelopeBuilderMetric{},
		queryEnvelopeFormula{},
		// queryEnvelopeJoin{},
		queryEnvelopeTraceOperator{},
		queryEnvelopePromQL{},
		queryEnvelopeClickHouseSQL{},
	}
}

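As a sketch, a promql envelope might look like this (the spec fields are illustrative; PromQuery's exact shape is defined elsewhere in the package):

	raw := []byte(`{"type": "promql", "spec": {"query": "up"}}`)
	var env QueryEnvelope
	_ = json.Unmarshal(raw, &env) // dispatches on "type" and decodes spec into PromQuery
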
// implement custom json unmarshaler for the QueryEnvelope
func (q *QueryEnvelope) UnmarshalJSON(data []byte) error {
	var shadow struct {
@@ -130,6 +196,12 @@ type CompositeQuery struct {
	Queries []QueryEnvelope `json:"queries"`
}

// PrepareJSONSchema adds description to the CompositeQuery schema.
func (c *CompositeQuery) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Composite query containing one or more query envelopes. Each query envelope specifies its type and corresponding spec.")
	return nil
}

// UnmarshalJSON implements custom JSON unmarshaling to provide better error messages
func (c *CompositeQuery) UnmarshalJSON(data []byte) error {
	type Alias CompositeQuery
@@ -192,6 +264,16 @@ var (
	TextBoxVariableType = VariableType{valuer.NewString("text")}
)

// Enum returns the acceptable values for VariableType.
func (VariableType) Enum() []any {
	return []any{
		QueryVariableType,
		DynamicVariableType,
		CustomVariableType,
		TextBoxVariableType,
	}
}

type VariableItem struct {
	Type VariableType `json:"type"`
	Value any `json:"value"`
@@ -217,6 +299,12 @@ type QueryRangeRequest struct {
	FormatOptions *FormatOptions `json:"formatOptions,omitempty"`
}

// PrepareJSONSchema adds description to the QueryRangeRequest schema.
func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Request body for the v5 query range endpoint. Supports builder queries (traces, logs, metrics), formulas, joins, trace operators, PromQL, and ClickHouse SQL queries.")
	return nil
}

func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
	stepsMap := make(map[string]int64)
	for _, query := range r.CompositeQuery.Queries {

@@ -30,3 +30,15 @@ var (
func (r RequestType) IsAggregation() bool {
	return r == RequestTypeTimeSeries || r == RequestTypeScalar || r == RequestTypeDistribution
}

// Enum implements jsonschema.Enum; returns the acceptable values for RequestType.
func (RequestType) Enum() []any {
	return []any{
		RequestTypeScalar,
		RequestTypeTimeSeries,
		RequestTypeRaw,
		RequestTypeRawStream,
		RequestTypeTrace,
		// RequestTypeDistribution,
	}
}

@@ -12,6 +12,7 @@ import (

	"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
	"github.com/SigNoz/signoz/pkg/valuer"
	"github.com/swaggest/jsonschema-go"
)

type QBEvent struct {
@@ -42,6 +43,17 @@ type QueryData struct {
	Results []any `json:"results"`
}

var _ jsonschema.OneOfExposer = QueryData{}

// JSONSchemaOneOf documents the polymorphic result types in QueryData.Results.
func (QueryData) JSONSchemaOneOf() []any {
	return []any{
		TimeSeriesData{},
		ScalarData{},
		RawData{},
	}
}

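Since Results is []any at runtime, consumers typically recover the concrete type with a type switch (a sketch; whether values or pointers are stored depends on the producer):

	for _, r := range data.Results {
		switch v := r.(type) {
		case *TimeSeriesData:
			_ = v.QueryName
		case *ScalarData:
			// scalar result
		case *RawData:
			// raw rows
		}
	}
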
type QueryRangeResponse struct {
	Type RequestType `json:"type"`
	Data QueryData `json:"data"`
@@ -52,6 +64,14 @@ type QueryRangeResponse struct {
	QBEvent *QBEvent `json:"-"`
}

var _ jsonschema.Preparer = &QueryRangeResponse{}

// PrepareJSONSchema adds description to the QueryRangeResponse schema.
func (q *QueryRangeResponse) PrepareJSONSchema(schema *jsonschema.Schema) error {
	schema.WithDescription("Response from the v5 query range endpoint. The data.results array contains typed results depending on the requestType: TimeSeriesData for time_series, ScalarData for scalar, or RawData for raw requests.")
	return nil
}

type TimeSeriesData struct {
	QueryName string `json:"queryName"`
	Aggregations []*AggregationBucket `json:"aggregations"`
@@ -159,6 +179,14 @@ var (
	ColumnTypeAggregation = ColumnType{valuer.NewString("aggregation")}
)

// Enum returns the acceptable values for ColumnType.
func (ColumnType) Enum() []any {
	return []any{
		ColumnTypeGroup,
		ColumnTypeAggregation,
	}
}

type ColumnDescriptor struct {
	telemetrytypes.TelemetryFieldKey
	QueryName string `json:"queryName"`

@@ -23,8 +23,10 @@ const (
	// e.g., "body.status" where "body." is the prefix
	BodyJSONStringSearchPrefix = "body."
	ArraySep = jsontypeexporter.ArraySeparator
	ArraySepSuffix = "[]"
	// TODO(Piyush): Remove once we've migrated to the new array syntax
	ArrayAnyIndex = "[*]."
	ArrayAnyIndex = "[*]."
	ArrayAnyIndexSuffix = "[*]"
)

type TelemetryFieldKey struct {
@@ -60,10 +62,6 @@ func (f *TelemetryFieldKey) ArrayParentPaths() []string {
	return paths
}

func (f *TelemetryFieldKey) MustBuildJSONCondition() bool {
	return f.FieldDataType != FieldDataTypeJSON && f.JSONDataType != nil
}

func (f *TelemetryFieldKey) ArrayParentSelectors() []*FieldKeySelector {
	paths := f.ArrayParentPaths()
	selectors := make([]*FieldKeySelector, 0, len(paths))

@@ -172,3 +172,18 @@ func isContextValidForSignal(ctx FieldContext, signal Signal) bool {
	}
	return true
}

// Enum returns the acceptable values for FieldContext.
func (FieldContext) Enum() []any {
	return []any{
		FieldContextMetric,
		FieldContextLog,
		FieldContextSpan,
		// FieldContextTrace,
		FieldContextResource,
		// FieldContextScope,
		FieldContextAttribute,
		// FieldContextEvent,
		FieldContextBody,
	}
}

@@ -21,7 +21,6 @@ var (
	// int64 and number are synonyms for float64
	FieldDataTypeInt64 = FieldDataType{valuer.NewString("int64")}
	FieldDataTypeNumber = FieldDataType{valuer.NewString("number")}
	FieldDataTypeJSON = FieldDataType{valuer.NewString("json")}
	FieldDataTypeUnspecified = FieldDataType{valuer.NewString("")}

	FieldDataTypeArrayString = FieldDataType{valuer.NewString("[]string")}
@@ -94,6 +93,14 @@ var (
	FieldDataTypeArrayFloat64: "Array(Float64)",
	FieldDataTypeArrayBool: "Array(Bool)",
	}

	ScalerFieldTypeToArrayFieldType = map[FieldDataType]FieldDataType{
		FieldDataTypeString: FieldDataTypeArrayString,
		FieldDataTypeBool: FieldDataTypeArrayBool,
		FieldDataTypeNumber: FieldDataTypeArrayNumber,
		FieldDataTypeInt64: FieldDataTypeArrayInt64,
		FieldDataTypeFloat64: FieldDataTypeArrayFloat64,
	}
)

func (f FieldDataType) CHDataType() string {
@@ -170,3 +177,19 @@ func (f FieldDataType) TagDataType() string {
		return "string"
	}
}

// Enum returns the acceptable values for FieldDataType.
func (FieldDataType) Enum() []any {
	return []any{
		FieldDataTypeString,
		FieldDataTypeBool,
		FieldDataTypeFloat64,
		FieldDataTypeInt64,
		FieldDataTypeNumber,
		// FieldDataTypeArrayString,
		// FieldDataTypeArrayFloat64,
		// FieldDataTypeArrayBool,
		// FieldDataTypeArrayInt64,
		// FieldDataTypeArrayNumber,
	}
}

@@ -40,7 +40,7 @@ type JSONAccessNode struct {
	// Node information
	Name string
	IsTerminal bool
	isRoot bool // marked true for only body_v2 and body_promoted
	isRoot bool // marked true for only body_json and body_json_promoted

	// Precomputed type information (single source of truth)
	AvailableTypes []JSONDataType

@@ -1,19 +1,12 @@
package telemetrytypes

import (
	"fmt"
	"testing"

	otelconstants "github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

const (
	bodyV2Column = otelconstants.BodyV2Column
	bodyPromotedColumn = otelconstants.BodyPromotedColumn
)

// ============================================================================
// Helper Functions for Test Data Creation
// ============================================================================
@@ -116,8 +109,8 @@ func TestNode_Alias(t *testing.T) {
	}{
		{
			name: "Root node returns name as-is",
			node: NewRootJSONAccessNode(bodyV2Column, 32, 0),
			expected: bodyV2Column,
			node: NewRootJSONAccessNode("body_json", 32, 0),
			expected: "body_json",
		},
		{
			name: "Node without parent returns backticked name",
@@ -131,9 +124,9 @@ func TestNode_Alias(t *testing.T) {
			name: "Node with root parent uses dot separator",
			node: &JSONAccessNode{
				Name: "age",
				Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
				Parent: NewRootJSONAccessNode("body_json", 32, 0),
			},
			expected: "`" + bodyV2Column + ".age`",
			expected: "`" + "body_json" + ".age`",
		},
		{
			name: "Node with non-root parent uses array separator",
@@ -141,10 +134,10 @@ func TestNode_Alias(t *testing.T) {
				Name: "name",
				Parent: &JSONAccessNode{
					Name: "education",
					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
			expected: "`" + bodyV2Column + ".education[].name`",
			expected: "`" + "body_json" + ".education[].name`",
		},
		{
			name: "Nested array path with multiple levels",
@@ -154,11 +147,11 @@ func TestNode_Alias(t *testing.T) {
				Name: "awards",
				Parent: &JSONAccessNode{
					Name: "education",
					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
			},
			expected: "`" + bodyV2Column + ".education[].awards[].type`",
			expected: "`" + "body_json" + ".education[].awards[].type`",
		},
	}

@@ -180,18 +173,18 @@ func TestNode_FieldPath(t *testing.T) {
			name: "Simple field path from root",
			node: &JSONAccessNode{
				Name: "user",
				Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
				Parent: NewRootJSONAccessNode("body_json", 32, 0),
			},
			// FieldPath() always wraps the field name in backticks
			expected: bodyV2Column + ".`user`",
			expected: "body_json" + ".`user`",
		},
		{
			name: "Field path with backtick-required key",
			node: &JSONAccessNode{
				Name: "user-name", // requires backtick
				Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
				Parent: NewRootJSONAccessNode("body_json", 32, 0),
			},
			expected: bodyV2Column + ".`user-name`",
			expected: "body_json" + ".`user-name`",
		},
		{
			name: "Nested field path",
@@ -199,11 +192,11 @@ func TestNode_FieldPath(t *testing.T) {
				Name: "age",
				Parent: &JSONAccessNode{
					Name: "user",
					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
			expected: "`" + bodyV2Column + ".user`.`age`",
			expected: "`" + "body_json" + ".user`.`age`",
		},
		{
			name: "Array element field path",
@@ -211,11 +204,11 @@ func TestNode_FieldPath(t *testing.T) {
				Name: "name",
				Parent: &JSONAccessNode{
					Name: "education",
					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
			expected: "`" + bodyV2Column + ".education`.`name`",
			expected: "`" + "body_json" + ".education`.`name`",
		},
	}

@@ -243,36 +236,36 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
		{
			name: "Simple path not promoted",
			key: makeKey("user.name", String, false),
			expectedYAML: fmt.Sprintf(`
			expectedYAML: `
- name: user.name
  column: %s
  column: body_json
  availableTypes:
    - String
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
		{
			name: "Simple path promoted",
			key: makeKey("user.name", String, true),
			expectedYAML: fmt.Sprintf(`
			expectedYAML: `
- name: user.name
  column: %s
  column: body_json
  availableTypes:
    - String
  maxDynamicTypes: 16
  isTerminal: true
  elemType: String
- name: user.name
  column: %s
  column: body_json_promoted
  availableTypes:
    - String
  maxDynamicTypes: 16
  maxDynamicPaths: 256
  isTerminal: true
  elemType: String
`, bodyV2Column, bodyPromotedColumn),
`,
		},
		{
			name: "Empty path returns error",
@@ -285,8 +278,8 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.key.SetJSONAccessPlan(JSONColumnMetadata{
				BaseColumn: bodyV2Column,
				PromotedColumn: bodyPromotedColumn,
				BaseColumn: "body_json",
				PromotedColumn: "body_json_promoted",
			}, types)
			if tt.expectErr {
				require.Error(t, err)
@@ -311,9 +304,9 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
		{
			name: "Single array level - JSON branch only",
			path: "education[].name",
			expectedYAML: fmt.Sprintf(`
			expectedYAML: `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -325,14 +318,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
  maxDynamicTypes: 8
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
		{
			name: "Single array level - both JSON and Dynamic branches",
			path: "education[].awards[].type",
			expectedYAML: fmt.Sprintf(`
			expectedYAML: `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -359,14 +352,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
  maxDynamicPaths: 256
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
		{
			name: "Deeply nested array path",
			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			expectedYAML: fmt.Sprintf(`
			expectedYAML: `
- name: interests
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -406,14 +399,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
    - String
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
		{
			name: "ArrayAnyIndex replacement [*] to []",
			path: "education[*].name",
			expectedYAML: fmt.Sprintf(`
			expectedYAML: `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -425,7 +418,7 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
  maxDynamicTypes: 8
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
	}

@@ -433,8 +426,8 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
	t.Run(tt.name, func(t *testing.T) {
		key := makeKey(tt.path, String, false)
		err := key.SetJSONAccessPlan(JSONColumnMetadata{
			BaseColumn: bodyV2Column,
			PromotedColumn: bodyPromotedColumn,
			BaseColumn: "body_json",
			PromotedColumn: "body_json_promoted",
		}, types)
		require.NoError(t, err)
		require.NotNil(t, key.JSONPlan)
@@ -452,15 +445,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
	t.Run("Non-promoted plan", func(t *testing.T) {
		key := makeKey(path, String, false)
		err := key.SetJSONAccessPlan(JSONColumnMetadata{
			BaseColumn: bodyV2Column,
			PromotedColumn: bodyPromotedColumn,
			BaseColumn: "body_json",
			PromotedColumn: "body_json_promoted",
		}, types)
		require.NoError(t, err)
		require.Len(t, key.JSONPlan, 1)

		expectedYAML := fmt.Sprintf(`
		expectedYAML := `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -487,7 +480,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
  maxDynamicPaths: 256
  isTerminal: true
  elemType: String
`, bodyV2Column)
`
		got := plansToYAML(t, key.JSONPlan)
		require.YAMLEq(t, expectedYAML, got)
	})

@@ -495,15 +488,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
	t.Run("Promoted plan", func(t *testing.T) {
		key := makeKey(path, String, true)
		err := key.SetJSONAccessPlan(JSONColumnMetadata{
			BaseColumn: bodyV2Column,
			PromotedColumn: bodyPromotedColumn,
			BaseColumn: "body_json",
			PromotedColumn: "body_json_promoted",
		}, types)
		require.NoError(t, err)
		require.Len(t, key.JSONPlan, 2)

		expectedYAML := fmt.Sprintf(`
		expectedYAML := `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -531,7 +524,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
  isTerminal: true
  elemType: String
- name: education
  column: %s
  column: body_json_promoted
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -561,7 +554,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
  maxDynamicPaths: 256
  isTerminal: true
  elemType: String
`, bodyV2Column, bodyPromotedColumn)
`
		got := plansToYAML(t, key.JSONPlan)
		require.YAMLEq(t, expectedYAML, got)
	})
@@ -582,11 +575,11 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
			expectErr: true,
		},
		{
			name: "Very deep nesting - validates progression doesn't go negative",
			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			expectedYAML: fmt.Sprintf(`
			name: "Very deep nesting - validates progression doesn't go negative",
			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
			expectedYAML: `
- name: interests
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -626,14 +619,14 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
    - String
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
		{
			name: "Path with mixed scalar and array types",
			path: "education[].type",
			expectedYAML: fmt.Sprintf(`
			name: "Path with mixed scalar and array types",
			path: "education[].type",
			expectedYAML: `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -646,20 +639,20 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
  maxDynamicTypes: 8
  isTerminal: true
  elemType: String
`, bodyV2Column),
`,
		},
		{
			name: "Exists with only array types available",
			path: "education",
			expectedYAML: fmt.Sprintf(`
			name: "Exists with only array types available",
			path: "education",
			expectedYAML: `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
  isTerminal: true
  elemType: Array(JSON)
`, bodyV2Column),
`,
		},
	}

@@ -675,8 +668,8 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
			}
			key := makeKey(tt.path, keyType, false)
			err := key.SetJSONAccessPlan(JSONColumnMetadata{
				BaseColumn: bodyV2Column,
				PromotedColumn: bodyPromotedColumn,
				BaseColumn: "body_json",
				PromotedColumn: "body_json_promoted",
			}, types)
			if tt.expectErr {
				require.Error(t, err)
@@ -694,15 +687,15 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
	path := "education[].awards[].participated[].team[].branch"
	key := makeKey(path, String, false)
	err := key.SetJSONAccessPlan(JSONColumnMetadata{
		BaseColumn: bodyV2Column,
		PromotedColumn: bodyPromotedColumn,
		BaseColumn: "body_json",
		PromotedColumn: "body_json_promoted",
	}, types)
	require.NoError(t, err)
	require.Len(t, key.JSONPlan, 1)

	expectedYAML := fmt.Sprintf(`
	expectedYAML := `
- name: education
  column: %s
  column: body_json
  availableTypes:
    - Array(JSON)
  maxDynamicTypes: 16
@@ -787,7 +780,7 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
  maxDynamicPaths: 64
  isTerminal: true
  elemType: String
`, bodyV2Column)
`

	got := plansToYAML(t, key.JSONPlan)
	require.YAMLEq(t, expectedYAML, got)

@@ -12,3 +12,12 @@ var (
	SignalMetrics = Signal{valuer.NewString("metrics")}
	SignalUnspecified = Signal{valuer.NewString("")}
)

// Enum returns the acceptable values for Signal.
func (Signal) Enum() []any {
	return []any{
		SignalTraces,
		SignalLogs,
		SignalMetrics,
	}
}

@@ -10,3 +10,10 @@ var (
	SourceMeter = Source{valuer.NewString("meter")}
	SourceUnspecified = Source{valuer.NewString("")}
)

// Enum returns the acceptable values for Source.
func (Source) Enum() []any {
	return []any{
		SourceMeter,
	}
}

@@ -2,7 +2,6 @@ package telemetrytypestest

import (
	"context"
	"slices"
	"strings"

	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
@@ -177,9 +176,8 @@ func matchesKey(selector *telemetrytypes.FieldKeySelector, key *telemetrytypes.T
		return true
	}

	matchNameExceptions := []string{"body"}
	// Check name (already checked in matchesName, but double-check here)
	if selector.Name != "" && !matchesName(selector, key.Name) && slices.Contains(matchNameExceptions, key.Name) {
	if selector.Name != "" && !matchesName(selector, key.Name) {
		return false
	}


@@ -65,6 +65,7 @@ func TestJSONTypeSet() (map[string][]JSONDataType, MetadataStore) {
		"interests[].entities[].reviews[].entries[].metadata[].positions[].unit": {String},
		"interests[].entities[].reviews[].entries[].metadata[].positions[].ratings": {ArrayInt64, ArrayString},
		"message": {String},
		"tags": {ArrayString},
	}

	return types, nil

tests/integration/src/passwordauthn/06_duplicate_invite.py (new file, 62 lines)
@@ -0,0 +1,62 @@
from http import HTTPStatus
from typing import Callable

import requests

from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.types import SigNoz

DUPLICATE_USER_EMAIL = "duplicate@integration.test"


def test_duplicate_user_invite_rejected(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
):
    """
    Verify that the unique index on (email, org_id) in the users table prevents
    creating duplicate users. This invites a new user, accepts the invite, then
    tries to invite and accept the same email again expecting a failure.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Step 1: Invite a new user.
    initial_invite_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": DUPLICATE_USER_EMAIL, "role": "EDITOR"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )
    assert initial_invite_response.status_code == HTTPStatus.CREATED
    initial_invite_token = initial_invite_response.json()["data"]["token"]

    # Step 2: Accept the invite to create the user.
    initial_accept_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
        json={"token": initial_invite_token, "password": "password123Z$"},
        timeout=2,
    )
    assert initial_accept_response.status_code == HTTPStatus.CREATED

    # Step 3: Invite the same email again.
    duplicate_invite_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite"),
        json={"email": DUPLICATE_USER_EMAIL, "role": "VIEWER"},
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=2,
    )

    # The invite creation itself may be rejected if the app checks for existing users.
    if duplicate_invite_response.status_code != HTTPStatus.CREATED:
        assert duplicate_invite_response.status_code == HTTPStatus.CONFLICT
        return

    duplicate_invite_token = duplicate_invite_response.json()["data"]["token"]

    # Step 4: Accept the duplicate invite, which should fail due to the unique constraint.
    duplicate_accept_response = requests.post(
        signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
        json={"token": duplicate_invite_token, "password": "password123Z$"},
        timeout=2,
    )
    assert duplicate_accept_response.status_code == HTTPStatus.CONFLICT