Compare commits

..

2 Commits

66 changed files with 109 additions and 4335 deletions

View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/authz/openfgaauthz"
@@ -94,9 +93,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(_ licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return noopgateway.NewProviderFactory()
},
func(_ licensing.Licensing) factory.NamedMap[factory.ProviderFactory[auditor.Auditor, auditor.Config]] {
return signoz.NewAuditorProviderFactories()
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
return querier.NewHandler(ps, q, a)
},

View File

@@ -8,7 +8,6 @@ import (
"github.com/spf13/cobra"
"github.com/SigNoz/signoz/cmd"
"github.com/SigNoz/signoz/ee/auditor/otlphttpauditor"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/oidccallbackauthn"
"github.com/SigNoz/signoz/ee/authn/callbackauthn/samlcallbackauthn"
"github.com/SigNoz/signoz/ee/authz/openfgaauthz"
@@ -25,7 +24,6 @@ import (
enterprisezeus "github.com/SigNoz/signoz/ee/zeus"
"github.com/SigNoz/signoz/ee/zeus/httpzeus"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/authn"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
@@ -135,13 +133,6 @@ func runServer(ctx context.Context, config signoz.Config, logger *slog.Logger) e
func(licensing licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config] {
return httpgateway.NewProviderFactory(licensing)
},
func(licensing licensing.Licensing) factory.NamedMap[factory.ProviderFactory[auditor.Auditor, auditor.Config]] {
factories := signoz.NewAuditorProviderFactories()
if err := factories.Add(otlphttpauditor.NewFactory(licensing, version.Info)); err != nil {
panic(err)
}
return factories
},
func(ps factory.ProviderSettings, q querier.Querier, a analytics.Analytics) querier.Handler {
communityHandler := querier.NewHandler(ps, q, a)
return eequerier.NewHandler(ps, q, communityHandler)

View File

@@ -364,34 +364,3 @@ serviceaccount:
analytics:
# toggle service account analytics
enabled: true
##################### Auditor #####################
auditor:
# Specifies the auditor provider to use.
# noop: discards all audit events (community default).
# otlphttp: exports audit events via OTLP HTTP (enterprise).
provider: noop
# The async channel capacity for audit events. Events are dropped when full (fail-open).
buffer_size: 1000
# The maximum number of events per export batch.
batch_size: 100
# The maximum time between export flushes.
flush_interval: 1s
otlphttp:
# The target scheme://host:port/path of the OTLP HTTP endpoint.
endpoint: http://localhost:4318/v1/logs
# Whether to use HTTP instead of HTTPS.
insecure: false
# The maximum duration for an export attempt.
timeout: 10s
# Additional HTTP headers sent with every export request.
headers: {}
retry:
# Whether to retry on transient failures.
enabled: true
# The initial wait time before the first retry.
initial_interval: 5s
# The upper bound on backoff interval.
max_interval: 30s
# The total maximum time spent retrying.
max_elapsed_time: 60s

View File

@@ -227,7 +227,7 @@ func (s *Server) createPublicServer(apiHandler *api.APIHandler, web web.Web) (*h
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewAudit(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes, s.signoz.Auditor).Wrap)
r.Use(middleware.NewAudit(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes, nil).Wrap)
r.Use(middleware.NewComment().Wrap)
apiHandler.RegisterRoutes(r, am)

View File

@@ -139,12 +139,10 @@
"react-helmet-async": "1.3.0",
"react-hook-form": "7.71.2",
"react-i18next": "^11.16.1",
"react-json-tree": "0.20.0",
"react-lottie": "1.2.10",
"react-markdown": "8.0.7",
"react-query": "3.39.3",
"react-redux": "^7.2.2",
"react-rnd": "10.5.3",
"react-router-dom": "^5.2.0",
"react-router-dom-v5-compat": "6.27.0",
"react-syntax-highlighter": "15.5.0",

View File

@@ -1,40 +0,0 @@
.details-header {
// ghost + secondary missing hover bg token in @signozhq/button
--button-ghost-hover-background: var(--l3-background);
display: flex;
align-items: center;
gap: 8px;
padding: 12px;
border-bottom: 1px solid var(--l1-border);
height: 36px;
background: var(--l2-background);
&__icon-btn {
flex-shrink: 0;
}
&__title {
font-size: 13px;
font-weight: 500;
color: var(--l1-foreground);
flex: 1;
min-width: 0;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
&__actions {
display: flex;
align-items: center;
gap: 4px;
flex-shrink: 0;
}
&__nav {
display: flex;
align-items: center;
gap: 2px;
}
}

View File

@@ -1,59 +0,0 @@
import { ReactNode } from 'react';
import { Button } from '@signozhq/button';
import { X } from '@signozhq/icons';
import './DetailsHeader.styles.scss';
export interface HeaderAction {
key: string;
component: ReactNode; // check later if we can use direct btn itself or not.
}
export interface DetailsHeaderProps {
title: string;
onClose: () => void;
actions?: HeaderAction[];
closePosition?: 'left' | 'right';
className?: string;
}
function DetailsHeader({
title,
onClose,
actions,
closePosition = 'right',
className,
}: DetailsHeaderProps): JSX.Element {
const closeButton = (
<Button
variant="ghost"
size="icon"
color="secondary"
onClick={onClose}
aria-label="Close"
className="details-header__icon-btn"
>
<X size={14} />
</Button>
);
return (
<div className={`details-header ${className || ''}`}>
{closePosition === 'left' && closeButton}
<span className="details-header__title">{title}</span>
{actions && (
<div className="details-header__actions">
{actions.map((action) => (
<div key={action.key}>{action.component}</div>
))}
</div>
)}
{closePosition === 'right' && closeButton}
</div>
);
}
export default DetailsHeader;

View File

@@ -1,7 +0,0 @@
.details-panel-drawer {
&__body {
display: flex;
flex-direction: column;
height: 100%;
}
}

View File

@@ -1,36 +0,0 @@
import { DrawerWrapper } from '@signozhq/drawer';
import './DetailsPanelDrawer.styles.scss';
interface DetailsPanelDrawerProps {
isOpen: boolean;
onClose: () => void;
children: React.ReactNode;
className?: string;
}
function DetailsPanelDrawer({
isOpen,
onClose,
children,
className,
}: DetailsPanelDrawerProps): JSX.Element {
return (
<DrawerWrapper
open={isOpen}
onOpenChange={(open): void => {
if (!open) {
onClose();
}
}}
direction="right"
type="panel"
showOverlay={false}
allowOutsideClick
className={`details-panel-drawer ${className || ''}`}
content={<div className="details-panel-drawer__body">{children}</div>}
/>
);
}
export default DetailsPanelDrawer;

View File

@@ -1,8 +0,0 @@
export type {
DetailsHeaderProps,
HeaderAction,
} from './DetailsHeader/DetailsHeader';
export { default as DetailsHeader } from './DetailsHeader/DetailsHeader';
export { default as DetailsPanelDrawer } from './DetailsPanelDrawer';
export type { DetailsPanelState, UseDetailsPanelOptions } from './types';
export { default as useDetailsPanel } from './useDetailsPanel';

View File

@@ -1,10 +0,0 @@
export interface DetailsPanelState {
isOpen: boolean;
open: () => void;
close: () => void;
}
export interface UseDetailsPanelOptions {
entityId: string | undefined;
onClose?: () => void;
}

View File

@@ -1,29 +0,0 @@
import { useCallback, useEffect, useRef, useState } from 'react';
import { DetailsPanelState, UseDetailsPanelOptions } from './types';
function useDetailsPanel({
entityId,
onClose,
}: UseDetailsPanelOptions): DetailsPanelState {
const [isOpen, setIsOpen] = useState<boolean>(false);
const prevEntityIdRef = useRef<string>('');
useEffect(() => {
const currentId = entityId || '';
if (currentId && currentId !== prevEntityIdRef.current) {
setIsOpen(true);
}
prevEntityIdRef.current = currentId;
}, [entityId]);
const open = useCallback(() => setIsOpen(true), []);
const close = useCallback(() => {
setIsOpen(false);
onClose?.();
}, [onClose]);
return { isOpen, open, close };
}
export default useDetailsPanel;

View File

@@ -677,18 +677,6 @@ function NewWidget({
queryType: currentQuery.queryType,
isNewPanel,
dataSource: currentQuery?.builder?.queryData?.[0]?.dataSource,
...(currentQuery.queryType === EQueryType.CLICKHOUSE && {
clickhouseQueryCount: currentQuery.clickhouse_sql.length,
clickhouseQueries: currentQuery.clickhouse_sql.map((q) => ({
name: q.name,
query: (q.query ?? '')
.replace(/--[^\n]*/g, '') // strip line comments
.replace(/\/\*[\s\S]*?\*\//g, '') // strip block comments
.replace(/'(?:[^'\\]|\\.|'')*'/g, "'?'") // replace single-quoted strings (handles \' and '' escapes)
.replace(/\b\d+(?:\.\d+)?(?:[eE][+-]?\d+)?\b/g, '?'), // replace numeric literals (int, float, scientific)
disabled: q.disabled,
})),
}),
});
setSaveModal(true);
// eslint-disable-next-line react-hooks/exhaustive-deps

View File

@@ -1,54 +0,0 @@
.data-viewer {
display: flex;
flex-direction: column;
flex: 1;
min-height: 0;
// Toolbar — view mode switcher + copy button
&__toolbar {
display: flex;
align-items: center;
justify-content: space-between;
padding-bottom: 8px;
}
&__mode-select {
min-width: 90px;
height: 28px !important;
.ant-select-selector {
border-radius: 4px !important;
border-color: var(--l2-border) !important;
font-size: 12px !important;
}
}
&__copy-btn {
display: flex;
align-items: center;
justify-content: center;
padding: 4px 8px;
border: 1px solid var(--l2-border);
border-radius: 4px;
background: transparent;
color: var(--l2-foreground);
cursor: pointer;
&:hover {
color: var(--l1-foreground);
background: var(--l3-background);
}
}
// Shared content container — no scroll, each view handles its own
&__content {
flex: 1;
display: flex;
flex-direction: column;
border: 1px solid var(--l2-border);
border-radius: 4px;
padding: 8px;
min-height: 0;
overflow: hidden;
}
}

View File

@@ -1,67 +0,0 @@
import { useMemo, useState } from 'react';
import { Copy } from '@signozhq/icons';
// TODO: Replace antd Select with @signozhq/ui component when moving to design library
import { Select } from 'antd';
import { JsonView } from 'periscope/components/JsonView';
import { PrettyView } from 'periscope/components/PrettyView';
import { PrettyViewProps } from 'periscope/components/PrettyView';
import { copyToClipboard } from './utils';
import './DataViewer.styles.scss';
type ViewMode = 'pretty' | 'json';
export interface DataViewerProps {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
data: Record<string, any>;
drawerKey?: string;
prettyViewProps?: Omit<PrettyViewProps, 'data' | 'drawerKey'>;
}
function DataViewer({
data,
drawerKey = 'default',
prettyViewProps,
}: DataViewerProps): JSX.Element {
const [viewMode, setViewMode] = useState<ViewMode>('pretty');
const jsonString = useMemo(() => JSON.stringify(data, null, 2), [data]);
return (
<div className="data-viewer">
<div className="data-viewer__toolbar">
<Select
className="data-viewer__mode-select"
size="small"
value={viewMode}
onChange={(value: ViewMode): void => setViewMode(value)}
options={[
{ label: 'Pretty', value: 'pretty' },
{ label: 'JSON', value: 'json' },
]}
getPopupContainer={(trigger): HTMLElement =>
trigger.parentElement || document.body
}
/>
<button
type="button"
className="data-viewer__copy-btn"
onClick={(): void => copyToClipboard(data)}
aria-label="Copy JSON"
>
<Copy size={14} />
</button>
</div>
<div className="data-viewer__content">
{viewMode === 'pretty' && (
<PrettyView data={data} drawerKey={drawerKey} {...prettyViewProps} />
)}
{viewMode === 'json' && <JsonView data={jsonString} />}
</div>
</div>
);
}
export default DataViewer;

View File

@@ -1,2 +0,0 @@
export type { DataViewerProps } from './DataViewer';
export { default as DataViewer } from './DataViewer';

View File

@@ -1,12 +0,0 @@
import { toast } from '@signozhq/sonner';
export function copyToClipboard(value: unknown): void {
const text =
typeof value === 'object' ? JSON.stringify(value, null, 2) : String(value);
// eslint-disable-next-line no-restricted-properties
navigator.clipboard.writeText(text);
toast.success('Copied to clipboard', {
richColors: true,
position: 'top-right',
});
}

View File

@@ -1,22 +0,0 @@
.floating-panel {
z-index: 999;
background: var(--l1-background);
border: 1px solid var(--l2-border);
border-radius: 6px;
box-shadow: 0 8px 24px rgba(0, 0, 0, 0.4);
overflow: hidden;
// Inner div fills the Rnd-controlled size
&__inner {
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
overflow: hidden;
}
// Drag handle — any child with this class becomes the drag target
.floating-panel__drag-handle {
cursor: move;
}
}

View File

@@ -1,80 +0,0 @@
import { ComponentProps } from 'react';
import { createPortal } from 'react-dom';
import { Rnd } from 'react-rnd';
import './FloatingPanel.styles.scss';
type EnableResizing = ComponentProps<typeof Rnd>['enableResizing'];
export interface FloatingPanelProps {
isOpen: boolean;
children: React.ReactNode;
defaultPosition?: { x: number; y: number };
width?: number;
height?: number;
minWidth?: number;
minHeight?: number;
enableResizing?: EnableResizing;
className?: string;
}
function FloatingPanel({
isOpen,
children,
defaultPosition,
width = 560,
height = 600,
minWidth = 400,
minHeight = 300,
enableResizing,
className,
}: FloatingPanelProps): JSX.Element | null {
if (!isOpen) {
return null;
}
const initialPosition = defaultPosition || {
x: window.innerWidth - width - 24,
y: 80,
};
return createPortal(
<Rnd
default={{
x: initialPosition.x,
y: initialPosition.y,
width,
height,
}}
dragHandleClassName="floating-panel__drag-handle"
minWidth={minWidth}
minHeight={minHeight}
onDrag={(_e, d): void | false => {
const HEADER_HEIGHT = 40;
// Top: don't allow header to go above viewport
if (d.y < 0) {
return false;
}
// Left: don't allow panel to go off-screen left
if (d.x < 0) {
return false;
}
// Bottom: only header needs to be visible
if (d.y > window.innerHeight - HEADER_HEIGHT) {
return false;
}
// Right: at least the close button (~40px) stays visible
if (d.x > window.innerWidth - 40) {
return false;
}
}}
className={`floating-panel ${className || ''}`}
enableResizing={enableResizing}
>
<div className="floating-panel__inner">{children}</div>
</Rnd>,
document.body,
);
}
export default FloatingPanel;

View File

@@ -1,2 +0,0 @@
export type { FloatingPanelProps } from './FloatingPanel';
export { default as FloatingPanel } from './FloatingPanel';

View File

@@ -1,22 +0,0 @@
.json-view {
display: flex;
flex-direction: column;
flex: 1;
min-height: 0;
overflow: hidden;
&__footer {
flex-shrink: 0;
height: 36px;
display: flex;
align-items: center;
border-top: 1px solid var(--l2-border);
}
&__wrap-toggle {
display: flex;
gap: 8px;
margin-left: 12px;
align-items: center;
}
}

View File

@@ -1,78 +0,0 @@
import { useState } from 'react';
import MEditor, { EditorProps, Monaco } from '@monaco-editor/react';
import { Color } from '@signozhq/design-tokens';
import { Switch, Typography } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import './JsonView.styles.scss';
export interface JsonViewProps {
data: string;
height?: string;
}
const editorOptions: EditorProps['options'] = {
automaticLayout: true,
readOnly: true,
wordWrap: 'on',
minimap: { enabled: false },
fontWeight: 400,
fontFamily: 'SF Mono, Geist Mono, Fira Code, monospace',
fontSize: 12,
lineHeight: '18px',
colorDecorators: true,
scrollBeyondLastLine: false,
decorationsOverviewRuler: false,
scrollbar: { vertical: 'hidden', horizontal: 'hidden' },
folding: false,
};
function setEditorTheme(monaco: Monaco): void {
monaco.editor.defineTheme('signoz-dark', {
base: 'vs-dark',
inherit: true,
rules: [
{ token: 'string.key.json', foreground: Color.BG_VANILLA_400 },
{ token: 'string.value.json', foreground: Color.BG_ROBIN_400 },
],
colors: {
'editor.background': '#00000000', // transparent
},
fontFamily: 'SF Mono, Geist Mono, Fira Code, monospace',
fontSize: 12,
fontWeight: 'normal',
lineHeight: 18,
letterSpacing: -0.06,
});
}
function JsonView({ data, height = '575px' }: JsonViewProps): JSX.Element {
const [isWrapWord, setIsWrapWord] = useState(true);
const isDarkMode = useIsDarkMode();
return (
<div className="json-view">
<MEditor
value={data}
language="json"
options={{ ...editorOptions, wordWrap: isWrapWord ? 'on' : 'off' }}
onChange={(): void => {}}
height={height}
theme={isDarkMode ? 'signoz-dark' : 'light'}
beforeMount={setEditorTheme}
/>
<div className="json-view__footer">
<div className="json-view__wrap-toggle">
<Typography.Text>Wrap text</Typography.Text>
<Switch
checked={isWrapWord}
onChange={(checked): void => setIsWrapWord(checked)}
size="small"
/>
</div>
</div>
</div>
);
}
export default JsonView;

View File

@@ -1,2 +0,0 @@
export type { JsonViewProps } from './JsonView';
export { default as JsonView } from './JsonView';

View File

@@ -1,67 +1,43 @@
.key-value-label {
display: flex;
border: 1px solid var(--l2-border);
align-items: center;
border: 1px solid var(--bg-slate-400);
border-radius: 2px;
&--row {
align-items: center;
flex-direction: row;
flex-wrap: wrap;
}
&--column {
flex-direction: column;
border: none;
border-radius: 0;
gap: 4px;
padding: 8px;
.key-value-label__key {
background: transparent;
border-radius: 0;
padding: 0;
font-size: 12px;
font-weight: 500;
text-transform: uppercase;
letter-spacing: 0.48px;
line-height: 20px;
}
.key-value-label__value {
background: transparent;
padding: 0;
font-size: 14px;
line-height: 20px;
}
}
flex-wrap: wrap;
&__key,
&__value {
padding: 6px;
padding: 1px 6px;
font-size: 14px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.005em;
&,
&:hover {
color: var(--l1-foreground);
color: var(--text-vanilla-400);
}
}
&__key {
background: var(--l2-background);
background: var(--bg-ink-400);
border-radius: 2px 0 0 2px;
}
&__value {
background: var(--l3-background);
background: var(--bg-slate-400);
}
}
&--clickable {
cursor: pointer;
&:hover {
opacity: 0.8;
}
.lightMode {
.key-value-label {
border-color: var(--bg-vanilla-400);
&__key,
&__value {
color: var(--text-ink-400);
}
&__key {
background: var(--bg-vanilla-300);
}
&__value {
background: var(--bg-vanilla-200);
}
}
}

View File

@@ -1,61 +1,29 @@
import { useMemo } from 'react';
import { Tooltip } from 'antd';
import TrimmedText from '../TrimmedText/TrimmedText';
import './KeyValueLabel.styles.scss';
// Rethink this component later
type KeyValueLabelProps = {
badgeKey: string | React.ReactNode;
badgeValue: string | React.ReactNode;
direction?: 'row' | 'column';
badgeValue: string;
maxCharacters?: number;
onClick?: () => void;
};
export default function KeyValueLabel({
badgeKey,
badgeValue,
direction = 'row',
maxCharacters = 20,
onClick,
}: KeyValueLabelProps): JSX.Element | null {
const isUrl = useMemo(() => /^https?:\/\//.test(badgeValue), [badgeValue]);
if (!badgeKey || !badgeValue) {
return null;
}
const renderValue = (): JSX.Element => {
if (typeof badgeValue !== 'string') {
return <div className="key-value-label__value">{badgeValue}</div>;
}
return (
<Tooltip title={badgeValue}>
<div
className={`key-value-label__value ${
onClick ? 'key-value-label__value--clickable' : ''
}`}
onClick={onClick}
role={onClick ? 'button' : undefined}
tabIndex={onClick ? 0 : undefined}
onKeyDown={
onClick
? (e): void => {
if (e.key === 'Enter' || e.key === ' ') {
onClick();
}
}
: undefined
}
>
<TrimmedText text={badgeValue} maxCharacters={maxCharacters} />
</div>
</Tooltip>
);
};
return (
<div className={`key-value-label key-value-label--${direction}`}>
<div className="key-value-label">
<div className="key-value-label__key">
{typeof badgeKey === 'string' ? (
<TrimmedText text={badgeKey} maxCharacters={maxCharacters} />
@@ -63,13 +31,26 @@ export default function KeyValueLabel({
badgeKey
)}
</div>
{renderValue()}
{isUrl ? (
<a
href={badgeValue}
target="_blank"
rel="noopener noreferrer"
className="key-value-label__value"
>
<TrimmedText text={badgeValue} maxCharacters={maxCharacters} />
</a>
) : (
<Tooltip title={badgeValue}>
<div className="key-value-label__value">
<TrimmedText text={badgeValue} maxCharacters={maxCharacters} />
</div>
</Tooltip>
)}
</div>
);
}
KeyValueLabel.defaultProps = {
maxCharacters: 20,
direction: 'row',
onClick: undefined,
};

View File

@@ -1,191 +0,0 @@
.pretty-view {
padding: 0;
flex: 1;
overflow-y: auto;
scrollbar-width: none;
&::-webkit-scrollbar {
display: none;
}
&__search-input {
position: sticky;
top: 0;
z-index: 1;
margin-bottom: 8px;
width: 100%;
border: 1px solid var(--l2-border) !important;
border-radius: 4px;
font-family: 'SF Mono', 'Geist Mono', 'Fira Code', monospace !important;
font-size: 12px !important;
line-height: 18px !important;
background: var(--l1-background) !important;
outline: none !important;
box-shadow: none !important;
&:focus {
border-color: var(--primary) !important;
outline: none !important;
box-shadow: none !important;
}
}
// Font spec: SF Mono, 12px, 400, 18px line-height, -0.5% letter-spacing
font-family: 'SF Mono', 'Geist Mono', 'Fira Code', monospace;
font-size: 12px;
font-weight: 400;
line-height: 18px;
letter-spacing: -0.06px;
// Override react-json-tree inline styles
ul {
margin: 0 !important;
padding-left: 16px !important;
list-style: none !important;
background-color: transparent !important;
flex: 1;
}
> ul,
&__pinned > ul {
padding-left: 0 !important;
}
// Force font on all tree elements
li,
span,
label {
font-family: inherit !important;
font-size: inherit !important;
line-height: inherit !important;
letter-spacing: inherit !important;
}
// Fix react-json-tree text-indent wrapping
li {
text-indent: 0 !important;
margin-left: 0 !important;
padding-top: 2px !important;
padding-bottom: 2px !important;
}
.pretty-view__actions {
margin-right: 16px;
}
// Leaf node row — hover highlights only this row
&__row {
border-radius: 2px;
display: flex !important;
align-items: baseline;
&:hover {
background-color: var(--l3-background);
.pretty-view__actions {
opacity: 1;
}
}
// Push actions to the right edge
> span {
flex: 1;
}
}
// Nested node (object/array) — hover only on the label line, not children
&__nested-row {
> label,
> span:not(ul span) {
border-radius: 2px;
padding: 1px 2px;
display: inline !important; // keep item string inline with label
}
// Highlight label + item string on hover, show actions
&:hover > label,
&:hover > label + span {
background-color: var(--l3-background);
}
&:hover > label + span .pretty-view__actions {
opacity: 1;
}
// In nested rows, value-row should not take full width
.pretty-view__value-row {
width: auto;
}
}
// Value + actions wrapper (from valueRenderer / getItemString)
&__value-row {
display: inline-flex;
align-items: center;
gap: 6px;
}
// In leaf rows, value-row takes full width to push ... to the right
&__row &__value-row {
justify-content: space-between;
width: 100%;
}
// ... actions button — hidden by default, shown on row hover
&__actions {
opacity: 0;
display: inline-flex;
align-items: center;
cursor: pointer;
color: var(--l2-foreground);
transition: opacity 0.1s ease;
padding: 0 2px;
&:hover {
color: var(--l1-foreground);
}
}
// Pinned items section
&__pinned {
border-bottom: 1px solid var(--l2-border);
padding-bottom: 8px;
margin-bottom: 8px;
padding-left: 0px;
li {
margin-left: 0 !important;
padding-left: 0 !important;
display: flex !important;
align-items: baseline;
}
}
&__pinned-header {
font-size: 11px;
font-weight: 500;
color: var(--l3-foreground);
text-transform: uppercase;
letter-spacing: 0.5px;
padding: 4px;
}
&__pinned-label {
display: inline-flex;
align-items: baseline;
gap: 6px;
}
&__pinned-icon {
flex-shrink: 0;
color: var(--text-robin-400);
cursor: pointer;
fill: var(--text-robin-400);
transition: fill 0.1s ease;
&:hover {
color: var(--text-robin-300);
fill: transparent;
}
}
}

View File

@@ -1,300 +0,0 @@
import { useCallback, useMemo } from 'react';
import { JSONTree, KeyPath } from 'react-json-tree';
import { Copy, Ellipsis, Pin, PinOff } from '@signozhq/icons';
import { Input } from '@signozhq/input';
import type { MenuProps } from 'antd';
// TODO: Replace antd Dropdown with @signozhq/ui component when moving to design library
import { Dropdown } from 'antd';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { darkTheme, lightTheme, themeExtension } from './constants';
import usePinnedFields from './hooks/usePinnedFields';
import useSearchFilter, { filterTree } from './hooks/useSearchFilter';
import {
copyToClipboard,
keyPathToDisplayString,
keyPathToForward,
serializeKeyPath,
} from './utils';
import './PrettyView.styles.scss';
export interface FieldContext {
fieldKey: string;
fieldKeyPath: (string | number)[];
fieldValue: unknown;
isNested: boolean;
}
export interface PrettyViewAction {
key: string;
label: React.ReactNode;
icon?: React.ReactNode;
onClick: (context: FieldContext) => void;
}
export interface PrettyViewProps {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
data: Record<string, any>;
actions?: PrettyViewAction[];
searchable?: boolean;
showPinned?: boolean;
drawerKey?: string;
}
function PrettyView({
data,
actions,
searchable = true,
showPinned = false,
drawerKey = 'default',
}: PrettyViewProps): JSX.Element {
const isDarkMode = useIsDarkMode();
const { searchQuery, setSearchQuery, filteredData } = useSearchFilter(data);
const {
isPinned,
togglePin,
pinnedEntries,
pinnedData,
displayKeyToForwardPath,
} = usePinnedFields(data, drawerKey);
const filteredPinnedData = useMemo(() => {
const trimmed = searchQuery.trim();
if (!trimmed) {
return pinnedData;
}
return filterTree(pinnedData, trimmed) || {};
}, [pinnedData, searchQuery]);
const theme = useMemo(
() => ({
extend: isDarkMode ? darkTheme : lightTheme,
...themeExtension,
}),
[isDarkMode],
);
const shouldExpandNodeInitially = useCallback(
(
_keyPath: readonly (string | number)[],
_data: unknown,
level: number,
): boolean => level < 5,
[],
);
const buildMenuItems = useCallback(
(context: FieldContext): MenuProps['items'] => {
// todo: drive dropdown through config.
const copyItem = {
key: 'copy',
label: 'Copy',
icon: <Copy size={12} />,
onClick: (): void => {
copyToClipboard(context.fieldValue);
},
};
const items: NonNullable<MenuProps['items']> = [copyItem];
// Pin action only for leaf nodes
if (!context.isNested) {
// Resolve the correct forward path — pinned tree uses display keys
// which don't match the original serialized path
const resolvedPath =
displayKeyToForwardPath[context.fieldKey] || context.fieldKeyPath;
const serialized = serializeKeyPath(resolvedPath);
const pinned = isPinned(serialized);
items.push({
key: 'pin',
label: pinned ? 'Unpin field' : 'Pin field',
icon: pinned ? <PinOff size={12} /> : <Pin size={12} />,
onClick: (): void => {
togglePin(resolvedPath);
},
});
}
if (actions && actions.length > 0) {
//todo: why this divider?
items.push({ type: 'divider' as const, key: 'divider' });
actions.forEach((action) => {
items.push({
key: action.key,
label: action.label,
icon: action.icon,
onClick: (): void => {
action.onClick(context);
},
});
});
}
return items;
},
[actions, isPinned, togglePin, displayKeyToForwardPath],
);
const renderWithActions = useCallback(
({
content,
fieldKey,
fieldKeyPath,
value,
isNested,
}: {
content: React.ReactNode;
fieldKey: string;
fieldKeyPath: (string | number)[];
value: unknown;
isNested: boolean;
}): React.ReactNode => {
const context: FieldContext = {
fieldKey,
fieldKeyPath,
fieldValue: value,
isNested,
};
const menuItems = buildMenuItems(context);
return (
<span className="pretty-view__value-row">
<span>{content}</span>
<Dropdown
menu={{
items: menuItems,
onClick: (e): void => {
e.domEvent.stopPropagation();
},
}}
trigger={['click']}
placement="bottomLeft"
getPopupContainer={(trigger): HTMLElement =>
trigger.parentElement || document.body
}
>
<span
className="pretty-view__actions"
onClick={(e): void => e.stopPropagation()}
role="button"
tabIndex={0}
>
<Ellipsis size={12} />
</span>
</Dropdown>
</span>
);
},
[buildMenuItems],
);
const getItemString = useCallback(
(
_nodeType: string,
nodeData: unknown,
itemType: React.ReactNode,
itemString: string,
keyPath: KeyPath,
): // eslint-disable-next-line max-params
React.ReactNode => {
const forwardPath = keyPathToForward(keyPath);
return renderWithActions({
content: (
<>
{itemType} {itemString}
</>
),
fieldKey: keyPathToDisplayString(keyPath),
fieldKeyPath: forwardPath,
value: nodeData,
isNested: true,
});
},
[renderWithActions],
);
const valueRenderer = useCallback(
(
valueAsString: unknown,
value: unknown,
...keyPath: KeyPath
): React.ReactNode => {
const forwardPath = keyPathToForward(keyPath);
return renderWithActions({
content: String(valueAsString),
fieldKey: keyPathToDisplayString(keyPath),
fieldKeyPath: forwardPath,
value,
isNested: typeof value === 'object' && value !== null,
});
},
[renderWithActions],
);
const pinnedLabelRenderer = useCallback(
(keyPath: KeyPath): React.ReactNode => {
const displayKey = String(keyPath[0]);
const entry = pinnedEntries.find((e) => e.displayKey === displayKey);
return (
<span className="pretty-view__pinned-label">
<Pin
size={12}
className="pretty-view__pinned-icon"
onClick={(): void => {
if (entry) {
togglePin(entry.forwardPath);
}
}}
/>
<span>{displayKey}</span>
</span>
);
},
[togglePin, pinnedEntries],
);
return (
<div className="pretty-view">
{searchable && (
<Input
className="pretty-view__search-input"
type="text"
placeholder="Search for a field..."
value={searchQuery}
onChange={(e): void => setSearchQuery(e.target.value)}
/>
)}
{showPinned && Object.keys(filteredPinnedData).length > 0 && (
<div className="pretty-view__pinned">
<div className="pretty-view__pinned-header">PINNED ITEMS</div>
<JSONTree
key={`pinned-${searchQuery}`}
data={filteredPinnedData}
theme={theme}
invertTheme={false}
hideRoot
shouldExpandNodeInitially={shouldExpandNodeInitially}
valueRenderer={valueRenderer}
getItemString={getItemString}
labelRenderer={pinnedLabelRenderer}
/>
</div>
)}
<JSONTree
key={searchQuery}
data={filteredData}
theme={theme}
invertTheme={false}
hideRoot
shouldExpandNodeInitially={shouldExpandNodeInitially}
valueRenderer={valueRenderer}
getItemString={getItemString}
/>
</div>
);
}
export default PrettyView;

View File

@@ -1,62 +0,0 @@
// Dark theme — SigNoz design palette
export const darkTheme = {
scheme: 'signoz-dark',
author: 'signoz',
base00: 'transparent',
base01: '#161922',
base02: '#1d212d',
base03: '#62687C',
base04: '#ADB4C2',
base05: '#ADB4C2',
base06: '#ADB4C2',
base07: '#ADB4C2',
base08: '#EA6D71',
base09: '#7CEDBD',
base0A: '#7CEDBD',
base0B: '#ADB4C2',
base0C: '#23C4F8',
base0D: '#95ACFB',
base0E: '#95ACFB',
base0F: '#AD7F58',
};
// Light theme
export const lightTheme = {
scheme: 'signoz-light',
author: 'signoz',
base00: 'transparent',
base01: '#F9F9FB',
base02: '#EFF0F3',
base03: '#80828D',
base04: '#62636C',
base05: '#62636C',
base06: '#62636C',
base07: '#1E1F24',
base08: '#E5484D',
base09: '#168757',
base0A: '#168757',
base0B: '#62636C',
base0C: '#157594',
base0D: '#2F48A0',
base0E: '#2F48A0',
base0F: '#684C35',
};
export const themeExtension = {
value: ({
style,
}: {
style: Record<string, unknown>;
}): { style: Record<string, unknown>; className: string } => ({
style: { ...style },
className: 'pretty-view__row',
}),
nestedNode: ({
style,
}: {
style: Record<string, unknown>;
}): { style: Record<string, unknown>; className: string } => ({
style: { ...style },
className: 'pretty-view__nested-row',
}),
};

View File

@@ -1,127 +0,0 @@
import { useCallback, useMemo, useState } from 'react';
import {
deserializeKeyPath,
keyPathToDisplayString,
resolveValueByKeys,
serializeKeyPath,
} from '../utils';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type AnyRecord = Record<string, any>;
const STORAGE_PREFIX = 'pinnedFields';
function loadFromStorage(storageKey: string): string[] {
try {
const stored = localStorage.getItem(storageKey);
return stored ? JSON.parse(stored) : [];
} catch {
return [];
}
}
// Persist the pinned-key list for a drawer. Best-effort: localStorage.setItem
// can throw (quota exceeded, storage disabled), and pinning should degrade
// silently rather than crash the toggle handler.
function saveToStorage(storageKey: string, keys: string[]): void {
	try {
		localStorage.setItem(storageKey, JSON.stringify(keys));
	} catch {
		// Intentionally ignored — persistence is a convenience, not a contract.
	}
}
// A single pinned field resolved against the current data object.
export interface PinnedEntry {
	// JSON-stringified forward keyPath — stable identity used for storage
	// and membership checks.
	serializedKey: string;
	// Human-readable dotted path (e.g. 'tagMap.cloud.account.id').
	displayKey: string;
	// Keys from the data root down to the value, in forward (root-first) order.
	forwardPath: (string | number)[];
	// Current value at that path in `data`; entries whose path no longer
	// resolves are filtered out before they reach consumers.
	value: unknown;
}
// Public surface of usePinnedFields.
export interface UsePinnedFieldsReturn {
	// True when the given serialized keyPath is currently pinned.
	isPinned: (serializedKey: string) => boolean;
	// Pin/unpin the field at the given forward keyPath (persisted).
	togglePin: (forwardPath: (string | number)[]) => void;
	// Pinned fields resolved against the current data.
	pinnedEntries: PinnedEntry[];
	// Flat { displayKey: value } object for rendering the pinned JSONTree.
	pinnedData: AnyRecord;
	// Reverse lookup from display key back to the forward path (for unpinning
	// from the pinned tree, which only knows display keys).
	displayKeyToForwardPath: Record<string, (string | number)[]>;
}
function usePinnedFields(
data: AnyRecord,
drawerKey: string,
): UsePinnedFieldsReturn {
const storageKey = `${STORAGE_PREFIX}:${drawerKey}`;
// Stored as serialized keyPath arrays (JSON strings)
const [pinnedSerializedKeys, setPinnedSerializedKeys] = useState<Set<string>>(
() => new Set(loadFromStorage(storageKey)),
);
const togglePin = useCallback(
(forwardPath: (string | number)[]): void => {
const serialized = serializeKeyPath(forwardPath);
setPinnedSerializedKeys((prev) => {
const next = new Set(prev);
if (next.has(serialized)) {
next.delete(serialized);
} else {
next.add(serialized);
}
saveToStorage(storageKey, Array.from(next));
return next;
});
},
[storageKey],
);
const isPinned = useCallback(
(serializedKey: string): boolean => pinnedSerializedKeys.has(serializedKey),
[pinnedSerializedKeys],
);
const pinnedEntries = useMemo(
(): PinnedEntry[] =>
Array.from(pinnedSerializedKeys)
.map((serializedKey) => {
const forwardPath = deserializeKeyPath(serializedKey);
if (!forwardPath) {
return null;
}
return {
serializedKey,
displayKey: keyPathToDisplayString(
[...forwardPath].reverse() as readonly (string | number)[],
),
forwardPath,
value: resolveValueByKeys(data, forwardPath),
};
})
.filter(
(entry): entry is PinnedEntry =>
entry !== null && entry.value !== undefined,
),
[pinnedSerializedKeys, data],
);
// Flat object for the pinned JSONTree — use display key as the object key
const pinnedData = useMemo(
() =>
Object.fromEntries(
pinnedEntries.map((entry) => [entry.displayKey, entry.value]),
),
[pinnedEntries],
);
// Map from display key to original forward path — for unpin from pinned tree
const displayKeyToForwardPath = useMemo(
(): Record<string, (string | number)[]> =>
Object.fromEntries(
pinnedEntries.map((entry) => [entry.displayKey, entry.forwardPath]),
),
[pinnedEntries],
);
return {
isPinned,
togglePin,
pinnedEntries,
pinnedData,
displayKeyToForwardPath,
};
}
export default usePinnedFields;

View File

@@ -1,82 +0,0 @@
import { useMemo, useState } from 'react';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type AnyRecord = Record<string, any>;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
type AnyValue = any;
// True for primitives, null, and undefined — anything that is not an
// object/array and therefore has no children to recurse into.
function isLeaf(value: unknown): boolean {
	return value === null || value === undefined || typeof value !== 'object';
}

// Case-insensitive substring match; `lowerQuery` must already be lowercased.
function matchesQuery(text: string, lowerQuery: string): boolean {
	return text.toLowerCase().includes(lowerQuery);
}

// Filter a single value (leaf, array, or object) against the query.
// Returns the filtered value or null if no match.
function filterValue(value: AnyValue, lowerQuery: string): AnyValue | null {
	if (isLeaf(value)) {
		// String(value) so numbers/booleans/null are searchable as text.
		return matchesQuery(String(value), lowerQuery) ? value : null;
	}
	if (Array.isArray(value)) {
		return filterArray(value, lowerQuery);
	}
	return filterNode(value, lowerQuery);
}

// Recursively filter an array, keeping only elements that match.
function filterArray(arr: AnyValue[], lowerQuery: string): AnyValue[] | null {
	const results = arr
		.map((item) => filterValue(item, lowerQuery))
		.filter((item) => item !== null);
	return results.length > 0 ? results : null;
}

// Object-level recursion on a pre-lowered query. Hoisting the lowering into
// filterTree means the query is lowercased once per call, instead of once
// per visited leaf as before.
function filterNode(obj: AnyRecord, lowerQuery: string): AnyRecord | null {
	const result: AnyRecord = {};
	let hasMatch = false;
	for (const [key, value] of Object.entries(obj)) {
		// A matching key keeps its entire subtree, unfiltered.
		if (matchesQuery(key, lowerQuery)) {
			result[key] = value;
			hasMatch = true;
			continue;
		}
		const filtered = filterValue(value, lowerQuery);
		if (filtered !== null) {
			result[key] = filtered;
			hasMatch = true;
		}
	}
	return hasMatch ? result : null;
}

// Recursively filter the data tree, keeping only branches whose keys or
// values contain the query (case-insensitive). Returns null when nothing
// matches.
export function filterTree(obj: AnyRecord, query: string): AnyRecord | null {
	return filterNode(obj, query.toLowerCase());
}
// Public surface of useSearchFilter.
interface UseSearchFilterReturn {
	// Raw query text, exactly as typed (trimming happens only for filtering).
	searchQuery: string;
	// Setter wired to the search input.
	setSearchQuery: (query: string) => void;
	// `data` filtered by the current query; the original `data` when the
	// query is empty, and {} when nothing matches.
	filteredData: AnyRecord;
}
// Drives the search box: holds the query string and derives the filtered
// view of `data`. A blank/whitespace-only query returns `data` untouched.
function useSearchFilter(data: AnyRecord): UseSearchFilterReturn {
	const [searchQuery, setSearchQuery] = useState('');

	const filteredData = useMemo((): AnyRecord => {
		const query = searchQuery.trim();
		if (query.length === 0) {
			return data;
		}
		return filterTree(data, query) ?? {};
	}, [data, searchQuery]);

	return { searchQuery, setSearchQuery, filteredData };
}
export default useSearchFilter;

View File

@@ -1,2 +0,0 @@
export type { PrettyViewProps } from './PrettyView';
export { default as PrettyView } from './PrettyView';

View File

@@ -1,58 +0,0 @@
import { toast } from '@signozhq/sonner';
// Serialize `value` (pretty-printed JSON for objects, String() otherwise),
// write it to the system clipboard, and confirm with a toast.
export function copyToClipboard(value: unknown): void {
	const isObjectLike = typeof value === 'object';
	const text = isObjectLike ? JSON.stringify(value, null, 2) : String(value);
	// eslint-disable-next-line no-restricted-properties
	navigator.clipboard.writeText(text);
	toast.success('Copied to clipboard', {
		position: 'top-right',
		richColors: true,
	});
}
// Resolve a value from a nested object using an array of keys (not dot-notation)
// e.g. resolveValueByKeys({ tagMap: { 'cloud.account.id': 'x' } }, ['tagMap', 'cloud.account.id']) → 'x'
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function resolveValueByKeys(
	data: Record<string, any>,
	keys: (string | number)[],
): unknown {
	// Walk key by key; optional chaining turns a broken path into undefined.
	// eslint-disable-next-line @typescript-eslint/no-explicit-any
	let current: any = data;
	for (const key of keys) {
		current = current?.[key];
	}
	return current;
}
// Convert react-json-tree's reversed keyPath to forward (root-first) order
// e.g. ['cloud.account.id', 'tagMap'] → ['tagMap', 'cloud.account.id']
export function keyPathToForward(
	keyPath: readonly (string | number)[],
): (string | number)[] {
	// slice() copies first so the caller's array is never mutated by reverse().
	return keyPath.slice().reverse();
}
// Display-friendly dotted string for a (reversed) keyPath
// e.g. ['cloud.account.id', 'tagMap'] → 'tagMap.cloud.account.id'
export function keyPathToDisplayString(
	keyPath: readonly (string | number)[],
): string {
	// Copy before reversing so the caller's array stays intact.
	return keyPath.slice().reverse().join('.');
}
// Serialize keyPath for storage/comparison (JSON stringified array).
export function serializeKeyPath(keyPath: (string | number)[]): string {
	return JSON.stringify(keyPath);
}

// Inverse of serializeKeyPath. Returns null for malformed JSON, non-array
// values, or arrays containing anything other than strings/numbers — stored
// data may be stale or corrupted, so the element types are validated too
// (the previous version accepted any array, letting objects/nulls leak into
// key paths).
export function deserializeKeyPath(
	serialized: string,
): (string | number)[] | null {
	try {
		const parsed: unknown = JSON.parse(serialized);
		if (
			Array.isArray(parsed) &&
			parsed.every(
				(item) => typeof item === 'string' || typeof item === 'number',
			)
		) {
			return parsed as (string | number)[];
		}
		return null;
	} catch {
		return null;
	}
}

View File

@@ -1,42 +0,0 @@
// Container for the ResizableBox component: clips its content and hosts the
// absolutely-positioned drag handle on one edge.
.resizable-box {
	position: relative;
	overflow: hidden;

	// When resizing is disabled the box has no explicit size — fall back to
	// flex sizing within the parent.
	&--disabled {
		flex: 1;
		min-height: 0;
	}

	&__content {
		width: 100%;
		height: 100%;
		overflow: hidden;
	}

	// Drag handle pinned to one edge of the box.
	&__handle {
		position: absolute;
		z-index: 10; // keep the handle above the box content
		background: var(--l2-border);

		// Highlight while hovered or mid-drag.
		&:hover,
		&:active {
			background: var(--primary);
		}

		// Bottom edge — drags height.
		&--vertical {
			bottom: 0;
			left: 0;
			right: 0;
			height: 4px;
			cursor: row-resize;
		}

		// Right edge — drags width.
		&--horizontal {
			right: 0;
			top: 0;
			bottom: 0;
			width: 4px;
			cursor: col-resize;
		}
	}
}

View File

@@ -1,88 +0,0 @@
import { useCallback, useEffect, useRef, useState } from 'react';

import './ResizableBox.styles.scss';
export interface ResizableBoxProps {
	children: React.ReactNode;
	// Which edge carries the drag handle: 'vertical' resizes height (bottom
	// edge), 'horizontal' resizes width (right edge).
	direction?: 'vertical' | 'horizontal';
	// Pixel bounds; the height trio applies to 'vertical', the width trio to
	// 'horizontal'.
	defaultHeight?: number;
	minHeight?: number;
	maxHeight?: number;
	defaultWidth?: number;
	minWidth?: number;
	maxWidth?: number;
	// Called with the new size (px) on every drag move.
	onResize?: (size: number) => void;
	// When true, no handle is rendered and the box sizes itself via CSS flex.
	disabled?: boolean;
	className?: string;
}
/**
 * A box whose height (vertical) or width (horizontal) can be adjusted by
 * dragging a handle on its bottom/right edge.
 *
 * The size is clamped to [min, max] on every drag move and `onResize` fires
 * with each intermediate value. When `disabled`, no handle is rendered and
 * the box relies on CSS flex sizing instead of an explicit pixel size.
 *
 * Fix over the previous version: the document-level mousemove/mouseup
 * listeners (and the body cursor/user-select overrides) leaked if the
 * component unmounted mid-drag; an unmount effect now tears down any
 * in-flight drag.
 */
function ResizableBox({
	children,
	direction = 'vertical',
	defaultHeight = 200,
	minHeight = 50,
	maxHeight = Infinity,
	defaultWidth = 200,
	minWidth = 50,
	maxWidth = Infinity,
	onResize,
	disabled = false,
	className,
}: ResizableBoxProps): JSX.Element {
	const isHorizontal = direction === 'horizontal';
	const [size, setSize] = useState(isHorizontal ? defaultWidth : defaultHeight);
	const containerRef = useRef<HTMLDivElement>(null);
	// Teardown for an in-flight drag, so unmount can clean up after it.
	const endDragRef = useRef<(() => void) | null>(null);

	// On unmount, end any active drag (removes document listeners and
	// restores the body cursor/user-select).
	useEffect(
		() => (): void => {
			endDragRef.current?.();
		},
		[],
	);

	const handleMouseDown = useCallback(
		(e: React.MouseEvent): void => {
			e.preventDefault();
			const startPos = isHorizontal ? e.clientX : e.clientY;
			const startSize = size;
			const min = isHorizontal ? minWidth : minHeight;
			const max = isHorizontal ? maxWidth : maxHeight;

			const onMouseMove = (moveEvent: MouseEvent): void => {
				const currentPos = isHorizontal ? moveEvent.clientX : moveEvent.clientY;
				const delta = currentPos - startPos;
				const newSize = Math.min(max, Math.max(min, startSize + delta));
				setSize(newSize);
				onResize?.(newSize);
			};

			const endDrag = (): void => {
				document.removeEventListener('mousemove', onMouseMove);
				document.removeEventListener('mouseup', endDrag);
				document.body.style.cursor = '';
				document.body.style.userSelect = '';
				endDragRef.current = null;
			};
			endDragRef.current = endDrag;

			// Show a resize cursor and suppress text selection for the whole drag.
			document.body.style.cursor = isHorizontal ? 'col-resize' : 'row-resize';
			document.body.style.userSelect = 'none';
			document.addEventListener('mousemove', onMouseMove);
			document.addEventListener('mouseup', endDrag);
		},
		[size, isHorizontal, minWidth, maxWidth, minHeight, maxHeight, onResize],
	);

	const containerStyle = disabled
		? undefined
		: isHorizontal
		? { width: size }
		: { height: size };
	const handleClass = `resizable-box__handle resizable-box__handle--${direction}`;

	return (
		<div
			ref={containerRef}
			className={`resizable-box ${disabled ? 'resizable-box--disabled' : ''} ${
				className || ''
			}`}
			style={containerStyle}
		>
			<div className="resizable-box__content">{children}</div>
			{!disabled && <div className={handleClass} onMouseDown={handleMouseDown} />}
		</div>
	);
}
export default ResizableBox;

View File

@@ -1,2 +0,0 @@
export type { ResizableBoxProps } from './ResizableBox';
export { default as ResizableBox } from './ResizableBox';

View File

@@ -6538,11 +6538,6 @@
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.0.tgz#d774355e41f372d5350a4d0714abb48194a489c3"
integrity sha512-t7dhREVv6dbNj0q17X12j7yDG4bD/DHYX7o5/DbDxobP0HnGPgpRz2Ej77aL7TZT3DSw13fqUTj8J4mMnqa7WA==
"@types/lodash@^4.17.0", "@types/lodash@^4.17.15":
version "4.17.24"
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.24.tgz#4ae334fc62c0e915ca8ed8e35dcc6d4eeb29215f"
integrity sha512-gIW7lQLZbue7lRSWEFql49QJJWThrTFFeIMJdp3eH4tKoxm1OvEPg02rm4wCCSHS0cL3/Fizimb35b7k8atwsQ==
"@types/mdast@^3.0.0":
version "3.0.12"
resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-3.0.12.tgz#beeb511b977c875a5b0cc92eab6fcac2f0895514"
@@ -8957,7 +8952,7 @@ color-string@^1.9.0:
color-name "^1.0.0"
simple-swizzle "^0.2.2"
color@^4.2.1, color@^4.2.3:
color@^4.2.1:
version "4.2.3"
resolved "https://registry.npmjs.org/color/-/color-4.2.3.tgz"
integrity sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==
@@ -9389,11 +9384,6 @@ csstype@^3.1.2:
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81"
integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==
csstype@^3.1.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.2.3.tgz#ec48c0f3e993e50648c86da559e2610995cf989a"
integrity sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==
"d3-array@1 - 3", "d3-array@2 - 3", "d3-array@2.10.0 - 3":
version "3.2.3"
resolved "https://registry.npmjs.org/d3-array/-/d3-array-3.2.3.tgz"
@@ -16829,11 +16819,6 @@ rc-virtual-list@^3.11.1, rc-virtual-list@^3.5.1, rc-virtual-list@^3.5.2:
rc-resize-observer "^1.0.0"
rc-util "^5.36.0"
re-resizable@^6.11.2:
version "6.11.2"
resolved "https://registry.yarnpkg.com/re-resizable/-/re-resizable-6.11.2.tgz#2e8f7119ca3881d5b5aea0ffa014a80e5c1252b3"
integrity sha512-2xI2P3OHs5qw7K0Ud1aLILK6MQxW50TcO+DetD9eIV58j84TqYeHoZcL9H4GXFXXIh7afhH8mv5iUCXII7OW7A==
react-addons-update@15.6.3:
version "15.6.3"
resolved "https://registry.yarnpkg.com/react-addons-update/-/react-addons-update-15.6.3.tgz#c449c309154024d04087b206d0400e020547b313"
@@ -16841,16 +16826,6 @@ react-addons-update@15.6.3:
dependencies:
object-assign "^4.1.0"
react-base16-styling@^0.10.0:
version "0.10.0"
resolved "https://registry.yarnpkg.com/react-base16-styling/-/react-base16-styling-0.10.0.tgz#5d5f019bd4dc5870c3e92fd9d5410533a0bbb0c6"
integrity sha512-H1k2eFB6M45OaiRru3PBXkuCcn2qNmx+gzLb4a9IPMR7tMH8oBRXU5jGbPDYG1Hz+82d88ED0vjR8BmqU3pQdg==
dependencies:
"@types/lodash" "^4.17.0"
color "^4.2.3"
csstype "^3.1.3"
lodash-es "^4.17.21"
react-beautiful-dnd@13.1.1:
version "13.1.1"
resolved "https://registry.yarnpkg.com/react-beautiful-dnd/-/react-beautiful-dnd-13.1.1.tgz#b0f3087a5840920abf8bb2325f1ffa46d8c4d0a2"
@@ -16915,14 +16890,6 @@ react-draggable@^4.0.0, react-draggable@^4.0.3:
clsx "^1.1.1"
prop-types "^15.8.1"
react-draggable@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/react-draggable/-/react-draggable-4.5.0.tgz#0b274ccb6965fcf97ed38fcf7e3cc223bc48cdf5"
integrity sha512-VC+HBLEZ0XJxnOxVAZsdRi8rD04Iz3SiiKOoYzamjylUcju/hP9np/aZdLHf/7WOD268WMoNJMvYfB5yAK45cw==
dependencies:
clsx "^2.1.1"
prop-types "^15.8.1"
react-error-boundary@4.0.11:
version "4.0.11"
resolved "https://registry.yarnpkg.com/react-error-boundary/-/react-error-boundary-4.0.11.tgz#36bf44de7746714725a814630282fee83a7c9a1c"
@@ -17013,14 +16980,6 @@ react-is@^18.3.1:
resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e"
integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==
react-json-tree@0.20.0:
version "0.20.0"
resolved "https://registry.yarnpkg.com/react-json-tree/-/react-json-tree-0.20.0.tgz#1e7fd26f093bd49d733f80dd9b6d40142e09f7c9"
integrity sha512-h+f9fUNAxzBx1rbrgUF7+zSWKGHDtt2VPYLErIuB0JyKGnWgFMM21ksqQyb3EXwXNnoMW2rdE5kuAaubgGOx2Q==
dependencies:
"@types/lodash" "^4.17.15"
react-base16-styling "^0.10.0"
react-kapsule@^2.5:
version "2.5.7"
resolved "https://registry.yarnpkg.com/react-kapsule/-/react-kapsule-2.5.7.tgz#dcd957ae8e897ff48055fc8ff48ed04ebe3c5bd2"
@@ -17140,15 +17099,6 @@ react-resizable@^3.0.4:
prop-types "15.x"
react-draggable "^4.0.3"
react-rnd@10.5.3:
version "10.5.3"
resolved "https://registry.yarnpkg.com/react-rnd/-/react-rnd-10.5.3.tgz#f8c484034276a561e8aa2fbf6a94580596621013"
integrity sha512-s/sIT3pGZnQ+57egijkTp9mizjIWrJz68Pq6yd+F/wniFY3IriML18dUXnQe/HP9uMiJ+9MAp44hljG99fZu6Q==
dependencies:
re-resizable "^6.11.2"
react-draggable "^4.5.0"
tslib "2.6.2"
react-router-dom-v5-compat@6.27.0:
version "6.27.0"
resolved "https://registry.yarnpkg.com/react-router-dom-v5-compat/-/react-router-dom-v5-compat-6.27.0.tgz#19429aefa130ee151e6f4487d073d919ec22d458"
@@ -19234,11 +19184,6 @@ tsconfig-paths@^4.2.0:
minimist "^1.2.6"
strip-bom "^3.0.0"
tslib@2.6.2, tslib@^2.0.0:
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
tslib@2.7.0:
version "2.7.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.7.0.tgz#d9b40c5c40ab59e8738f297df3087bf1a2690c01"
@@ -19249,6 +19194,11 @@ tslib@^1.14.1, tslib@^1.8.1:
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
tslib@^2.0.0:
version "2.6.2"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
tslib@^2.0.3, tslib@^2.1.0, tslib@^2.3.0:
version "2.5.0"
resolved "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz"

View File

@@ -63,7 +63,6 @@ type RetryConfig struct {
func newConfig() factory.Config {
return Config{
Provider: "noop",
BufferSize: 1000,
BatchSize: 100,
FlushInterval: time.Second,

View File

@@ -138,7 +138,14 @@ func (m *module) listMeterMetrics(ctx context.Context, params *metricsexplorerty
func (m *module) listMetrics(ctx context.Context, orgID valuer.UUID, params *metricsexplorertypes.ListMetricsParams) (*metricsexplorertypes.ListMetricsResponse, error) {
sb := sqlbuilder.NewSelectBuilder()
sb.Select("DISTINCT metric_name")
sb.Select(
"metric_name",
"anyLast(description) AS description",
"anyLast(type) AS metric_type",
"argMax(unit, unix_milli) AS metric_unit",
"anyLast(temporality) AS temporality",
"anyLast(is_monotonic) AS is_monotonic",
)
if params.Start != nil && params.End != nil {
start, end, distributedTsTable, _ := telemetrymetrics.WhichTSTableToUse(uint64(*params.Start), uint64(*params.End), nil)
@@ -157,6 +164,7 @@ func (m *module) listMetrics(ctx context.Context, orgID valuer.UUID, params *met
sb.Where(sb.Like("lower(metric_name)", fmt.Sprintf("%%%s%%", searchLower)))
}
sb.GroupBy("metric_name")
sb.OrderBy("metric_name ASC")
sb.Limit(params.Limit)
@@ -170,43 +178,47 @@ func (m *module) listMetrics(ctx context.Context, orgID valuer.UUID, params *met
}
defer rows.Close()
metricNames := make([]string, 0)
var metrics []metricsexplorertypes.ListMetric
var metricNames []string
for rows.Next() {
var name string
if err := rows.Scan(&name); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metric name")
var metric metricsexplorertypes.ListMetric
if err := rows.Scan(
&metric.MetricName,
&metric.Description,
&metric.MetricType,
&metric.MetricUnit,
&metric.Temporality,
&metric.IsMonotonic,
); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metric")
}
metricNames = append(metricNames, name)
metrics = append(metrics, metric)
metricNames = append(metricNames, metric.MetricName)
}
if err := rows.Err(); err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metric names")
return nil, errors.WrapInternalf(err, errors.CodeInternal, "error iterating metrics")
}
if len(metricNames) == 0 {
if len(metrics) == 0 {
return &metricsexplorertypes.ListMetricsResponse{
Metrics: []metricsexplorertypes.ListMetric{},
}, nil
}
metadata, err := m.GetMetricMetadataMulti(ctx, orgID, metricNames)
// Overlay any user-updated metadata on top of the timeseries metadata.
updatedMetadata, err := m.fetchUpdatedMetadata(ctx, orgID, metricNames)
if err != nil {
return nil, err
}
metrics := make([]metricsexplorertypes.ListMetric, 0, len(metricNames))
for _, name := range metricNames {
metric := metricsexplorertypes.ListMetric{
MetricName: name,
for i := range metrics {
if meta, ok := updatedMetadata[metrics[i].MetricName]; ok && meta != nil {
metrics[i].Description = meta.Description
metrics[i].MetricType = meta.MetricType
metrics[i].MetricUnit = meta.MetricUnit
metrics[i].Temporality = meta.Temporality
metrics[i].IsMonotonic = meta.IsMonotonic
}
if meta, ok := metadata[name]; ok && meta != nil {
metric.Description = meta.Description
metric.MetricType = meta.MetricType
metric.MetricUnit = meta.MetricUnit
metric.Temporality = meta.Temporality
metric.IsMonotonic = meta.IsMonotonic
}
metrics = append(metrics, metric)
}
return &metricsexplorertypes.ListMetricsResponse{
@@ -243,19 +255,18 @@ func (m *module) GetStats(ctx context.Context, orgID valuer.UUID, req *metricsex
}, nil
}
// Get metadata for all metrics
// Overlay any user-updated metadata on top of the timeseries metadata
// that was already fetched in the combined query.
metricNames := make([]string, len(metricStats))
for i := range metricStats {
metricNames[i] = metricStats[i].MetricName
}
metadata, err := m.GetMetricMetadataMulti(ctx, orgID, metricNames)
updatedMetadata, err := m.fetchUpdatedMetadata(ctx, orgID, metricNames)
if err != nil {
return nil, err
}
// Enrich stats with metadata
enrichStatsWithMetadata(metricStats, metadata)
enrichStatsWithMetadata(metricStats, updatedMetadata)
return &metricsexplorertypes.StatsResponse{
Metrics: metricStats,
@@ -984,11 +995,14 @@ func (m *module) fetchMetricsStatsWithSamples(
samplesTable := telemetrymetrics.WhichSamplesTableToUse(uint64(req.Start), uint64(req.End), metrictypes.UnspecifiedType, metrictypes.TimeAggregationUnspecified, nil)
countExp := telemetrymetrics.CountExpressionForSamplesTable(samplesTable)
// Timeseries counts per metric
// Timeseries counts and metadata per metric.
tsSB := sqlbuilder.NewSelectBuilder()
tsSB.Select(
"metric_name",
"uniq(fingerprint) AS timeseries",
"anyLast(description) AS description",
"anyLast(type) AS metric_type",
"argMax(unit, unix_milli) AS metric_unit",
)
tsSB.From(fmt.Sprintf("%s.%s", telemetrymetrics.DBName, distributedTsTable))
tsSB.Where(tsSB.Between("unix_milli", start, end))
@@ -1036,6 +1050,9 @@ func (m *module) fetchMetricsStatsWithSamples(
"COALESCE(ts.timeseries, 0) AS timeseries",
"COALESCE(s.samples, 0) AS samples",
"COUNT(*) OVER() AS total",
"ts.description AS description",
"ts.metric_type AS metric_type",
"ts.metric_unit AS metric_unit",
)
finalSB.From("__time_series_counts ts")
finalSB.JoinWithOption(sqlbuilder.FullOuterJoin, "__sample_counts s", "ts.metric_name = s.metric_name")
@@ -1071,7 +1088,7 @@ func (m *module) fetchMetricsStatsWithSamples(
metricStat metricsexplorertypes.Stat
rowTotal uint64
)
if err := rows.Scan(&metricStat.MetricName, &metricStat.TimeSeries, &metricStat.Samples, &rowTotal); err != nil {
if err := rows.Scan(&metricStat.MetricName, &metricStat.TimeSeries, &metricStat.Samples, &rowTotal, &metricStat.Description, &metricStat.MetricType, &metricStat.MetricUnit); err != nil {
return nil, 0, errors.WrapInternalf(err, errors.CodeInternal, "failed to scan metrics stats row")
}
metricStats = append(metricStats, metricStat)

View File

@@ -40,7 +40,6 @@ type querier struct {
promEngine prometheus.Prometheus
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation]
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
auditStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation]
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder
@@ -57,7 +56,6 @@ func New(
promEngine prometheus.Prometheus,
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
logStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
auditStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
metricStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
meterStmtBuilder qbtypes.StatementBuilder[qbtypes.MetricAggregation],
traceOperatorStmtBuilder qbtypes.TraceOperatorStatementBuilder,
@@ -71,7 +69,6 @@ func New(
promEngine: promEngine,
traceStmtBuilder: traceStmtBuilder,
logStmtBuilder: logStmtBuilder,
auditStmtBuilder: auditStmtBuilder,
metricStmtBuilder: metricStmtBuilder,
meterStmtBuilder: meterStmtBuilder,
traceOperatorStmtBuilder: traceOperatorStmtBuilder,
@@ -364,11 +361,7 @@ func (q *querier) QueryRange(ctx context.Context, orgID valuer.UUID, req *qbtype
case qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]:
spec.ShiftBy = extractShiftFromBuilderQuery(spec)
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: req.Start, To: req.End}, req.RequestType)
stmtBuilder := q.logStmtBuilder
if spec.Source == telemetrytypes.SourceAudit {
stmtBuilder = q.auditStmtBuilder
}
bq := newBuilderQuery(q.logger, q.telemetryStore, stmtBuilder, spec, timeRange, req.RequestType, tmplVars)
bq := newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, tmplVars)
queries[spec.Name] = bq
steps[spec.Name] = spec.StepInterval
case qbtypes.QueryBuilderQuery[qbtypes.MetricAggregation]:
@@ -557,11 +550,7 @@ func (q *querier) QueryRawStream(ctx context.Context, orgID valuer.UUID, req *qb
case <-tick:
// timestamp end is not specified here
timeRange := adjustTimeRangeForShift(spec, qbtypes.TimeRange{From: tsStart}, req.RequestType)
liveTailStmtBuilder := q.logStmtBuilder
if spec.Source == telemetrytypes.SourceAudit {
liveTailStmtBuilder = q.auditStmtBuilder
}
bq := newBuilderQuery(q.logger, q.telemetryStore, liveTailStmtBuilder, spec, timeRange, req.RequestType, map[string]qbtypes.VariableItem{
bq := newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, spec, timeRange, req.RequestType, map[string]qbtypes.VariableItem{
"id": {
Value: updatedLogID,
},
@@ -861,11 +850,7 @@ func (q *querier) createRangedQuery(originalQuery qbtypes.Query, timeRange qbtyp
specCopy := qt.spec.Copy()
specCopy.ShiftBy = extractShiftFromBuilderQuery(specCopy)
adjustedTimeRange := adjustTimeRangeForShift(specCopy, timeRange, qt.kind)
shiftStmtBuilder := q.logStmtBuilder
if qt.spec.Source == telemetrytypes.SourceAudit {
shiftStmtBuilder = q.auditStmtBuilder
}
return newBuilderQuery(q.logger, q.telemetryStore, shiftStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
return newBuilderQuery(q.logger, q.telemetryStore, q.logStmtBuilder, specCopy, adjustedTimeRange, qt.kind, qt.variables)
case *builderQuery[qbtypes.MetricAggregation]:
specCopy := qt.spec.Copy()

View File

@@ -47,7 +47,6 @@ func TestQueryRange_MetricTypeMissing(t *testing.T) {
nil, // prometheus
nil, // traceStmtBuilder
nil, // logStmtBuilder
nil, // auditStmtBuilder
nil, // metricStmtBuilder
nil, // meterStmtBuilder
nil, // traceOperatorStmtBuilder
@@ -111,7 +110,6 @@ func TestQueryRange_MetricTypeFromStore(t *testing.T) {
nil, // prometheus
nil, // traceStmtBuilder
nil, // logStmtBuilder
nil, // auditStmtBuilder
&mockMetricStmtBuilder{}, // metricStmtBuilder
nil, // meterStmtBuilder
nil, // traceOperatorStmtBuilder

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetryaudit"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
@@ -64,11 +63,6 @@ func newProvider(
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetryaudit.DBName,
telemetryaudit.AuditLogsTableName,
telemetryaudit.TagAttributesTableName,
telemetryaudit.LogAttributeKeysTblName,
telemetryaudit.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
telemetrymetadata.ColumnEvolutionMetadataTableName,
@@ -88,13 +82,13 @@ func newProvider(
telemetryStore,
)
// Create trace operator statement builder
// ADD: Create trace operator statement builder
traceOperatorStmtBuilder := telemetrytraces.NewTraceOperatorStatementBuilder(
settings,
telemetryMetadataStore,
traceFieldMapper,
traceConditionBuilder,
traceStmtBuilder,
traceStmtBuilder, // Pass the regular trace statement builder
traceAggExprRewriter,
)
@@ -118,26 +112,6 @@ func newProvider(
telemetrylogs.GetBodyJSONKey,
)
// Create audit statement builder
auditFieldMapper := telemetryaudit.NewFieldMapper()
auditConditionBuilder := telemetryaudit.NewConditionBuilder(auditFieldMapper)
auditAggExprRewriter := querybuilder.NewAggExprRewriter(
settings,
telemetryaudit.DefaultFullTextColumn,
auditFieldMapper,
auditConditionBuilder,
nil,
)
auditStmtBuilder := telemetryaudit.NewAuditQueryStatementBuilder(
settings,
telemetryMetadataStore,
auditFieldMapper,
auditConditionBuilder,
auditAggExprRewriter,
telemetryaudit.DefaultFullTextColumn,
nil,
)
// Create metric statement builder
metricFieldMapper := telemetrymetrics.NewFieldMapper()
metricConditionBuilder := telemetrymetrics.NewConditionBuilder(metricFieldMapper)
@@ -174,7 +148,6 @@ func newProvider(
prometheus,
traceStmtBuilder,
logStmtBuilder,
auditStmtBuilder,
metricStmtBuilder,
meterStmtBuilder,
traceOperatorStmtBuilder,

View File

@@ -208,7 +208,7 @@ func (s *Server) createPublicServer(api *APIHandler, web web.Web) (*http.Server,
s.config.APIServer.Timeout.Default,
s.config.APIServer.Timeout.Max,
).Wrap)
r.Use(middleware.NewAudit(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes, s.signoz.Auditor).Wrap)
r.Use(middleware.NewAudit(s.signoz.Instrumentation.Logger(), s.config.APIServer.Logging.ExcludedRoutes, nil).Wrap)
r.Use(middleware.NewComment().Wrap)
am := middleware.NewAuthZ(s.signoz.Instrumentation.Logger(), s.signoz.Modules.OrgGetter, s.signoz.Authz)

View File

@@ -46,7 +46,6 @@ func prepareQuerierForMetrics(t *testing.T, telemetryStore telemetrystore.Teleme
nil, // prometheus
nil, // traceStmtBuilder
nil, // logStmtBuilder
nil, // auditStmtBuilder
metricStmtBuilder,
nil, // meterStmtBuilder
nil, // traceOperatorStmtBuilder
@@ -92,7 +91,6 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
nil, // prometheus
nil, // traceStmtBuilder
logStmtBuilder, // logStmtBuilder
nil, // auditStmtBuilder
nil, // metricStmtBuilder
nil, // meterStmtBuilder
nil, // traceOperatorStmtBuilder
@@ -133,7 +131,6 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM
nil, // prometheus
traceStmtBuilder, // traceStmtBuilder
nil, // logStmtBuilder
nil, // auditStmtBuilder
nil, // metricStmtBuilder
nil, // meterStmtBuilder
nil, // traceOperatorStmtBuilder

View File

@@ -11,7 +11,6 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/apiserver"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/cache"
"github.com/SigNoz/signoz/pkg/config"
"github.com/SigNoz/signoz/pkg/emailing"
@@ -124,9 +123,6 @@ type Config struct {
// ServiceAccount config
ServiceAccount serviceaccount.Config `mapstructure:"serviceaccount"`
// Auditor config
Auditor auditor.Config `mapstructure:"auditor"`
}
func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.ResolverConfig) (Config, error) {
@@ -157,7 +153,6 @@ func NewConfig(ctx context.Context, logger *slog.Logger, resolverConfig config.R
user.NewConfigFactory(),
identn.NewConfigFactory(),
serviceaccount.NewConfigFactory(),
auditor.NewConfigFactory(),
}
conf, err := config.New(ctx, resolverConfig, configFactories)

View File

@@ -3,8 +3,6 @@ package signoz
import (
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/auditor/noopauditor"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/rulebasednotification"
"github.com/SigNoz/signoz/pkg/alertmanager/signozalertmanager"
"github.com/SigNoz/signoz/pkg/analytics"
@@ -314,12 +312,6 @@ func NewGlobalProviderFactories(identNConfig identn.Config) factory.NamedMap[fac
)
}
func NewAuditorProviderFactories() factory.NamedMap[factory.ProviderFactory[auditor.Auditor, auditor.Config]] {
return factory.MustNewNamedMap(
noopauditor.NewFactory(),
)
}
func NewFlaggerProviderFactories(registry featuretypes.Registry) factory.NamedMap[factory.ProviderFactory[flagger.FlaggerProvider, flagger.Config]] {
return factory.MustNewNamedMap(
configflagger.NewFactory(registry),

View File

@@ -6,7 +6,6 @@ import (
"github.com/SigNoz/signoz/pkg/alertmanager"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager"
"github.com/SigNoz/signoz/pkg/auditor"
"github.com/SigNoz/signoz/pkg/alertmanager/nfmanager/nfroutingstore/sqlroutingstore"
"github.com/SigNoz/signoz/pkg/analytics"
"github.com/SigNoz/signoz/pkg/apiserver"
@@ -34,7 +33,6 @@ import (
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/telemetryaudit"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
@@ -76,7 +74,6 @@ type SigNoz struct {
QueryParser queryparser.QueryParser
Flagger flagger.Flagger
Gateway gateway.Gateway
Auditor auditor.Auditor
}
func New(
@@ -96,7 +93,6 @@ func New(
authzCallback func(context.Context, sqlstore.SQLStore, licensing.Licensing, dashboard.Module) (factory.ProviderFactory[authz.AuthZ, authz.Config], error),
dashboardModuleCallback func(sqlstore.SQLStore, factory.ProviderSettings, analytics.Analytics, organization.Getter, queryparser.QueryParser, querier.Querier, licensing.Licensing) dashboard.Module,
gatewayProviderFactory func(licensing.Licensing) factory.ProviderFactory[gateway.Gateway, gateway.Config],
auditorProviderFactories func(licensing.Licensing) factory.NamedMap[factory.ProviderFactory[auditor.Auditor, auditor.Config]],
querierHandlerCallback func(factory.ProviderSettings, querier.Querier, analytics.Analytics) querier.Handler,
) (*SigNoz, error) {
// Initialize instrumentation
@@ -374,12 +370,6 @@ func New(
return nil, err
}
// Initialize auditor from the variant-specific provider factories
auditor, err := factory.NewProviderFromNamedMap(ctx, providerSettings, config.Auditor, auditorProviderFactories(licensing), config.Auditor.Provider)
if err != nil {
return nil, err
}
// Initialize authns
store := sqlauthnstore.NewStore(sqlstore)
authNs, err := authNsCallback(ctx, providerSettings, store, licensing)
@@ -405,11 +395,6 @@ func New(
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetryaudit.DBName,
telemetryaudit.AuditLogsTableName,
telemetryaudit.TagAttributesTableName,
telemetryaudit.LogAttributeKeysTblName,
telemetryaudit.LogResourceKeysTblName,
telemetrymetadata.DBName,
telemetrymetadata.AttributesMetadataLocalTableName,
telemetrymetadata.ColumnEvolutionMetadataTableName,
@@ -479,7 +464,6 @@ func New(
factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
factory.NewNamedService(factory.MustNewName("authz"), authz),
factory.NewNamedService(factory.MustNewName("user"), userService, factory.MustNewName("authz")),
factory.NewNamedService(factory.MustNewName("auditor"), auditor),
)
if err != nil {
return nil, err
@@ -526,6 +510,5 @@ func New(
QueryParser: queryParser,
Flagger: flagger,
Gateway: gateway,
Auditor: auditor,
}, nil
}

View File

@@ -1,200 +0,0 @@
package telemetryaudit
import (
"context"
"fmt"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
)
// conditionBuilder renders query-builder filter operators into SQL
// predicates, using a field mapper to resolve keys to columns.
type conditionBuilder struct {
	fm qbtypes.FieldMapper
}

// NewConditionBuilder returns a conditionBuilder backed by fm.
func NewConditionBuilder(fm qbtypes.FieldMapper) *conditionBuilder {
	builder := conditionBuilder{fm: fm}
	return &builder
}
// conditionFor renders a single WHERE predicate for key/operator/value,
// binding argument values through sb. It returns the SQL fragment only;
// the caller is responsible for attaching it to the statement.
func (c *conditionBuilder) conditionFor(
ctx context.Context,
startNs, endNs uint64,
key *telemetrytypes.TelemetryFieldKey,
operator qbtypes.FilterOperator,
value any,
sb *sqlbuilder.SelectBuilder,
) (string, error) {
// Resolve the physical column(s) backing the key; needed later by the
// exists/not-exists branch to pick a type-appropriate emptiness check.
columns, err := c.fm.ColumnFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}
// Normalize the value for string-search operators (like/contains family).
if operator.IsStringSearchOperator() {
value = querybuilder.FormatValueForContains(value)
}
fieldExpression, err := c.fm.FieldFor(ctx, startNs, endNs, key)
if err != nil {
return "", err
}
// Reconcile mismatches between the key's declared data type and the
// value's actual type (may rewrite both expression and value).
fieldExpression, value = querybuilder.DataTypeCollisionHandledFieldName(key, value, fieldExpression, operator)
switch operator {
case qbtypes.FilterOperatorEqual:
return sb.E(fieldExpression, value), nil
case qbtypes.FilterOperatorNotEqual:
return sb.NE(fieldExpression, value), nil
case qbtypes.FilterOperatorGreaterThan:
return sb.G(fieldExpression, value), nil
case qbtypes.FilterOperatorGreaterThanOrEq:
return sb.GE(fieldExpression, value), nil
case qbtypes.FilterOperatorLessThan:
return sb.LT(fieldExpression, value), nil
case qbtypes.FilterOperatorLessThanOrEq:
return sb.LE(fieldExpression, value), nil
case qbtypes.FilterOperatorLike:
return sb.Like(fieldExpression, value), nil
case qbtypes.FilterOperatorNotLike:
return sb.NotLike(fieldExpression, value), nil
case qbtypes.FilterOperatorILike:
return sb.ILike(fieldExpression, value), nil
case qbtypes.FilterOperatorNotILike:
return sb.NotILike(fieldExpression, value), nil
// Contains is implemented as a case-insensitive LIKE with wildcards on
// both sides of the (already formatted) value.
case qbtypes.FilterOperatorContains:
return sb.ILike(fieldExpression, fmt.Sprintf("%%%s%%", value)), nil
case qbtypes.FilterOperatorNotContains:
return sb.NotILike(fieldExpression, fmt.Sprintf("%%%s%%", value)), nil
// Regexp operators use ClickHouse's match(); the pattern is bound as an
// argument via sb.Var, the field expression is escaped inline.
case qbtypes.FilterOperatorRegexp:
return fmt.Sprintf(`match(%s, %s)`, sqlbuilder.Escape(fieldExpression), sb.Var(value)), nil
case qbtypes.FilterOperatorNotRegexp:
return fmt.Sprintf(`NOT match(%s, %s)`, sqlbuilder.Escape(fieldExpression), sb.Var(value)), nil
// Between/NotBetween require exactly a two-element slice.
case qbtypes.FilterOperatorBetween:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrBetweenValues
}
if len(values) != 2 {
return "", qbtypes.ErrBetweenValues
}
return sb.Between(fieldExpression, values[0], values[1]), nil
case qbtypes.FilterOperatorNotBetween:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrBetweenValues
}
if len(values) != 2 {
return "", qbtypes.ErrBetweenValues
}
return sb.NotBetween(fieldExpression, values[0], values[1]), nil
// In/NotIn are expanded to OR-of-equals / AND-of-not-equals rather than a
// SQL IN list.
case qbtypes.FilterOperatorIn:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrInValues
}
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.E(fieldExpression, value))
}
return sb.Or(conditions...), nil
case qbtypes.FilterOperatorNotIn:
values, ok := value.([]any)
if !ok {
return "", qbtypes.ErrInValues
}
conditions := []string{}
for _, value := range values {
conditions = append(conditions, sb.NE(fieldExpression, value))
}
return sb.And(conditions...), nil
// Exists/NotExists: the emptiness test depends on the column's storage
// type, since scalar columns have no NULLs and use zero values instead.
case qbtypes.FilterOperatorExists, qbtypes.FilterOperatorNotExists:
var value any
column := columns[0]
switch column.Type.GetType() {
case schema.ColumnTypeEnumJSON:
if operator == qbtypes.FilterOperatorExists {
return sb.IsNotNull(fieldExpression), nil
}
return sb.IsNull(fieldExpression), nil
case schema.ColumnTypeEnumLowCardinality:
switch elementType := column.Type.(schema.LowCardinalityColumnType).ElementType; elementType.GetType() {
case schema.ColumnTypeEnumString:
// Empty string marks "absent" for string-backed columns.
value = ""
if operator == qbtypes.FilterOperatorExists {
return sb.NE(fieldExpression, value), nil
}
return sb.E(fieldExpression, value), nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for low cardinality column type %s", elementType)
}
case schema.ColumnTypeEnumString:
value = ""
if operator == qbtypes.FilterOperatorExists {
return sb.NE(fieldExpression, value), nil
}
return sb.E(fieldExpression, value), nil
case schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
// Zero marks "absent" for unsigned integer columns.
value = 0
if operator == qbtypes.FilterOperatorExists {
return sb.NE(fieldExpression, value), nil
}
return sb.E(fieldExpression, value), nil
case schema.ColumnTypeEnumMap:
keyType := column.Type.(schema.MapColumnType).KeyType
if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
}
switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
// For map attributes, existence is a mapContains() check, or the
// dedicated exists column when the key is materialized.
leftOperand := fmt.Sprintf("mapContains(%s, '%s')", column.Name, key.Name)
if key.Materialized {
leftOperand = telemetrytypes.FieldKeyToMaterializedColumnNameForExists(key)
}
if operator == qbtypes.FilterOperatorExists {
return sb.E(leftOperand, true), nil
}
return sb.NE(leftOperand, true), nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for map column type %s", valueType)
}
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "exists operator is not supported for column type %s", column.Type)
}
}
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported operator: %v", operator)
}
// ConditionFor renders the predicate for key/operator/value and, for keys
// outside the log and scope contexts, ANDs in an implicit "exists" guard
// when the operator calls for one.
func (c *conditionBuilder) ConditionFor(
	ctx context.Context,
	startNs uint64,
	endNs uint64,
	key *telemetrytypes.TelemetryFieldKey,
	operator qbtypes.FilterOperator,
	value any,
	sb *sqlbuilder.SelectBuilder,
) (string, error) {
	cond, err := c.conditionFor(ctx, startNs, endNs, key, operator, value, sb)
	if err != nil {
		return "", err
	}
	// Log- and scope-context fields map to concrete columns, so no exists
	// guard is attached for them.
	inConcreteContext := key.FieldContext == telemetrytypes.FieldContextLog ||
		key.FieldContext == telemetrytypes.FieldContextScope
	if inConcreteContext || !operator.AddDefaultExistsFilter() {
		return cond, nil
	}
	existsCond, err := c.conditionFor(ctx, startNs, endNs, key, qbtypes.FilterOperatorExists, nil, sb)
	if err != nil {
		return "", err
	}
	return sb.And(cond, existsCond), nil
}

View File

@@ -1,129 +0,0 @@
package telemetryaudit
import (
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
// Column names of the audit logs ClickHouse table, grouped by origin:
// internal bookkeeping, intrinsic log fields, and contextual containers.
const (
// Internal Columns.
IDColumn = "id"
TimestampBucketStartColumn = "ts_bucket_start"
ResourceFingerPrintColumn = "resource_fingerprint"
// Intrinsic Columns.
TimestampColumn = "timestamp"
ObservedTimestampColumn = "observed_timestamp"
BodyColumn = "body"
EventNameColumn = "event_name"
TraceIDColumn = "trace_id"
SpanIDColumn = "span_id"
TraceFlagsColumn = "trace_flags"
SeverityTextColumn = "severity_text"
SeverityNumberColumn = "severity_number"
ScopeNameColumn = "scope_name"
ScopeVersionColumn = "scope_version"
// Contextual Columns (typed attribute/resource/scope containers).
AttributesStringColumn = "attributes_string"
AttributesNumberColumn = "attributes_number"
AttributesBoolColumn = "attributes_bool"
ResourceColumn = "resource"
ScopeStringColumn = "scope_string"
)
var (
// DefaultFullTextColumn is the field targeted by full-text search
// expressions: the log body.
DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
Name: "body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
}
// IntrinsicFields maps names of log-intrinsic fields to their canonical
// key definitions; adjustKey prefers these over metadata-derived keys.
IntrinsicFields = map[string]telemetrytypes.TelemetryFieldKey{
"body": {
Name: "body",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_id": {
Name: "trace_id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"span_id": {
Name: "span_id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"trace_flags": {
Name: "trace_flags",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"severity_text": {
Name: "severity_text",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
"severity_number": {
Name: "severity_number",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
"event_name": {
Name: "event_name",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
}
// DefaultSortingOrder sorts newest-first: timestamp descending, with the
// id column as a deterministic tiebreaker.
DefaultSortingOrder = []qbtypes.OrderBy{
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: TimestampColumn,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
{
Key: qbtypes.OrderByKey{
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{
Name: IDColumn,
},
},
Direction: qbtypes.OrderDirectionDesc,
},
}
)
// auditLogColumns describes the physical schema of the audit logs table,
// keyed by column name. The field mapper resolves telemetry keys against
// this map. NOTE(review): presumably this must stay in sync with the
// collector's schema migrator definitions — verify when changing.
var auditLogColumns = map[string]*schema.Column{
"ts_bucket_start": {Name: "ts_bucket_start", Type: schema.ColumnTypeUInt64},
"resource_fingerprint": {Name: "resource_fingerprint", Type: schema.ColumnTypeString},
"timestamp": {Name: "timestamp", Type: schema.ColumnTypeUInt64},
"observed_timestamp": {Name: "observed_timestamp", Type: schema.ColumnTypeUInt64},
"id": {Name: "id", Type: schema.ColumnTypeString},
"trace_id": {Name: "trace_id", Type: schema.ColumnTypeString},
"span_id": {Name: "span_id", Type: schema.ColumnTypeString},
"trace_flags": {Name: "trace_flags", Type: schema.ColumnTypeUInt32},
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
"body": {Name: "body", Type: schema.ColumnTypeString},
"attributes_string": {Name: "attributes_string", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeString}},
"attributes_number": {Name: "attributes_number", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeFloat64}},
"attributes_bool": {Name: "attributes_bool", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeBool}},
"resource": {Name: "resource", Type: schema.JSONColumnType{}},
"event_name": {Name: "event_name", Type: schema.ColumnTypeString},
"scope_name": {Name: "scope_name", Type: schema.ColumnTypeString},
"scope_version": {Name: "scope_version", Type: schema.ColumnTypeString},
"scope_string": {Name: "scope_string", Type: schema.MapColumnType{KeyType: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}, ValueType: schema.ColumnTypeString}},
}

View File

@@ -1,124 +0,0 @@
package telemetryaudit
import (
"context"
"fmt"
schema "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
"github.com/SigNoz/signoz/pkg/errors"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
"golang.org/x/exp/maps"
)
// fieldMapper resolves telemetry field keys against the static audit log
// table schema. It is stateless.
type fieldMapper struct{}

// NewFieldMapper returns a FieldMapper for the audit log schema.
func NewFieldMapper() qbtypes.FieldMapper {
	return new(fieldMapper)
}
// getColumn maps a telemetry field key to the physical column(s) backing
// it, based on the key's field context and data type. Returns
// qbtypes.ErrColumnNotFound when the key cannot be resolved.
func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
switch key.FieldContext {
case telemetrytypes.FieldContextResource:
// All resource fields live inside the single JSON "resource" column.
return []*schema.Column{auditLogColumns["resource"]}, nil
case telemetrytypes.FieldContextScope:
// Scope name/version have dedicated columns; any other scope field
// falls back to the scope_string map.
switch key.Name {
case "name", "scope.name", "scope_name":
return []*schema.Column{auditLogColumns["scope_name"]}, nil
case "version", "scope.version", "scope_version":
return []*schema.Column{auditLogColumns["scope_version"]}, nil
}
return []*schema.Column{auditLogColumns["scope_string"]}, nil
case telemetrytypes.FieldContextAttribute:
// Attributes are stored in typed maps selected by value data type;
// unrecognized data types fall through to ErrColumnNotFound below.
switch key.FieldDataType {
case telemetrytypes.FieldDataTypeString:
return []*schema.Column{auditLogColumns["attributes_string"]}, nil
case telemetrytypes.FieldDataTypeInt64, telemetrytypes.FieldDataTypeFloat64, telemetrytypes.FieldDataTypeNumber:
return []*schema.Column{auditLogColumns["attributes_number"]}, nil
case telemetrytypes.FieldDataTypeBool:
return []*schema.Column{auditLogColumns["attributes_bool"]}, nil
}
case telemetrytypes.FieldContextLog, telemetrytypes.FieldContextUnspecified:
// Log-context (or unspecified) keys address top-level columns by name.
col, ok := auditLogColumns[key.Name]
if !ok {
return nil, qbtypes.ErrColumnNotFound
}
return []*schema.Column{col}, nil
}
return nil, qbtypes.ErrColumnNotFound
}
// FieldFor returns the SQL expression that reads key's value, derived from
// the backing column's storage type. The time-range parameters are unused
// because the audit schema does not vary over time.
func (m *fieldMapper) FieldFor(ctx context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) (string, error) {
columns, err := m.getColumn(ctx, key)
if err != nil {
return "", err
}
// getColumn currently returns at most one column; guard the invariant.
if len(columns) != 1 {
return "", errors.Newf(errors.TypeInternal, errors.CodeInternal, "expected exactly 1 column, got %d", len(columns))
}
column := columns[0]
switch column.Type.GetType() {
case schema.ColumnTypeEnumJSON:
// Only the resource column is JSON-typed in this schema.
if key.FieldContext != telemetrytypes.FieldContextResource {
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource context fields are supported for json columns in audit, got %s", key.FieldContext.String)
}
// JSON subcolumn access, cast to String.
return fmt.Sprintf("%s.`%s`::String", column.Name, key.Name), nil
case schema.ColumnTypeEnumLowCardinality:
return column.Name, nil
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumUInt64, schema.ColumnTypeEnumUInt32, schema.ColumnTypeEnumUInt8:
// Scalar columns are addressed directly by name.
return column.Name, nil
case schema.ColumnTypeEnumMap:
keyType := column.Type.(schema.MapColumnType).KeyType
if _, ok := keyType.(schema.LowCardinalityColumnType); !ok {
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "key type %s is not supported for map column type %s", keyType, column.Type)
}
switch valueType := column.Type.(schema.MapColumnType).ValueType; valueType.GetType() {
case schema.ColumnTypeEnumString, schema.ColumnTypeEnumBool, schema.ColumnTypeEnumFloat64:
// Materialized attribute keys have their own dedicated column.
if key.Materialized {
return telemetrytypes.FieldKeyToMaterializedColumnName(key), nil
}
return fmt.Sprintf("%s['%s']", column.Name, key.Name), nil
default:
return "", errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported map value type %s", valueType)
}
}
return column.Name, nil
}
// ColumnFor returns the physical column(s) backing key. The time-range
// parameters are unused because the audit schema does not vary over time.
func (m *fieldMapper) ColumnFor(ctx context.Context, _, _ uint64, key *telemetrytypes.TelemetryFieldKey) ([]*schema.Column, error) {
	columns, err := m.getColumn(ctx, key)
	if err != nil {
		return nil, err
	}
	return columns, nil
}
// ColumnExpressionFor returns "<expr> AS `<name>`" for field, suitable for
// a SELECT list. When the field does not resolve directly, it retries with
// the metadata keys registered for that name; failing that, it falls back
// to a log-context lookup against the static audit column set, and finally
// suggests a close spelling before returning an invalid-input error.
func (m *fieldMapper) ColumnExpressionFor(
	ctx context.Context,
	tsStart, tsEnd uint64,
	field *telemetrytypes.TelemetryFieldKey,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
) (string, error) {
	fieldExpression, err := m.FieldFor(ctx, tsStart, tsEnd, field)
	if err != nil && !errors.Is(err, qbtypes.ErrColumnNotFound) {
		// Bug fix: previously any error other than ErrColumnNotFound was
		// silently swallowed and an empty expression was emitted with a nil
		// error; propagate it so callers don't build malformed SQL.
		return "", err
	}
	if errors.Is(err, qbtypes.ErrColumnNotFound) {
		keysForField := keys[field.Name]
		if len(keysForField) == 0 {
			if _, ok := auditLogColumns[field.Name]; ok {
				// The name matches a top-level audit column; retry in log
				// context (mutates field, matching existing behavior).
				field.FieldContext = telemetrytypes.FieldContextLog
				fieldExpression, _ = m.FieldFor(ctx, tsStart, tsEnd, field)
			} else {
				correction, found := telemetrytypes.SuggestCorrection(field.Name, maps.Keys(keys))
				if found {
					return "", errors.Wrap(err, errors.TypeInvalidInput, errors.CodeInvalidInput, correction)
				}
				return "", errors.Wrapf(err, errors.TypeInvalidInput, errors.CodeInvalidInput, "field `%s` not found", field.Name)
			}
		} else {
			// Use the first metadata-provided key registered for this name.
			fieldExpression, _ = m.FieldFor(ctx, tsStart, tsEnd, keysForField[0])
		}
	}
	return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(fieldExpression), field.Name), nil
}

View File

@@ -1,612 +0,0 @@
package telemetryaudit
import (
"context"
"fmt"
"log/slog"
"strings"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
)
// auditQueryStatementBuilder builds ClickHouse statements for audit log
// queries across the raw/list, time-series, and scalar request types.
type auditQueryStatementBuilder struct {
logger *slog.Logger
// metadataStore resolves field-key selectors to known telemetry keys.
metadataStore telemetrytypes.MetadataStore
fm qbtypes.FieldMapper
cb qbtypes.ConditionBuilder
// resourceFilterStmtBuilder builds the resource-filter CTE attached to
// every query variant.
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation]
aggExprRewriter qbtypes.AggExprRewriter
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
}

// Compile-time check that the builder satisfies the StatementBuilder interface.
var _ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*auditQueryStatementBuilder)(nil)
// NewAuditQueryStatementBuilder wires an audit statement builder together
// with a resource-filter sub-builder over the logs resource table.
func NewAuditQueryStatementBuilder(
settings factory.ProviderSettings,
metadataStore telemetrytypes.MetadataStore,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
aggExprRewriter qbtypes.AggExprRewriter,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *auditQueryStatementBuilder {
// Scope the logger/settings to this package for attribution.
auditSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetryaudit")
// NOTE(review): LogsResourceTableName is defined elsewhere in the package;
// presumably the audit source shares the logs resource table — confirm.
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.LogAggregation](
settings,
DBName,
LogsResourceTableName,
telemetrytypes.SignalLogs,
telemetrytypes.SourceAudit,
metadataStore,
fullTextColumn,
jsonKeyToKey,
)
return &auditQueryStatementBuilder{
logger: auditSettings.Logger(),
metadataStore: metadataStore,
fm: fieldMapper,
cb: conditionBuilder,
resourceFilterStmtBuilder: resourceFilterStmtBuilder,
aggExprRewriter: aggExprRewriter,
fullTextColumn: fullTextColumn,
jsonKeyToKey: jsonKeyToKey,
}
}
// Build produces the ClickHouse statement for query: it fetches key
// metadata, normalizes the query's keys, and dispatches on requestType to
// the list/time-series/scalar builders.
func (b *auditQueryStatementBuilder) Build(
ctx context.Context,
start uint64,
end uint64,
requestType qbtypes.RequestType,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
// Normalize the time range to nanoseconds.
start = querybuilder.ToNanoSecs(start)
end = querybuilder.ToNanoSecs(end)
keySelectors := getKeySelectors(query)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)
if err != nil {
return nil, err
}
query = b.adjustKeys(ctx, keys, query, requestType)
q := sqlbuilder.NewSelectBuilder()
var stmt *qbtypes.Statement
switch requestType {
case qbtypes.RequestTypeRaw, qbtypes.RequestTypeRawStream:
stmt, err = b.buildListQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeTimeSeries:
stmt, err = b.buildTimeSeriesQuery(ctx, q, query, start, end, keys, variables)
case qbtypes.RequestTypeScalar:
stmt, err = b.buildScalarQuery(ctx, q, query, start, end, keys, false, variables)
default:
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported request type: %s", requestType)
}
if err != nil {
return nil, err
}
return stmt, nil
}
// getKeySelectors collects field-key selectors from every part of the
// query (aggregations, filter, group-by, select, order) and pins each one
// to the logs signal / audit source with exact-match semantics.
func getKeySelectors(query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) []*telemetrytypes.FieldKeySelector {
var keySelectors []*telemetrytypes.FieldKeySelector
// Keys referenced inside aggregation expressions.
for idx := range query.Aggregations {
aggExpr := query.Aggregations[idx]
selectors := querybuilder.QueryStringToKeysSelectors(aggExpr.Expression)
keySelectors = append(keySelectors, selectors...)
}
// Keys referenced in the filter expression.
if query.Filter != nil && query.Filter.Expression != "" {
whereClauseSelectors := querybuilder.QueryStringToKeysSelectors(query.Filter.Expression)
keySelectors = append(keySelectors, whereClauseSelectors...)
}
// Group-by keys.
for idx := range query.GroupBy {
groupBy := query.GroupBy[idx]
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: groupBy.Name,
Signal: telemetrytypes.SignalLogs,
FieldContext: groupBy.FieldContext,
FieldDataType: groupBy.FieldDataType,
})
}
// Explicitly selected fields.
for idx := range query.SelectFields {
selectField := query.SelectFields[idx]
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: selectField.Name,
Signal: telemetrytypes.SignalLogs,
FieldContext: selectField.FieldContext,
FieldDataType: selectField.FieldDataType,
})
}
// Order-by keys.
for idx := range query.Order {
keySelectors = append(keySelectors, &telemetrytypes.FieldKeySelector{
Name: query.Order[idx].Key.Name,
Signal: telemetrytypes.SignalLogs,
FieldContext: query.Order[idx].Key.FieldContext,
FieldDataType: query.Order[idx].Key.FieldDataType,
})
}
// Force every selector onto the audit source with exact matching,
// overriding whatever the expression parser produced.
for idx := range keySelectors {
keySelectors[idx].Signal = telemetrytypes.SignalLogs
keySelectors[idx].Source = telemetrytypes.SourceAudit
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
}
return keySelectors
}
// adjustKeys normalizes the query's select/group-by/order keys against the
// fetched metadata and logs every adjustment made. The id and timestamp
// keys are prepended to the metadata map so they always resolve (they are
// selected/ordered by default).
func (b *auditQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[string][]*telemetrytypes.TelemetryFieldKey, query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation], requestType qbtypes.RequestType) qbtypes.QueryBuilderQuery[qbtypes.LogAggregation] {
keys["id"] = append([]*telemetrytypes.TelemetryFieldKey{{
Name: "id",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
}}, keys["id"]...)
keys["timestamp"] = append([]*telemetrytypes.TelemetryFieldKey{{
Name: "timestamp",
Signal: telemetrytypes.SignalLogs,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
}}, keys["timestamp"]...)
actions := querybuilder.AdjustKeysForAliasExpressions(&query, requestType)
actions = append(actions, querybuilder.AdjustDuplicateKeys(&query)...)
for idx := range query.SelectFields {
actions = append(actions, b.adjustKey(&query.SelectFields[idx], keys)...)
}
for idx := range query.GroupBy {
actions = append(actions, b.adjustKey(&query.GroupBy[idx].TelemetryFieldKey, keys)...)
}
for idx := range query.Order {
actions = append(actions, b.adjustKey(&query.Order[idx].Key.TelemetryFieldKey, keys)...)
}
// Log each adjustment for observability; adjustments mutate query in place
// via the pointers passed above.
for _, action := range actions {
b.logger.InfoContext(ctx, "key adjustment action", slog.String("action", action))
}
return query
}
// adjustKey normalizes key in place against the metadata map, preferring
// the canonical intrinsic-field definition when key names a known
// intrinsic column. Returns the adjustment actions taken (for logging).
func (b *auditQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {
	// Single map lookup instead of the previous check-then-index pair.
	if intrinsicField, ok := IntrinsicFields[key.Name]; ok {
		return querybuilder.AdjustKey(key, keys, &intrinsicField)
	}
	return querybuilder.AdjustKey(key, keys, nil)
}
// buildListQuery builds the raw-rows (list) statement: timestamp and id
// are always selected, followed by either the full default column set or
// the caller's explicit select fields. Ordering, limit (default 100) and
// offset come from the query; a resource-filter CTE is prepended when one
// applies.
func (b *auditQueryStatementBuilder) buildListQuery(
ctx context.Context,
sb *sqlbuilder.SelectBuilder,
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
start, end uint64,
keys map[string][]*telemetrytypes.TelemetryFieldKey,
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
var (
cteFragments []string
cteArgs [][]any
)
// Attach the resource-filter CTE if the query has resource constraints.
if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
return nil, err
} else if frag != "" {
cteFragments = append(cteFragments, frag)
cteArgs = append(cteArgs, args)
}
// timestamp and id are always part of the projection.
sb.Select(TimestampColumn)
sb.SelectMore(IDColumn)
if len(query.SelectFields) == 0 {
// No explicit selection: return the full default column set.
sb.SelectMore(TraceIDColumn)
sb.SelectMore(SpanIDColumn)
sb.SelectMore(TraceFlagsColumn)
sb.SelectMore(SeverityTextColumn)
sb.SelectMore(SeverityNumberColumn)
sb.SelectMore(ScopeNameColumn)
sb.SelectMore(ScopeVersionColumn)
sb.SelectMore(BodyColumn)
sb.SelectMore(EventNameColumn)
sb.SelectMore(AttributesStringColumn)
sb.SelectMore(AttributesNumberColumn)
sb.SelectMore(AttributesBoolColumn)
sb.SelectMore(ResourceColumn)
sb.SelectMore(ScopeStringColumn)
} else {
for index := range query.SelectFields {
// Skip timestamp/id: already selected above.
if query.SelectFields[index].Name == TimestampColumn || query.SelectFields[index].Name == IDColumn {
continue
}
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
if err != nil {
return nil, err
}
sb.SelectMore(colExpr)
}
}
sb.From(fmt.Sprintf("%s.%s", DBName, AuditLogsTableName))
preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
if err != nil {
return nil, err
}
for _, orderBy := range query.Order {
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &orderBy.Key.TelemetryFieldKey, keys)
if err != nil {
return nil, err
}
sb.OrderBy(fmt.Sprintf("%s %s", colExpr, orderBy.Direction.StringValue()))
}
// Default page size of 100 when no limit is given.
if query.Limit > 0 {
sb.Limit(query.Limit)
} else {
sb.Limit(100)
}
if query.Offset > 0 {
sb.Offset(query.Offset)
}
mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
// Prepend CTE fragments (and their args) ahead of the main statement.
finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
stmt := &qbtypes.Statement{
Query: finalSQL,
Args: finalArgs,
}
// Surface any warnings produced while preparing the WHERE clause.
if preparedWhereClause != nil {
stmt.Warnings = preparedWhereClause.Warnings
stmt.WarningsDocURL = preparedWhereClause.WarningsDocURL
}
return stmt, nil
}
// buildTimeSeriesQuery builds the time-series statement: rows are bucketed
// into step intervals ("ts"), grouped by the query's group-by keys, and
// aggregated into __result_N columns. When a limit is combined with
// grouping, an auxiliary scalar query is attached as a CTE (__limit_cte)
// to restrict output to the top groups.
//
// Refactor: the GroupBy/Having/OrderBy/build tail was previously duplicated
// verbatim in both the limited and unlimited branches; it is now emitted
// once. go-sqlbuilder accumulates clauses by kind, so calling Where before
// GroupBy (or vice versa) produces identical SQL.
func (b *auditQueryStatementBuilder) buildTimeSeriesQuery(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
	var (
		cteFragments []string
		cteArgs      [][]any
	)
	// Attach the resource-filter CTE if the query has resource constraints.
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}
	// Bucket timestamps into step-sized intervals.
	sb.SelectMore(fmt.Sprintf(
		"toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL %d SECOND) AS ts",
		int64(query.StepInterval.Seconds()),
	))
	var allGroupByArgs []any
	fieldNames := make([]string, 0, len(query.GroupBy))
	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}
		colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.Name)
		allGroupByArgs = append(allGroupByArgs, args...)
		sb.SelectMore(colExpr)
		fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.Name))
	}
	allAggChArgs := make([]any, 0)
	for i, agg := range query.Aggregations {
		rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, start, end, agg.Expression, uint64(query.StepInterval.Seconds()), keys)
		if err != nil {
			return nil, err
		}
		allAggChArgs = append(allAggChArgs, chArgs...)
		sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, i))
	}
	sb.From(fmt.Sprintf("%s.%s", DBName, AuditLogsTableName))
	preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}
	// With a limit and grouping, restrict the series to the top groups
	// computed by a scalar query attached as the __limit_cte CTE.
	if query.Limit > 0 && len(query.GroupBy) > 0 {
		cteSB := sqlbuilder.NewSelectBuilder()
		cteStmt, err := b.buildScalarQuery(ctx, cteSB, query, start, end, keys, true, variables)
		if err != nil {
			return nil, err
		}
		cteFragments = append(cteFragments, fmt.Sprintf("__limit_cte AS (%s)", cteStmt.Query))
		cteArgs = append(cteArgs, cteStmt.Args)
		tuple := fmt.Sprintf("(%s)", strings.Join(fieldNames, ", "))
		sb.Where(fmt.Sprintf("%s GLOBAL IN (SELECT %s FROM __limit_cte)", tuple, strings.Join(fieldNames, ", ")))
	}
	sb.GroupBy("ts")
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
	if query.Having != nil && query.Having.Expression != "" {
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr, err := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
		if err != nil {
			return nil, err
		}
		sb.Having(rewrittenExpr)
	}
	if len(query.Order) != 0 {
		// Aggregation-targeted order-bys are skipped here (they only apply
		// to the scalar/limit query); group-key order-bys are emitted, then
		// ts desc is always appended last.
		for _, orderBy := range query.Order {
			if _, ok := aggOrderBy(orderBy, query); !ok {
				sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
			}
		}
		sb.OrderBy("ts desc")
	}
	combinedArgs := append(allGroupByArgs, allAggChArgs...)
	mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
	finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
	finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
	stmt := &qbtypes.Statement{
		Query: finalSQL,
		Args:  finalArgs,
	}
	// Surface any warnings produced while preparing the WHERE clause.
	if preparedWhereClause != nil {
		stmt.Warnings = preparedWhereClause.Warnings
		stmt.WarningsDocURL = preparedWhereClause.WarningsDocURL
	}
	return stmt, nil
}
// buildScalarQuery builds a single-row (scalar) aggregation statement over
// the audit logs table: group-by columns, aggregation expressions aliased
// as __result_<idx>, HAVING, ORDER BY and LIMIT.
//
// sb is owned by the caller and mutated in place; start/end are the time
// range in nanoseconds; keys map field names to resolved telemetry columns.
// When skipResourceCTE is true the resource-filter CTE fragment is dropped
// from the output even though its WHERE membership condition is still added
// (NOTE(review): presumably the caller supplies __resource_filter at an
// outer level — confirm with callers).
func (b *auditQueryStatementBuilder) buildScalarQuery(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	skipResourceCTE bool,
	variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
	var (
		cteFragments []string
		cteArgs      [][]any
	)
	// Attach the resource-fingerprint filter; keep its CTE fragment only
	// when the caller has not asked to skip it.
	if frag, args, err := b.maybeAttachResourceFilter(ctx, sb, query, start, end, variables); err != nil {
		return nil, err
	} else if frag != "" && !skipResourceCTE {
		cteFragments = append(cteFragments, frag)
		cteArgs = append(cteArgs, args)
	}
	allAggChArgs := []any{}
	var allGroupByArgs []any
	// One select column per group-by key, stringified and aliased to the
	// key's display name.
	for _, gb := range query.GroupBy {
		expr, args, err := querybuilder.CollisionHandledFinalExpr(ctx, start, end, &gb.TelemetryFieldKey, b.fm, b.cb, keys, telemetrytypes.FieldDataTypeString, b.jsonKeyToKey)
		if err != nil {
			return nil, err
		}
		colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.Name)
		allGroupByArgs = append(allGroupByArgs, args...)
		sb.SelectMore(colExpr)
	}
	// Window length in seconds, passed to rate-style aggregation rewrites.
	rateInterval := (end - start) / querybuilder.NsToSeconds
	if len(query.Aggregations) > 0 {
		for idx := range query.Aggregations {
			aggExpr := query.Aggregations[idx]
			rewritten, chArgs, err := b.aggExprRewriter.Rewrite(ctx, start, end, aggExpr.Expression, rateInterval, keys)
			if err != nil {
				return nil, err
			}
			allAggChArgs = append(allAggChArgs, chArgs...)
			// Aggregations are addressed positionally as __result_<idx> in
			// ORDER BY and HAVING.
			sb.SelectMore(fmt.Sprintf("%s AS __result_%d", rewritten, idx))
		}
	}
	sb.From(fmt.Sprintf("%s.%s", DBName, AuditLogsTableName))
	preparedWhereClause, err := b.addFilterCondition(ctx, sb, start, end, query, keys, variables)
	if err != nil {
		return nil, err
	}
	sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
	if query.Having != nil && query.Having.Expression != "" {
		// HAVING may reference aggregation aliases; rewrite them to the
		// __result_<idx> form produced above.
		rewriter := querybuilder.NewHavingExpressionRewriter()
		rewrittenExpr, err := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
		if err != nil {
			return nil, err
		}
		sb.Having(rewrittenExpr)
	}
	// Order by an aggregation when the key maps to one (by alias,
	// expression, or index), otherwise by the named group-by column.
	for _, orderBy := range query.Order {
		idx, ok := aggOrderBy(orderBy, query)
		if ok {
			sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
		} else {
			sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
		}
	}
	// Default ordering: first aggregation, descending.
	if len(query.Order) == 0 {
		sb.OrderBy("__result_0 DESC")
	}
	if query.Limit > 0 {
		sb.Limit(query.Limit)
	}
	// Argument order must mirror placeholder order: group-by args, then
	// aggregation args; CTE args are prepended last.
	combinedArgs := append(allGroupByArgs, allAggChArgs...)
	mainSQL, mainArgs := sb.BuildWithFlavor(sqlbuilder.ClickHouse, combinedArgs...)
	finalSQL := querybuilder.CombineCTEs(cteFragments) + mainSQL
	finalArgs := querybuilder.PrependArgs(cteArgs, mainArgs)
	stmt := &qbtypes.Statement{
		Query: finalSQL,
		Args:  finalArgs,
	}
	// Surface any filter-translation warnings to the caller.
	if preparedWhereClause != nil {
		stmt.Warnings = preparedWhereClause.Warnings
		stmt.WarningsDocURL = preparedWhereClause.WarningsDocURL
	}
	return stmt, nil
}
// addFilterCondition applies the query's filter expression (when present)
// and the timestamp / ts_bucket_start range bounds to sb. It returns the
// prepared WHERE clause so callers can propagate its warnings; the result
// is nil when the query carries no filter expression.
func (b *auditQueryStatementBuilder) addFilterCondition(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	start, end uint64,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	keys map[string][]*telemetrytypes.TelemetryFieldKey,
	variables map[string]qbtypes.VariableItem,
) (*querybuilder.PreparedWhereClause, error) {
	var prepared *querybuilder.PreparedWhereClause
	if query.Filter != nil && query.Filter.Expression != "" {
		var err error
		prepared, err = querybuilder.PrepareWhereClause(query.Filter.Expression, querybuilder.FilterExprVisitorOpts{
			Context:            ctx,
			Logger:             b.logger,
			FieldMapper:        b.fm,
			ConditionBuilder:   b.cb,
			FieldKeys:          keys,
			SkipResourceFilter: true,
			FullTextColumn:     b.fullTextColumn,
			JsonKeyToKey:       b.jsonKeyToKey,
			Variables:          variables,
			StartNs:            start,
			EndNs:              end,
		})
		if err != nil {
			return nil, err
		}
	}
	if prepared != nil {
		sb.AddWhereClause(prepared.WhereClause)
	}
	// Lower bound: constrain the raw timestamp and the bucket column; the
	// bucket is widened backwards by the standard adjustment window.
	if start != 0 {
		lowBucket := start/querybuilder.NsToSeconds - querybuilder.BucketAdjustment
		sb.Where(sb.GE("timestamp", fmt.Sprintf("%d", start)), sb.GE("ts_bucket_start", lowBucket))
	}
	// Upper bound: timestamp is exclusive (<), bucket bound inclusive (<=).
	if end != 0 {
		highBucket := end / querybuilder.NsToSeconds
		sb.Where(sb.L("timestamp", fmt.Sprintf("%d", end)), sb.LE("ts_bucket_start", highBucket))
	}
	return prepared, nil
}
// aggOrderBy reports whether the order-by key k refers to one of the
// query's aggregations — matched by alias, by raw expression, or by
// positional index ("0", "1", …) — and returns that aggregation's index
// when it does.
func aggOrderBy(k qbtypes.OrderBy, q qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]) (int, bool) {
	for i, agg := range q.Aggregations {
		switch k.Key.Name {
		case agg.Alias, agg.Expression, fmt.Sprintf("%d", i):
			return i, true
		}
	}
	return 0, false
}
// maybeAttachResourceFilter builds the resource-fingerprint sub-query for
// the given time range, adds the GLOBAL IN membership condition to sb, and
// returns the CTE fragment (named __resource_filter) with its arguments.
func (b *auditQueryStatementBuilder) maybeAttachResourceFilter(
	ctx context.Context,
	sb *sqlbuilder.SelectBuilder,
	query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation],
	start, end uint64,
	variables map[string]qbtypes.VariableItem,
) (cteSQL string, cteArgs []any, err error) {
	filterStmt, buildErr := b.resourceFilterStmtBuilder.Build(ctx, start, end, qbtypes.RequestTypeRaw, query, variables)
	if buildErr != nil {
		return "", nil, buildErr
	}
	// Restrict the main query to rows whose fingerprint appears in the CTE.
	sb.Where("resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter)")
	cteSQL = fmt.Sprintf("__resource_filter AS (%s)", filterStmt.Query)
	return cteSQL, filterStmt.Args, nil
}

View File

@@ -1,223 +0,0 @@
package telemetryaudit
import (
"context"
"testing"
"time"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/stretchr/testify/require"
)
// auditFieldKeyMap returns the fixed set of audit telemetry field keys used
// by the statement-builder tests, keyed by field name. Each entry is a
// single-element slice so it matches the metadata store's key-map shape.
func auditFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
	type fieldSpec struct {
		name         string
		ctx          telemetrytypes.FieldContext
		dt           telemetrytypes.FieldDataType
		materialized bool
	}
	attr := telemetrytypes.FieldContextAttribute
	res := telemetrytypes.FieldContextResource
	str := telemetrytypes.FieldDataTypeString
	i64 := telemetrytypes.FieldDataTypeInt64
	specs := []fieldSpec{
		{"service.name", res, str, false},
		{"signoz.audit.action", attr, str, true},
		{"signoz.audit.outcome", attr, str, true},
		{"signoz.audit.principal.email", attr, str, true},
		{"signoz.audit.principal.id", attr, str, true},
		{"signoz.audit.principal.type", attr, str, true},
		{"signoz.audit.resource.kind", res, str, false},
		{"signoz.audit.resource.id", res, str, false},
		{"signoz.audit.action_category", attr, str, false},
		{"signoz.audit.error.type", attr, str, false},
		{"signoz.audit.error.code", attr, str, false},
		{"http.request.method", attr, str, false},
		{"http.response.status_code", attr, i64, false},
	}
	keyMap := make(map[string][]*telemetrytypes.TelemetryFieldKey, len(specs))
	for _, s := range specs {
		keyMap[s.name] = []*telemetrytypes.TelemetryFieldKey{{
			Name:          s.name,
			Signal:        telemetrytypes.SignalLogs,
			FieldContext:  s.ctx,
			FieldDataType: s.dt,
			Materialized:  s.materialized,
		}}
	}
	return keyMap
}
// newTestAuditStatementBuilder wires up an auditQueryStatementBuilder
// against a mock metadata store pre-loaded with the audit field keys, for
// use by the statement-builder tests.
func newTestAuditStatementBuilder() *auditQueryStatementBuilder {
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	mockMetadataStore.KeysMap = auditFieldKeyMap()
	fm := NewFieldMapper()
	cb := NewConditionBuilder(fm)
	// No telemetry store or key fetcher is needed for expression rewriting
	// in these tests, hence the nil arguments.
	aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
	return NewAuditQueryStatementBuilder(
		instrumentationtest.New().ToProviderSettings(),
		mockMetadataStore,
		fm,
		cb,
		aggExprRewriter,
		DefaultFullTextColumn,
		nil, // NOTE(review): jsonKeyToKey left nil — appears unused by these cases
	)
}
// TestStatementBuilder exercises the audit statement builder across raw
// list, scalar, and time-series request types, asserting the exact
// generated ClickHouse SQL and the precise argument order.
func TestStatementBuilder(t *testing.T) {
	statementBuilder := newTestAuditStatementBuilder()
	ctx := context.Background()
	testCases := []struct {
		name        string
		requestType qbtypes.RequestType
		query       qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
		expected    qbtypes.Statement
		expectedErr error
	}{
		// List: all actions by a specific user (materialized principal.id filter)
		{
			name:        "ListByPrincipalID",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.principal.id = '019a-1234-abcd-5678'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$principal$$id` = ? AND `attribute_string_signoz$$audit$$principal$$id_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "019a-1234-abcd-5678", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: all failed actions (materialized outcome filter)
		{
			name:        "ListByOutcomeFailure",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.outcome = 'failure'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "failure", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: change history of a specific dashboard (two materialized column AND)
		{
			name:        "ListByResourceKindAndID",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.resource.kind = 'dashboard' AND signoz.audit.resource.id = '019b-5678-efgh-9012'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE ((simpleJSONExtractString(labels, 'signoz.audit.resource.kind') = ? AND labels LIKE ? AND labels LIKE ?) AND (simpleJSONExtractString(labels, 'signoz.audit.resource.id') = ? AND labels LIKE ? AND labels LIKE ?)) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{"dashboard", "%signoz.audit.resource.kind%", "%signoz.audit.resource.kind\":\"dashboard%", "019b-5678-efgh-9012", "%signoz.audit.resource.id%", "%signoz.audit.resource.id\":\"019b-5678-efgh-9012%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: all dashboard deletions (compliance — resource.kind + action AND)
		{
			name:        "ListByResourceKindAndAction",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.resource.kind = 'dashboard' AND signoz.audit.action = 'delete'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE (simpleJSONExtractString(labels, 'signoz.audit.resource.kind') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$action` = ? AND `attribute_string_signoz$$audit$$action_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{"dashboard", "%signoz.audit.resource.kind%", "%signoz.audit.resource.kind\":\"dashboard%", uint64(1747945619), uint64(1747983448), "delete", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// List: all actions by service accounts (materialized principal.type)
		{
			name:        "ListByPrincipalType",
			requestType: qbtypes.RequestTypeRaw,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal: telemetrytypes.SignalLogs,
				Source: telemetrytypes.SourceAudit,
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.principal.type = 'service_account'",
				},
				Limit: 100,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, event_name, attributes_string, attributes_number, attributes_bool, resource, scope_string FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$principal$$type` = ? AND `attribute_string_signoz$$audit$$principal$$type_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
				Args:  []any{uint64(1747945619), uint64(1747983448), "service_account", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 100},
			},
		},
		// Scalar: alert — count forbidden errors (outcome + action AND)
		{
			name:        "ScalarCountByOutcomeAndAction",
			requestType: qbtypes.RequestTypeScalar,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal:       telemetrytypes.SignalLogs,
				Source:       telemetrytypes.SourceAudit,
				StepInterval: qbtypes.Step{Duration: 60 * time.Second},
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.outcome = 'failure' AND signoz.audit.action = 'update'",
				},
				Aggregations: []qbtypes.LogAggregation{
					{Expression: "count()"},
				},
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT count() AS __result_0 FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND (`attribute_string_signoz$$audit$$action` = ? AND `attribute_string_signoz$$audit$$action_exists` = ?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY __result_0 DESC",
				Args:  []any{uint64(1747945619), uint64(1747983448), "failure", true, "update", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
			},
		},
		// TimeSeries: failures grouped by principal email with top-N limit
		{
			name:        "TimeSeriesFailuresGroupedByPrincipal",
			requestType: qbtypes.RequestTypeTimeSeries,
			query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
				Signal:       telemetrytypes.SignalLogs,
				Source:       telemetrytypes.SourceAudit,
				StepInterval: qbtypes.Step{Duration: 60 * time.Second},
				Aggregations: []qbtypes.LogAggregation{
					{Expression: "count()"},
				},
				Filter: &qbtypes.Filter{
					Expression: "signoz.audit.outcome = 'failure'",
				},
				GroupBy: []qbtypes.GroupByKey{
					{TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: "signoz.audit.principal.email"}},
				},
				Limit: 5,
			},
			expected: qbtypes.Statement{
				Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_audit.distributed_logs_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_signoz$$audit$$principal$$email_exists` = ?, `attribute_string_signoz$$audit$$principal$$email`, NULL)) AS `signoz.audit.principal.email`, count() AS __result_0 FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `signoz.audit.principal.email` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 60 SECOND) AS ts, toString(multiIf(`attribute_string_signoz$$audit$$principal$$email_exists` = ?, `attribute_string_signoz$$audit$$principal$$email`, NULL)) AS `signoz.audit.principal.email`, count() AS __result_0 FROM signoz_audit.distributed_logs WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (`attribute_string_signoz$$audit$$outcome` = ? AND `attribute_string_signoz$$audit$$outcome_exists` = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`signoz.audit.principal.email`) GLOBAL IN (SELECT `signoz.audit.principal.email` FROM __limit_cte) GROUP BY ts, `signoz.audit.principal.email`",
				Args:  []any{uint64(1747945619), uint64(1747983448), true, "failure", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 5, true, "failure", true, "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448)},
			},
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			// Fixed millisecond time range shared by every case; the builder
			// converts it to nanoseconds/bucket seconds in the args.
			q, err := statementBuilder.Build(ctx, 1747947419000, 1747983448000, testCase.requestType, testCase.query, nil)
			if testCase.expectedErr != nil {
				require.Error(t, err)
				require.Contains(t, err.Error(), testCase.expectedErr.Error())
			} else {
				require.NoError(t, err)
				require.Equal(t, testCase.expected.Query, q.Query)
				require.Equal(t, testCase.expected.Args, q.Args)
			}
		})
	}
}

View File

@@ -1,12 +0,0 @@
package telemetryaudit
// ClickHouse database and table identifiers for the audit telemetry store.
const (
	// DBName is the ClickHouse database that holds audit telemetry.
	DBName = "signoz_audit"
	// AuditLogsTableName is the distributed audit logs table; the *Local*
	// names refer to the per-shard underlying tables.
	AuditLogsTableName      = "distributed_logs"
	AuditLogsLocalTableName = "logs"
	// Tag/attribute metadata tables queried for field keys and values.
	TagAttributesTableName      = "distributed_tag_attributes"
	TagAttributesLocalTableName = "tag_attributes"
	LogAttributeKeysTblName     = "distributed_logs_attribute_keys"
	LogResourceKeysTblName      = "distributed_logs_resource_keys"
	// LogsResourceTableName maps resource fingerprints to label sets; it
	// backs the __resource_filter CTE.
	LogsResourceTableName = "distributed_logs_resource"
)

View File

@@ -45,7 +45,6 @@ func NewLogQueryStatementBuilder(
DBName,
LogsResourceV2TableName,
telemetrytypes.SignalLogs,
telemetrytypes.SourceUnspecified,
metadataStore,
fullTextColumn,
jsonKeyToKey,

View File

@@ -13,7 +13,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetryaudit"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
@@ -28,7 +27,6 @@ import (
var (
ErrFailedToGetTracesKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get traces keys")
ErrFailedToGetLogsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get logs keys")
ErrFailedToGetAuditKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get audit keys")
ErrFailedToGetTblStatement = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get tbl statement")
ErrFailedToGetMetricsKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get metrics keys")
ErrFailedToGetMeterKeys = errors.Newf(errors.TypeInternal, errors.CodeInternal, "failed to get meter keys")
@@ -52,11 +50,6 @@ type telemetryMetaStore struct {
logAttributeKeysTblName string
logResourceKeysTblName string
logsV2TblName string
auditDBName string
auditLogsTblName string
auditFieldsTblName string
auditAttributeKeysTblName string
auditResourceKeysTblName string
relatedMetadataDBName string
relatedMetadataTblName string
columnEvolutionMetadataTblName string
@@ -86,11 +79,6 @@ func NewTelemetryMetaStore(
logsFieldsTblName string,
logAttributeKeysTblName string,
logResourceKeysTblName string,
auditDBName string,
auditLogsTblName string,
auditFieldsTblName string,
auditAttributeKeysTblName string,
auditResourceKeysTblName string,
relatedMetadataDBName string,
relatedMetadataTblName string,
columnEvolutionMetadataTblName string,
@@ -113,11 +101,6 @@ func NewTelemetryMetaStore(
logsFieldsTblName: logsFieldsTblName,
logAttributeKeysTblName: logAttributeKeysTblName,
logResourceKeysTblName: logResourceKeysTblName,
auditDBName: auditDBName,
auditLogsTblName: auditLogsTblName,
auditFieldsTblName: auditFieldsTblName,
auditAttributeKeysTblName: auditAttributeKeysTblName,
auditResourceKeysTblName: auditResourceKeysTblName,
relatedMetadataDBName: relatedMetadataDBName,
relatedMetadataTblName: relatedMetadataTblName,
columnEvolutionMetadataTblName: columnEvolutionMetadataTblName,
@@ -609,227 +592,6 @@ func (t *telemetryMetaStore) getLogsKeys(ctx context.Context, fieldKeySelectors
return keys, complete, nil
}
// auditTblStatementToFieldKeys extracts the materialized field keys from
// the audit logs table's SHOW CREATE TABLE statement and tags each one
// with the logs signal.
func (t *telemetryMetaStore) auditTblStatementToFieldKeys(ctx context.Context) ([]*telemetrytypes.TelemetryFieldKey, error) {
	// Annotate the ClickHouse query with instrumentation comment values.
	ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
		instrumentationtypes.TelemetrySignal:  telemetrytypes.SignalLogs.StringValue(),
		instrumentationtypes.CodeNamespace:    "metadata",
		instrumentationtypes.CodeFunctionName: "auditTblStatementToFieldKeys",
	})
	query := fmt.Sprintf("SHOW CREATE TABLE %s.%s", t.auditDBName, t.auditLogsTblName)
	statements := []telemetrytypes.ShowCreateTableStatement{}
	err := t.telemetrystore.ClickhouseDB().Select(ctx, &statements, query)
	if err != nil {
		return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetTblStatement.Error())
	}
	materialisedKeys, err := ExtractFieldKeysFromTblStatement(statements[0].Statement)
	if err != nil {
		return nil, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
	}
	// Audit keys are treated as part of the logs signal.
	for idx := range materialisedKeys {
		materialisedKeys[idx].Signal = telemetrytypes.SignalLogs
	}
	return materialisedKeys, nil
}
// getAuditKeys resolves field-key selectors against the audit attribute and
// resource key tables, merging in materialized keys from the table DDL and
// the intrinsic audit fields. The boolean result reports whether the key
// set is complete (i.e. not truncated by the limit).
func (t *telemetryMetaStore) getAuditKeys(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) ([]*telemetrytypes.TelemetryFieldKey, bool, error) {
	ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
		instrumentationtypes.TelemetrySignal:  telemetrytypes.SignalLogs.StringValue(),
		instrumentationtypes.CodeNamespace:    "metadata",
		instrumentationtypes.CodeFunctionName: "getAuditKeys",
	})
	if len(fieldKeySelectors) == 0 {
		return nil, true, nil
	}
	// Materialized keys from the DDL win over plain metadata rows below.
	matKeys, err := t.auditTblStatementToFieldKeys(ctx)
	if err != nil {
		return nil, false, err
	}
	mapOfKeys := make(map[string]*telemetrytypes.TelemetryFieldKey)
	for _, key := range matKeys {
		mapOfKeys[key.Name+";"+key.FieldContext.StringValue()+";"+key.FieldDataType.StringValue()] = key
	}
	var queries []string
	var allArgs []any
	// Decide which key tables to query from the selectors' contexts; an
	// unspecified context means both.
	queryAttributeTable := false
	queryResourceTable := false
	for _, selector := range fieldKeySelectors {
		if selector.FieldContext == telemetrytypes.FieldContextUnspecified {
			queryAttributeTable = true
			queryResourceTable = true
			break
		} else if selector.FieldContext == telemetrytypes.FieldContextAttribute {
			queryAttributeTable = true
		} else if selector.FieldContext == telemetrytypes.FieldContextResource {
			queryResourceTable = true
		}
	}
	tablesToQuery := []struct {
		fieldContext telemetrytypes.FieldContext
		shouldQuery  bool
		tblName      string
	}{
		{telemetrytypes.FieldContextAttribute, queryAttributeTable, t.auditDBName + "." + t.auditAttributeKeysTblName},
		{telemetrytypes.FieldContextResource, queryResourceTable, t.auditDBName + "." + t.auditResourceKeysTblName},
	}
	// Build one sub-query per table; they are UNION ALL'ed below.
	for _, table := range tablesToQuery {
		if !table.shouldQuery {
			continue
		}
		fieldContext := table.fieldContext
		tblName := table.tblName
		sb := sqlbuilder.Select(
			"name AS tag_key",
			fmt.Sprintf("'%s' AS tag_type", fieldContext.TagType()),
			"lower(datatype) AS tag_data_type",
			fmt.Sprintf("%d AS priority", getPriorityForContext(fieldContext)),
		).From(tblName)
		var limit int
		conds := []string{}
		for _, fieldKeySelector := range fieldKeySelectors {
			// Skip selectors that target a different field context.
			if fieldKeySelector.FieldContext != telemetrytypes.FieldContextUnspecified && fieldKeySelector.FieldContext != fieldContext {
				continue
			}
			fieldKeyConds := []string{}
			// Exact match or case-insensitive substring match on the name.
			if fieldKeySelector.SelectorMatchType == telemetrytypes.FieldSelectorMatchTypeExact {
				fieldKeyConds = append(fieldKeyConds, sb.E("name", fieldKeySelector.Name))
			} else {
				fieldKeyConds = append(fieldKeyConds, sb.ILike("name", "%"+escapeForLike(fieldKeySelector.Name)+"%"))
			}
			if fieldKeySelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
				fieldKeyConds = append(fieldKeyConds, sb.E("datatype", fieldKeySelector.FieldDataType.TagDataType()))
			}
			if len(fieldKeyConds) > 0 {
				conds = append(conds, sb.And(fieldKeyConds...))
			}
			limit += fieldKeySelector.Limit
		}
		if len(conds) > 0 {
			sb.Where(sb.Or(conds...))
		}
		sb.GroupBy("name", "datatype")
		if limit == 0 {
			limit = 1000
		}
		query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
		queries = append(queries, query)
		allArgs = append(allArgs, args...)
	}
	if len(queries) == 0 {
		return []*telemetrytypes.TelemetryFieldKey{}, true, nil
	}
	// Combined limit across all selectors (default 1000); one extra row is
	// fetched so truncation can be detected.
	var limit int
	for _, fieldKeySelector := range fieldKeySelectors {
		limit += fieldKeySelector.Limit
	}
	if limit == 0 {
		limit = 1000
	}
	mainQuery := fmt.Sprintf(`
SELECT tag_key, tag_type, tag_data_type, max(priority) as priority
FROM (
%s
) AS combined_results
GROUP BY tag_key, tag_type, tag_data_type
ORDER BY priority
LIMIT %d
`, strings.Join(queries, " UNION ALL "), limit+1)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, mainQuery, allArgs...)
	if err != nil {
		return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
	}
	defer rows.Close()
	keys := []*telemetrytypes.TelemetryFieldKey{}
	rowCount := 0
	// Selector names double as search texts for the intrinsic-field match
	// at the end of the function.
	searchTexts := []string{}
	for _, fieldKeySelector := range fieldKeySelectors {
		searchTexts = append(searchTexts, fieldKeySelector.Name)
	}
	for rows.Next() {
		rowCount++
		// Stop once past the limit; the extra row only signals truncation.
		if rowCount > limit {
			break
		}
		var name string
		var fieldContext telemetrytypes.FieldContext
		var fieldDataType telemetrytypes.FieldDataType
		var priority uint8
		err = rows.Scan(&name, &fieldContext, &fieldDataType, &priority)
		if err != nil {
			return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
		}
		// Prefer the materialized key from the DDL when one exists for this
		// name/context/datatype triple.
		key, ok := mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()]
		if !ok {
			key = &telemetrytypes.TelemetryFieldKey{
				Name:          name,
				Signal:        telemetrytypes.SignalLogs,
				FieldContext:  fieldContext,
				FieldDataType: fieldDataType,
			}
		}
		keys = append(keys, key)
		mapOfKeys[name+";"+fieldContext.StringValue()+";"+fieldDataType.StringValue()] = key
	}
	if rows.Err() != nil {
		return nil, false, errors.Wrap(rows.Err(), errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
	}
	complete := rowCount <= limit
	// Add intrinsic audit fields (same as logs intrinsics: body, severity_text, etc.)
	staticKeys := maps.Keys(telemetryaudit.IntrinsicFields)
	for _, key := range staticKeys {
		// Include an intrinsic only when some selector text matches its
		// name (an empty selector name matches everything).
		found := false
		for _, v := range searchTexts {
			if v == "" || strings.Contains(key, v) {
				found = true
				break
			}
		}
		if found {
			if field, exists := telemetryaudit.IntrinsicFields[key]; exists {
				if _, added := mapOfKeys[field.Name+";"+field.FieldContext.StringValue()+";"+field.FieldDataType.StringValue()]; !added {
					keys = append(keys, &field)
				}
			}
		}
	}
	return keys, complete, nil
}
func getPriorityForContext(ctx telemetrytypes.FieldContext) int {
switch ctx {
case telemetrytypes.FieldContextLog:
@@ -1127,11 +889,7 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
case telemetrytypes.SignalTraces:
keys, complete, err = t.getTracesKeys(ctx, selectors)
case telemetrytypes.SignalLogs:
if fieldKeySelector.Source == telemetrytypes.SourceAudit {
keys, complete, err = t.getAuditKeys(ctx, selectors)
} else {
keys, complete, err = t.getLogsKeys(ctx, selectors)
}
keys, complete, err = t.getLogsKeys(ctx, selectors)
case telemetrytypes.SignalMetrics:
if fieldKeySelector.Source == telemetrytypes.SourceMeter {
keys, complete, err = t.getMeterSourceMetricKeys(ctx, selectors)
@@ -1180,7 +938,6 @@ func (t *telemetryMetaStore) GetKeys(ctx context.Context, fieldKeySelector *tele
func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors []*telemetrytypes.FieldKeySelector) (map[string][]*telemetrytypes.TelemetryFieldKey, bool, error) {
logsSelectors := []*telemetrytypes.FieldKeySelector{}
auditSelectors := []*telemetrytypes.FieldKeySelector{}
tracesSelectors := []*telemetrytypes.FieldKeySelector{}
metricsSelectors := []*telemetrytypes.FieldKeySelector{}
meterSourceMetricsSelectors := []*telemetrytypes.FieldKeySelector{}
@@ -1188,11 +945,7 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
for _, fieldKeySelector := range fieldKeySelectors {
switch fieldKeySelector.Signal {
case telemetrytypes.SignalLogs:
if fieldKeySelector.Source == telemetrytypes.SourceAudit {
auditSelectors = append(auditSelectors, fieldKeySelector)
} else {
logsSelectors = append(logsSelectors, fieldKeySelector)
}
logsSelectors = append(logsSelectors, fieldKeySelector)
case telemetrytypes.SignalTraces:
tracesSelectors = append(tracesSelectors, fieldKeySelector)
case telemetrytypes.SignalMetrics:
@@ -1212,10 +965,6 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
if err != nil {
return nil, false, err
}
auditKeys, auditComplete, err := t.getAuditKeys(ctx, auditSelectors)
if err != nil {
return nil, false, err
}
tracesKeys, tracesComplete, err := t.getTracesKeys(ctx, tracesSelectors)
if err != nil {
return nil, false, err
@@ -1230,15 +979,12 @@ func (t *telemetryMetaStore) GetKeysMulti(ctx context.Context, fieldKeySelectors
return nil, false, err
}
// Complete only if all queries are complete
complete := logsComplete && auditComplete && tracesComplete && metricsComplete
complete := logsComplete && tracesComplete && metricsComplete
mapOfKeys := make(map[string][]*telemetrytypes.TelemetryFieldKey)
for _, key := range logsKeys {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
}
for _, key := range auditKeys {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
}
for _, key := range tracesKeys {
mapOfKeys[key.Name] = append(mapOfKeys[key.Name], key)
}
@@ -1592,97 +1338,6 @@ func (t *telemetryMetaStore) getLogFieldValues(ctx context.Context, fieldValueSe
return values, complete, nil
}
// getAuditFieldValues returns distinct values for one audit field from the
// audit tag-attributes table, filtered by the selector's name, context,
// data type and (optionally) a substring of the value. The boolean result
// reports whether the value set is complete.
func (t *telemetryMetaStore) getAuditFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
	ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
		instrumentationtypes.TelemetrySignal:  telemetrytypes.SignalLogs.StringValue(),
		instrumentationtypes.CodeNamespace:    "metadata",
		instrumentationtypes.CodeFunctionName: "getAuditFieldValues",
	})
	limit := fieldValueSelector.Limit
	if limit == 0 {
		limit = 50
	}
	sb := sqlbuilder.Select("DISTINCT string_value, number_value").From(t.auditDBName + "." + t.auditFieldsTblName)
	if fieldValueSelector.Name != "" {
		sb.Where(sb.E("tag_key", fieldValueSelector.Name))
	}
	if fieldValueSelector.FieldContext != telemetrytypes.FieldContextUnspecified {
		sb.Where(sb.E("tag_type", fieldValueSelector.FieldContext.TagType()))
	}
	if fieldValueSelector.FieldDataType != telemetrytypes.FieldDataTypeUnspecified {
		sb.Where(sb.E("tag_data_type", fieldValueSelector.FieldDataType.TagDataType()))
	}
	// Optional case-insensitive substring match on the value itself; with
	// an unspecified data type both value columns are searched.
	if fieldValueSelector.Value != "" {
		switch fieldValueSelector.FieldDataType {
		case telemetrytypes.FieldDataTypeString:
			sb.Where(sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
		case telemetrytypes.FieldDataTypeNumber:
			sb.Where(sb.IsNotNull("number_value"))
			sb.Where(sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"))
		case telemetrytypes.FieldDataTypeUnspecified:
			sb.Where(sb.Or(
				sb.ILike("string_value", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
				sb.ILike("toString(number_value)", "%"+escapeForLike(fieldValueSelector.Value)+"%"),
			))
		}
	}
	// fetch one extra row to detect whether the result set is complete
	sb.Limit(limit + 1)
	query, args := sb.BuildWithFlavor(sqlbuilder.ClickHouse)
	rows, err := t.telemetrystore.ClickhouseDB().Query(ctx, query, args...)
	if err != nil {
		return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
	}
	defer rows.Close()
	values := &telemetrytypes.TelemetryFieldValues{}
	// seen deduplicates across rows; rowCount tracks fetched rows for the
	// completeness check while totalCount tracks values actually kept.
	seen := make(map[string]bool)
	rowCount := 0
	totalCount := 0
	for rows.Next() {
		rowCount++
		var stringValue string
		var numberValue float64
		err = rows.Scan(&stringValue, &numberValue)
		if err != nil {
			return nil, false, errors.Wrap(err, errors.TypeInternal, errors.CodeInternal, ErrFailedToGetAuditKeys.Error())
		}
		// Empty strings are skipped; duplicates are dropped via seen.
		if stringValue != "" && !seen[stringValue] {
			if totalCount >= limit {
				break
			}
			values.StringValues = append(values.StringValues, stringValue)
			seen[stringValue] = true
			totalCount++
		}
		// Zero number values are skipped; number keys are formatted with
		// %f so they share the seen map with strings.
		if numberValue != 0 {
			if totalCount >= limit {
				break
			}
			if !seen[fmt.Sprintf("%f", numberValue)] {
				values.NumberValues = append(values.NumberValues, numberValue)
				seen[fmt.Sprintf("%f", numberValue)] = true
				totalCount++
			}
		}
	}
	complete := rowCount <= limit
	return values, complete, nil
}
// getMetricFieldValues returns field values and whether the result is complete.
func (t *telemetryMetaStore) getMetricFieldValues(ctx context.Context, fieldValueSelector *telemetrytypes.FieldValueSelector) (*telemetrytypes.TelemetryFieldValues, bool, error) {
ctx = ctxtypes.NewContextWithCommentVals(ctx, map[string]string{
@@ -1973,11 +1628,7 @@ func (t *telemetryMetaStore) GetAllValues(ctx context.Context, fieldValueSelecto
case telemetrytypes.SignalTraces:
values, complete, err = t.getSpanFieldValues(ctx, fieldValueSelector)
case telemetrytypes.SignalLogs:
if fieldValueSelector.Source == telemetrytypes.SourceAudit {
values, complete, err = t.getAuditFieldValues(ctx, fieldValueSelector)
} else {
values, complete, err = t.getLogFieldValues(ctx, fieldValueSelector)
}
values, complete, err = t.getLogFieldValues(ctx, fieldValueSelector)
case telemetrytypes.SignalMetrics:
if fieldValueSelector.Source == telemetrytypes.SourceMeter {
values, complete, err = t.getMeterSourceMetricFieldValues(ctx, fieldValueSelector)

View File

@@ -5,7 +5,6 @@ import (
"testing"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/telemetryaudit"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -38,11 +37,6 @@ func TestGetFirstSeenFromMetricMetadata(t *testing.T) {
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetryaudit.DBName,
telemetryaudit.AuditLogsTableName,
telemetryaudit.TagAttributesTableName,
telemetryaudit.LogAttributeKeysTblName,
telemetryaudit.LogResourceKeysTblName,
DBName,
AttributesMetadataLocalTableName,
ColumnEvolutionMetadataTableName,

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/telemetryaudit"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
@@ -37,11 +36,6 @@ func newTestTelemetryMetaStoreTestHelper(store telemetrystore.TelemetryStore) te
telemetrylogs.TagAttributesV2TableName,
telemetrylogs.LogAttributeKeysTblName,
telemetrylogs.LogResourceKeysTblName,
telemetryaudit.DBName,
telemetryaudit.AuditLogsTableName,
telemetryaudit.TagAttributesTableName,
telemetryaudit.LogAttributeKeysTblName,
telemetryaudit.LogResourceKeysTblName,
DBName,
AttributesMetadataLocalTableName,
ColumnEvolutionMetadataTableName,

View File

@@ -9,6 +9,6 @@ const (
ColumnEvolutionMetadataTableName = "distributed_column_evolution_metadata"
PathTypesTableName = otelcollectorconst.DistributedPathTypesTable
// Column Evolution table stores promoted paths as (signal, column_name, field_context, field_name); see signoz-otel-collector metadata_migrations.
PromotedPathsTableName = "distributed_column_evolution_metadata"
SkipIndexTableName = "system.data_skipping_indices"
PromotedPathsTableName = "distributed_column_evolution_metadata"
SkipIndexTableName = "system.data_skipping_indices"
)

View File

@@ -21,7 +21,6 @@ type resourceFilterStatementBuilder[T any] struct {
conditionBuilder qbtypes.ConditionBuilder
metadataStore telemetrytypes.MetadataStore
signal telemetrytypes.Signal
source telemetrytypes.Source
fullTextColumn *telemetrytypes.TelemetryFieldKey
jsonKeyToKey qbtypes.JsonKeyToFieldFunc
@@ -38,7 +37,6 @@ func New[T any](
dbName string,
tableName string,
signal telemetrytypes.Signal,
source telemetrytypes.Source,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
@@ -54,7 +52,6 @@ func New[T any](
conditionBuilder: cb,
metadataStore: metadataStore,
signal: signal,
source: source,
fullTextColumn: fullTextColumn,
jsonKeyToKey: jsonKeyToKey,
}
@@ -75,7 +72,6 @@ func (b *resourceFilterStatementBuilder[T]) getKeySelectors(query qbtypes.QueryB
continue
}
keySelectors[idx].Signal = b.signal
keySelectors[idx].Source = b.source
keySelectors[idx].SelectorMatchType = telemetrytypes.FieldSelectorMatchTypeExact
filteredKeySelectors = append(filteredKeySelectors, keySelectors[idx])
}

View File

@@ -375,7 +375,6 @@ func TestResourceFilterStatementBuilder_Traces(t *testing.T) {
"signoz_traces",
"distributed_traces_v3_resource",
telemetrytypes.SignalTraces,
telemetrytypes.SourceUnspecified,
mockMetadataStore,
nil,
nil,
@@ -593,7 +592,6 @@ func TestResourceFilterStatementBuilder_Logs(t *testing.T) {
"signoz_logs",
"distributed_logs_v2_resource",
telemetrytypes.SignalLogs,
telemetrytypes.SourceUnspecified,
mockMetadataStore,
nil,
nil,
@@ -655,7 +653,6 @@ func TestResourceFilterStatementBuilder_Variables(t *testing.T) {
"signoz_traces",
"distributed_traces_v3_resource",
telemetrytypes.SignalTraces,
telemetrytypes.SourceUnspecified,
mockMetadataStore,
nil,
nil,

View File

@@ -49,7 +49,6 @@ func NewTraceQueryStatementBuilder(
DBName,
TracesResourceV3TableName,
telemetrytypes.SignalTraces,
telemetrytypes.SourceUnspecified,
metadataStore,
nil,
nil,

View File

@@ -39,7 +39,6 @@ func NewTraceOperatorStatementBuilder(
DBName,
TracesResourceV3TableName,
telemetrytypes.SignalTraces,
telemetrytypes.SourceUnspecified,
metadataStore,
nil,
nil,

View File

@@ -7,13 +7,11 @@ type Source struct {
}
var (
SourceAudit = Source{valuer.NewString("audit")}
SourceMeter = Source{valuer.NewString("meter")}
SourceUnspecified = Source{valuer.NewString("")}
)
// Enum returns the acceptable values for Source.
// TODO: Add SourceAudit once the frontend is ready for consumption.
func (Source) Enum() []any {
return []any{
SourceMeter,

View File

@@ -12,7 +12,6 @@ pytest_plugins = [
"fixtures.sqlite",
"fixtures.zookeeper",
"fixtures.signoz",
"fixtures.audit",
"fixtures.logs",
"fixtures.traces",
"fixtures.metrics",

View File

@@ -1,404 +0,0 @@
import datetime
import json
from abc import ABC
from typing import Any, Callable, Generator, List, Optional
import numpy as np
import pytest
from ksuid import KsuidMs
from fixtures import types
from fixtures.fingerprint import LogsOrTracesFingerprint
class AuditResource(ABC):
    """Row for the audit resource table: compact JSON labels, the resource
    fingerprint, and the start of the time bucket the resource was seen in."""

    labels: str
    fingerprint: str
    seen_at_ts_bucket_start: np.int64

    def __init__(
        self,
        labels: dict[str, str],
        fingerprint: str,
        seen_at_ts_bucket_start: np.int64,
    ) -> None:
        # Serialize labels without whitespace, matching the stored format.
        self.labels = json.dumps(labels, separators=(",", ":"))
        self.fingerprint = fingerprint
        self.seen_at_ts_bucket_start = seen_at_ts_bucket_start

    def np_arr(self) -> np.array:
        """Return this row as a numpy array in table column order."""
        row = (self.labels, self.fingerprint, self.seen_at_ts_bucket_start)
        return np.array(row)
class AuditResourceOrAttributeKeys(ABC):
    """Key-metadata row (name + datatype) shared by the attribute-keys and
    resource-keys tables."""

    name: str
    datatype: str

    def __init__(self, name: str, datatype: str) -> None:
        self.name = name
        self.datatype = datatype

    def np_arr(self) -> np.array:
        """Return the (name, datatype) pair as a numpy array."""
        return np.array((self.name, self.datatype))
class AuditTagAttributes(ABC):
    """Row for the audit tag_attributes table: one observed tag key with its
    type, data type, and at most one of string/int64/float64 value."""

    unix_milli: np.int64
    tag_key: str
    tag_type: str
    tag_data_type: str
    string_value: str
    int64_value: Optional[np.int64]
    float64_value: Optional[np.float64]

    def __init__(
        self,
        timestamp: datetime.datetime,
        tag_key: str,
        tag_type: str,
        tag_data_type: str,
        string_value: Optional[str],
        int64_value: Optional[np.int64],
        float64_value: Optional[np.float64],
    ) -> None:
        # Millisecond precision, as stored in the table.
        self.unix_milli = np.int64(int(timestamp.timestamp() * 1e3))
        self.tag_key = tag_key
        self.tag_type = tag_type
        self.tag_data_type = tag_data_type
        # string_value column is non-nullable; absent values become "".
        self.string_value = string_value if string_value else ""
        self.int64_value = int64_value
        self.float64_value = float64_value

    def np_arr(self) -> np.array:
        """Return this row as a numpy array in table column order."""
        row = (
            self.unix_milli,
            self.tag_key,
            self.tag_type,
            self.tag_data_type,
            self.string_value,
            self.int64_value,
            self.float64_value,
        )
        return np.array(row)
class AuditLog(ABC):
    """Represents a single audit log event in signoz_audit.

    Matches the ClickHouse DDL from the schema migration (ticket #1936):
    - Database: signoz_audit
    - Local table: logs
    - Distributed table: distributed_logs
    - No resources_string column (resource JSON only)
    - Has event_name column
    - 7 materialized columns auto-populated from attributes_string at INSERT time
    """

    ts_bucket_start: np.uint64
    resource_fingerprint: str
    timestamp: np.uint64
    observed_timestamp: np.uint64
    id: str
    trace_id: str
    span_id: str
    trace_flags: np.uint32
    severity_text: str
    severity_number: np.uint8
    body: str
    scope_name: str
    scope_version: str
    scope_string: dict[str, str]
    attributes_string: dict[str, str]
    attributes_number: dict[str, np.float64]
    attributes_bool: dict[str, bool]
    resource_json: dict[str, str]
    event_name: str
    resource: List[AuditResource]
    tag_attributes: List[AuditTagAttributes]
    resource_keys: List[AuditResourceOrAttributeKeys]
    attribute_keys: List[AuditResourceOrAttributeKeys]

    def __init__(
        self,
        timestamp: Optional[datetime.datetime] = None,
        resources: Optional[dict[str, Any]] = None,
        attributes: Optional[dict[str, Any]] = None,
        body: str = "",
        event_name: str = "",
        severity_text: str = "INFO",
        trace_id: str = "",
        span_id: str = "",
        trace_flags: np.uint32 = 0,
        scope_name: str = "signoz.audit",
        scope_version: str = "",
    ) -> None:
        """Build the event row plus all derived side-table rows.

        :param timestamp: event time; defaults to now.
        :param resources: resource attributes (stored as JSON only).
        :param attributes: event attributes, dispatched by Python type into
            string/number/bool attribute maps.
        """
        # Use None sentinels instead of mutable `{}` defaults — a shared
        # default dict is a classic Python pitfall.
        if resources is None:
            resources = {}
        if attributes is None:
            attributes = {}
        if timestamp is None:
            timestamp = datetime.datetime.now()
        self.tag_attributes = []
        self.attribute_keys = []
        self.resource_keys = []
        self.timestamp = np.uint64(int(timestamp.timestamp() * 1e9))
        self.observed_timestamp = self.timestamp
        # Events are bucketed into half-hour windows (:00 / :30).
        minute = timestamp.minute
        bucket_minute = 0 if minute < 30 else 30
        bucket_start = timestamp.replace(minute=bucket_minute, second=0, microsecond=0)
        self.ts_bucket_start = np.uint64(int(bucket_start.timestamp()))
        # KSUID keeps ids sortable by event time.
        self.id = str(KsuidMs(datetime=timestamp))
        self.trace_id = trace_id
        self.span_id = span_id
        self.trace_flags = trace_flags
        self.severity_text = severity_text
        # 9 = INFO, 17 = ERROR in OTel severity-number terms.
        self.severity_number = np.uint8(9 if severity_text == "INFO" else 17)
        self.body = body
        self.event_name = event_name
        # Resources — JSON column only (no resources_string in audit DDL)
        self.resource_json = {k: str(v) for k, v in resources.items()}
        for k, v in self.resource_json.items():
            self.tag_attributes.append(
                AuditTagAttributes(
                    timestamp=timestamp,
                    tag_key=k,
                    tag_type="resource",
                    tag_data_type="string",
                    string_value=str(v),
                    int64_value=None,
                    float64_value=None,
                )
            )
            self.resource_keys.append(
                AuditResourceOrAttributeKeys(name=k, datatype="string")
            )
        self.resource_fingerprint = LogsOrTracesFingerprint(
            self.resource_json
        ).calculate()
        # Process attributes by type. bool must be checked before int since
        # bool is a subclass of int in Python.
        self.attributes_string = {}
        self.attributes_number = {}
        self.attributes_bool = {}
        for k, v in attributes.items():
            if isinstance(v, bool):
                self.attributes_bool[k] = v
                self.tag_attributes.append(
                    AuditTagAttributes(
                        timestamp=timestamp,
                        tag_key=k,
                        tag_type="tag",
                        tag_data_type="bool",
                        string_value=None,
                        int64_value=None,
                        float64_value=None,
                    )
                )
                self.attribute_keys.append(
                    AuditResourceOrAttributeKeys(name=k, datatype="bool")
                )
            elif isinstance(v, int):
                self.attributes_number[k] = np.float64(v)
                self.tag_attributes.append(
                    AuditTagAttributes(
                        timestamp=timestamp,
                        tag_key=k,
                        tag_type="tag",
                        tag_data_type="int64",
                        string_value=None,
                        int64_value=np.int64(v),
                        float64_value=None,
                    )
                )
                self.attribute_keys.append(
                    AuditResourceOrAttributeKeys(name=k, datatype="int64")
                )
            elif isinstance(v, float):
                self.attributes_number[k] = np.float64(v)
                self.tag_attributes.append(
                    AuditTagAttributes(
                        timestamp=timestamp,
                        tag_key=k,
                        tag_type="tag",
                        tag_data_type="float64",
                        string_value=None,
                        int64_value=None,
                        float64_value=np.float64(v),
                    )
                )
                self.attribute_keys.append(
                    AuditResourceOrAttributeKeys(name=k, datatype="float64")
                )
            else:
                # Anything else is stringified.
                self.attributes_string[k] = str(v)
                self.tag_attributes.append(
                    AuditTagAttributes(
                        timestamp=timestamp,
                        tag_key=k,
                        tag_type="tag",
                        tag_data_type="string",
                        string_value=str(v),
                        int64_value=None,
                        float64_value=None,
                    )
                )
                self.attribute_keys.append(
                    AuditResourceOrAttributeKeys(name=k, datatype="string")
                )
        self.scope_name = scope_name
        self.scope_version = scope_version
        self.scope_string = {}
        self.resource = [
            AuditResource(
                labels=self.resource_json,
                fingerprint=self.resource_fingerprint,
                seen_at_ts_bucket_start=self.ts_bucket_start,
            )
        ]

    def np_arr(self) -> np.array:
        """Return the event row as a numpy array in table column order."""
        return np.array(
            [
                self.ts_bucket_start,
                self.resource_fingerprint,
                self.timestamp,
                self.observed_timestamp,
                self.id,
                self.trace_id,
                self.span_id,
                self.trace_flags,
                self.severity_text,
                self.severity_number,
                self.body,
                self.scope_name,
                self.scope_version,
                self.scope_string,
                self.attributes_string,
                self.attributes_number,
                self.attributes_bool,
                self.resource_json,
                self.event_name,
            ]
        )
@pytest.fixture(name="insert_audit_logs", scope="function")
def insert_audit_logs(
    clickhouse: types.TestContainerClickhouse,
) -> Generator[Callable[[List[AuditLog]], None], Any, None]:
    """Yield a callable that writes AuditLog objects — plus their derived
    resource, tag-attribute, and key rows — into the signoz_audit tables.

    On teardown every audit table is truncated so tests stay isolated.
    """

    def _insert_audit_logs(logs: List[AuditLog]) -> None:
        # Flatten each per-log side table, inserting only non-empty batches.
        resource_rows = [r for log in logs for r in log.resource]
        if resource_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_logs_resource",
                data=[row.np_arr() for row in resource_rows],
                column_names=[
                    "labels",
                    "fingerprint",
                    "seen_at_ts_bucket_start",
                ],
            )

        tag_rows = [ta for log in logs for ta in log.tag_attributes]
        if tag_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_tag_attributes",
                data=[row.np_arr() for row in tag_rows],
                column_names=[
                    "unix_milli",
                    "tag_key",
                    "tag_type",
                    "tag_data_type",
                    "string_value",
                    "int64_value",
                    "float64_value",
                ],
            )

        attr_key_rows = [ak for log in logs for ak in log.attribute_keys]
        if attr_key_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_logs_attribute_keys",
                data=[row.np_arr() for row in attr_key_rows],
                column_names=["name", "datatype"],
            )

        resource_key_rows = [rk for log in logs for rk in log.resource_keys]
        if resource_key_rows:
            clickhouse.conn.insert(
                database="signoz_audit",
                table="distributed_logs_resource_keys",
                data=[row.np_arr() for row in resource_key_rows],
                column_names=["name", "datatype"],
            )

        # The main event table is always written, even for an empty batch.
        clickhouse.conn.insert(
            database="signoz_audit",
            table="distributed_logs",
            data=[log.np_arr() for log in logs],
            column_names=[
                "ts_bucket_start",
                "resource_fingerprint",
                "timestamp",
                "observed_timestamp",
                "id",
                "trace_id",
                "span_id",
                "trace_flags",
                "severity_text",
                "severity_number",
                "body",
                "scope_name",
                "scope_version",
                "scope_string",
                "attributes_string",
                "attributes_number",
                "attributes_bool",
                "resource",
                "event_name",
            ],
        )

    yield _insert_audit_logs

    # Teardown: wipe every audit table across the cluster.
    cluster = clickhouse.env["SIGNOZ_TELEMETRYSTORE_CLICKHOUSE_CLUSTER"]
    for table in [
        "logs",
        "logs_resource",
        "tag_attributes",
        "logs_attribute_keys",
        "logs_resource_keys",
    ]:
        clickhouse.conn.query(
            f"TRUNCATE TABLE signoz_audit.{table} ON CLUSTER '{cluster}' SYNC"
        )

View File

@@ -38,7 +38,6 @@ class OrderBy:
class BuilderQuery:
signal: str
name: str = "A"
source: Optional[str] = None
limit: Optional[int] = None
filter_expression: Optional[str] = None
select_fields: Optional[List[TelemetryFieldKey]] = None
@@ -49,8 +48,6 @@ class BuilderQuery:
"signal": self.signal,
"name": self.name,
}
if self.source:
spec["source"] = self.source
if self.limit is not None:
spec["limit"] = self.limit
if self.filter_expression:
@@ -58,9 +55,7 @@ class BuilderQuery:
if self.select_fields:
spec["selectFields"] = [f.to_dict() for f in self.select_fields]
if self.order:
spec["order"] = [
o.to_dict() if hasattr(o, "to_dict") else o for o in self.order
]
spec["order"] = [o.to_dict() for o in self.order]
return {"type": "builder_query", "spec": spec}
@@ -81,9 +76,7 @@ class TraceOperatorQuery:
if self.limit is not None:
spec["limit"] = self.limit
if self.order:
spec["order"] = [
o.to_dict() if hasattr(o, "to_dict") else o for o in self.order
]
spec["order"] = [o.to_dict() for o in self.order]
return {"type": "builder_trace_operator", "spec": spec}
@@ -449,7 +442,6 @@ def build_scalar_query(
signal: str,
aggregations: List[Dict],
*,
source: Optional[str] = None,
group_by: Optional[List[Dict]] = None,
order: Optional[List[Dict]] = None,
limit: Optional[int] = None,
@@ -466,9 +458,6 @@ def build_scalar_query(
"aggregations": aggregations,
}
if source:
spec["source"] = source
if group_by:
spec["groupBy"] = group_by

View File

@@ -1,441 +0,0 @@
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List
import pytest
from fixtures import types
from fixtures.audit import AuditLog
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.querier import (
BuilderQuery,
build_logs_aggregation,
build_order_by,
build_scalar_query,
make_query_request,
)
def test_audit_list_all(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
) -> None:
    """List audit events across multiple resource types — verify count, ordering, and fields."""
    now = datetime.now(tz=timezone.utc)
    # Three events, one second apart, over three resource kinds so both
    # ordering and per-row fields can be asserted below.
    insert_audit_logs(
        [
            AuditLog(
                timestamp=now - timedelta(seconds=3),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "alert-rule",
                    "signoz.audit.resource.id": "alert-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-010",
                    "signoz.audit.principal.email": "ops@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "create",
                    "signoz.audit.outcome": "success",
                },
                body="ops@acme.com (user-010) created alert-rule (alert-001)",
                event_name="alert-rule.created",
                severity_text="INFO",
            ),
            AuditLog(
                timestamp=now - timedelta(seconds=2),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "saved-view",
                    "signoz.audit.resource.id": "view-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-010",
                    "signoz.audit.principal.email": "ops@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "success",
                },
                body="ops@acme.com (user-010) updated saved-view (view-001)",
                event_name="saved-view.updated",
                severity_text="INFO",
            ),
            AuditLog(
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "user",
                    "signoz.audit.resource.id": "user-020",
                },
                attributes={
                    "signoz.audit.principal.id": "user-010",
                    "signoz.audit.principal.email": "ops@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.action_category": "access_control",
                    "signoz.audit.outcome": "success",
                },
                body="ops@acme.com (user-010) updated user (user-020)",
                event_name="user.role.changed",
                severity_text="INFO",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    # Re-read the clock so the query window also covers insertion time.
    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            BuilderQuery(
                signal="logs",
                source="audit",
                limit=100,
                order=[build_order_by("timestamp"), build_order_by("id")],
            ).to_dict()
        ],
        request_type="raw",
    )
    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"
    rows = response.json()["data"]["data"]["results"][0]["rows"]
    assert len(rows) == 3
    # Most recent first
    assert rows[0]["data"]["event_name"] == "user.role.changed"
    assert rows[1]["data"]["event_name"] == "saved-view.updated"
    assert rows[2]["data"]["event_name"] == "alert-rule.created"
    # Verify event_name and body are present
    assert rows[0]["data"]["body"] == "ops@acme.com (user-010) updated user (user-020)"
    assert rows[0]["data"]["severity_text"] == "INFO"
# Each param is (filter expression, expected row count, expected event names)
# against the shared five-event fixture data inserted inside the test.
@pytest.mark.parametrize(
    "filter_expression,expected_count,expected_event_names",
    [
        pytest.param(
            "signoz.audit.principal.id = 'user-001'",
            3,
            {"session.login", "dashboard.updated", "dashboard.created"},
            id="filter_by_principal_id",
        ),
        pytest.param(
            "signoz.audit.outcome = 'failure'",
            1,
            {"dashboard.deleted"},
            id="filter_by_outcome_failure",
        ),
        pytest.param(
            "signoz.audit.resource.kind = 'dashboard'"
            " AND signoz.audit.resource.id = 'dash-001'",
            3,
            {"dashboard.deleted", "dashboard.updated", "dashboard.created"},
            id="filter_by_resource_kind_and_id",
        ),
        pytest.param(
            "signoz.audit.principal.type = 'service_account'",
            1,
            {"serviceaccount.apikey.created"},
            id="filter_by_principal_type",
        ),
        pytest.param(
            "signoz.audit.resource.kind = 'dashboard'"
            " AND signoz.audit.action = 'delete'",
            1,
            {"dashboard.deleted"},
            id="filter_by_resource_kind_and_action",
        ),
    ],
)
def test_audit_filter(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
    filter_expression: str,
    expected_count: int,
    expected_event_names: set,
) -> None:
    """Parametrized audit filter tests covering the documented query patterns."""
    now = datetime.now(tz=timezone.utc)
    # Fixture data: five events spanning two users, a service account, and
    # success/failure outcomes, so each filter selects a distinct subset.
    insert_audit_logs(
        [
            AuditLog(
                timestamp=now - timedelta(seconds=5),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-001",
                    "signoz.audit.principal.email": "alice@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "create",
                    "signoz.audit.action_category": "configuration_change",
                    "signoz.audit.outcome": "success",
                },
                body="alice@acme.com created dashboard",
                event_name="dashboard.created",
            ),
            AuditLog(
                timestamp=now - timedelta(seconds=4),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-001",
                    "signoz.audit.principal.email": "alice@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.action_category": "configuration_change",
                    "signoz.audit.outcome": "success",
                },
                body="alice@acme.com updated dashboard",
                event_name="dashboard.updated",
            ),
            # The only failure event (forbidden delete by a second user).
            AuditLog(
                timestamp=now - timedelta(seconds=3),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-001",
                },
                attributes={
                    "signoz.audit.principal.id": "user-002",
                    "signoz.audit.principal.email": "viewer@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "delete",
                    "signoz.audit.action_category": "configuration_change",
                    "signoz.audit.outcome": "failure",
                    "signoz.audit.error.type": "forbidden",
                    "signoz.audit.error.code": "authz_forbidden",
                },
                body="viewer@acme.com failed to delete dashboard",
                event_name="dashboard.deleted",
                severity_text="ERROR",
            ),
            # The only service-account event.
            AuditLog(
                timestamp=now - timedelta(seconds=2),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "serviceaccount",
                    "signoz.audit.resource.id": "sa-001",
                },
                attributes={
                    "signoz.audit.principal.id": "sa-001",
                    "signoz.audit.principal.email": "",
                    "signoz.audit.principal.type": "service_account",
                    "signoz.audit.action": "create",
                    "signoz.audit.action_category": "access_control",
                    "signoz.audit.outcome": "success",
                },
                body="sa-001 created serviceaccount",
                event_name="serviceaccount.apikey.created",
            ),
            AuditLog(
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "session",
                    "signoz.audit.resource.id": "*",
                },
                attributes={
                    "signoz.audit.principal.id": "user-001",
                    "signoz.audit.principal.email": "alice@acme.com",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "login",
                    "signoz.audit.action_category": "access_control",
                    "signoz.audit.outcome": "success",
                },
                body="alice@acme.com login session",
                event_name="session.login",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    # Re-read the clock so the query window also covers insertion time.
    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            BuilderQuery(
                signal="logs",
                source="audit",
                limit=100,
                filter_expression=filter_expression,
                order=[build_order_by("timestamp"), build_order_by("id")],
            ).to_dict()
        ],
        request_type="raw",
    )
    assert response.status_code == HTTPStatus.OK
    rows = response.json()["data"]["data"]["results"][0]["rows"]
    assert len(rows) == expected_count
    actual_event_names = {row["data"]["event_name"] for row in rows}
    assert actual_event_names == expected_event_names
def test_audit_scalar_count_failures(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
) -> None:
    """Alert query — count multiple failures from different principals."""
    now = datetime.now(tz=timezone.utc)
    # Two failure events plus one success; the scalar count() below must
    # return exactly 2 for the failure filter.
    insert_audit_logs(
        [
            AuditLog(
                timestamp=now - timedelta(seconds=3),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-100",
                },
                attributes={
                    "signoz.audit.principal.id": "user-050",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "delete",
                    "signoz.audit.outcome": "failure",
                },
                body="user-050 failed to delete dashboard",
                event_name="dashboard.deleted",
                severity_text="ERROR",
            ),
            AuditLog(
                timestamp=now - timedelta(seconds=2),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "alert-rule",
                    "signoz.audit.resource.id": "alert-200",
                },
                attributes={
                    "signoz.audit.principal.id": "user-060",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "failure",
                },
                body="user-060 failed to update alert-rule",
                event_name="alert-rule.updated",
                severity_text="ERROR",
            ),
            AuditLog(
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "dashboard",
                    "signoz.audit.resource.id": "dash-100",
                },
                attributes={
                    "signoz.audit.principal.id": "user-050",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "success",
                },
                body="user-050 updated dashboard",
                event_name="dashboard.updated",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    # Re-read the clock so the query window also covers insertion time.
    now = datetime.now(tz=timezone.utc)
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            build_scalar_query(
                name="A",
                signal="logs",
                source="audit",
                aggregations=[build_logs_aggregation("count()")],
                filter_expression="signoz.audit.outcome = 'failure'",
            )
        ],
        request_type="scalar",
    )
    assert response.status_code == HTTPStatus.OK
    assert response.json()["status"] == "success"
    scalar_data = response.json()["data"]["data"]["results"][0].get("data", [])
    assert len(scalar_data) == 1
    # Single scalar row whose first column is the failure count.
    assert scalar_data[0][0] == 2
def test_audit_does_not_leak_into_logs(
    signoz: types.SigNoz,
    create_user_admin: None,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
    insert_audit_logs: Callable[[List[AuditLog]], None],
) -> None:
    """A single audit event in signoz_audit must not appear in regular log queries."""
    now = datetime.now(tz=timezone.utc)
    insert_audit_logs(
        [
            AuditLog(
                timestamp=now - timedelta(seconds=1),
                resources={
                    "service.name": "signoz",
                    "signoz.audit.resource.kind": "organization",
                    "signoz.audit.resource.id": "org-999",
                },
                attributes={
                    "signoz.audit.principal.id": "user-admin",
                    "signoz.audit.principal.type": "user",
                    "signoz.audit.action": "update",
                    "signoz.audit.outcome": "success",
                },
                body="user-admin updated organization (org-999)",
                event_name="organization.updated",
            ),
        ]
    )
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    now = datetime.now(tz=timezone.utc)
    # Query the regular logs signal — note: no source="audit".
    response = make_query_request(
        signoz,
        token,
        start_ms=int((now - timedelta(seconds=30)).timestamp() * 1000),
        end_ms=int(now.timestamp() * 1000),
        queries=[
            BuilderQuery(
                signal="logs",
                limit=100,
                order=[build_order_by("timestamp"), build_order_by("id")],
            ).to_dict()
        ],
        request_type="raw",
    )
    assert response.status_code == HTTPStatus.OK
    rows = response.json()["data"]["data"]["results"][0].get("rows") or []
    # Detect leaked audit rows by the presence of any "signoz.audit.*"
    # attribute key. (The previous check looked for the substring
    # "signoz.audit" inside the *value* of the signoz.audit.action attribute;
    # action values like "update" never contain it, so the filter matched
    # nothing and the assertion could never fail.)
    audit_bodies = [
        row["data"]["body"]
        for row in rows
        if any(
            key.startswith("signoz.audit.")
            for key in row["data"].get("attributes_string", {})
        )
    ]
    assert len(audit_bodies) == 0