Mirror of https://github.com/SigNoz/signoz.git (synced 2026-02-19 23:42:29 +00:00)

Compare commits: merge-json...feat/authz
1 commit: e4b2c5d894
frontend/src/components/GuardAuthZ/GuardAuthZ.test.tsx (new file, 332 lines)
@@ -0,0 +1,332 @@
import { ReactElement } from 'react-markdown/lib/react-markdown';
import { ENVIRONMENT } from 'constants/env';
import { BrandedPermission, buildPermission } from 'hooks/useAuthZ/utils';
import { server } from 'mocks-server/server';
import { rest } from 'msw';
import { render, screen, waitFor } from 'tests/test-utils';

import { GuardAuthZ } from './GuardAuthZ';

const BASE_URL = ENVIRONMENT.baseURL || '';

describe('GuardAuthZ', () => {
  const TestChild = (): ReactElement => <div>Protected Content</div>;
  const LoadingFallback = (): ReactElement => <div>Loading...</div>;
  const ErrorFallback = (error: Error): ReactElement => (
    <div>Error occurred: {error.message}</div>
  );
  const NoPermissionFallback = (_response: {
    requiredPermissionName: BrandedPermission;
  }): ReactElement => <div>Access denied</div>;
  const NoPermissionFallbackWithSuggestions = (response: {
    requiredPermissionName: BrandedPermission;
  }): ReactElement => (
    <div>
      Access denied. Required permission: {response.requiredPermissionName}
    </div>
  );

  it('should render children when permission is granted', async () => {
    const permission = buildPermission('read', 'dashboard:*');

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(
          ctx.status(200),
          ctx.json({
            [permission]: true,
          }),
        );
      }),
    );

    render(
      <GuardAuthZ relation="read" object="dashboard:*">
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(screen.getByText('Protected Content')).toBeInTheDocument();
    });
  });

  it('should render fallbackOnLoading when loading', () => {
    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(ctx.status(200), ctx.json({}));
      }),
    );

    render(
      <GuardAuthZ
        relation="read"
        object="dashboard:*"
        fallbackOnLoading={<LoadingFallback />}
      >
        <TestChild />
      </GuardAuthZ>,
    );

    expect(screen.getByText('Loading...')).toBeInTheDocument();
    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should render null when loading and no fallbackOnLoading provided', () => {
    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(ctx.status(200), ctx.json({}));
      }),
    );

    const { container } = render(
      <GuardAuthZ relation="read" object="dashboard:*">
        <TestChild />
      </GuardAuthZ>,
    );

    expect(container.firstChild).toBeNull();
    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should render fallbackOnError when API error occurs', async () => {
    const errorMessage = 'Internal Server Error';

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(ctx.status(500), ctx.json({ error: errorMessage }));
      }),
    );

    render(
      <GuardAuthZ
        relation="read"
        object="dashboard:*"
        fallbackOnError={ErrorFallback}
      >
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(screen.getByText(/Error occurred:/)).toBeInTheDocument();
    });

    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should pass error object to fallbackOnError function', async () => {
    const errorMessage = 'Network request failed';
    let receivedError: Error | null = null;

    const errorFallbackWithCapture = (error: Error): ReactElement => {
      receivedError = error;
      return <div>Captured error: {error.message}</div>;
    };

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(ctx.status(500), ctx.json({ error: errorMessage }));
      }),
    );

    render(
      <GuardAuthZ
        relation="read"
        object="dashboard:*"
        fallbackOnError={errorFallbackWithCapture}
      >
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(receivedError).not.toBeNull();
    });

    expect(receivedError).toBeInstanceOf(Error);
    expect(screen.getByText(/Captured error:/)).toBeInTheDocument();
  });

  it('should render null when error occurs and no fallbackOnError provided', async () => {
    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
      }),
    );

    const { container } = render(
      <GuardAuthZ relation="read" object="dashboard:*">
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(container.firstChild).toBeNull();
    });

    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should render fallbackOnNoPermissions when permission is denied', async () => {
    const permission = buildPermission('update', 'dashboard:123');

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(
          ctx.status(200),
          ctx.json({
            [permission]: false,
          }),
        );
      }),
    );

    render(
      <GuardAuthZ
        relation="update"
        object="dashboard:123"
        fallbackOnNoPermissions={NoPermissionFallback}
      >
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(screen.getByText('Access denied')).toBeInTheDocument();
    });

    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should render null when permission is denied and no fallbackOnNoPermissions provided', async () => {
    const permission = buildPermission('update', 'dashboard:123');

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(
          ctx.status(200),
          ctx.json({
            [permission]: false,
          }),
        );
      }),
    );

    const { container } = render(
      <GuardAuthZ relation="update" object="dashboard:123">
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(container.firstChild).toBeNull();
    });

    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should render null when permissions object is null', async () => {
    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(ctx.status(200), ctx.json(null));
      }),
    );

    const { container } = render(
      <GuardAuthZ relation="read" object="dashboard:*">
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(container.firstChild).toBeNull();
    });

    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should pass requiredPermissionName to fallbackOnNoPermissions', async () => {
    const permission = buildPermission('update', 'dashboard:123');

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
        return res(
          ctx.status(200),
          ctx.json({
            [permission]: false,
          }),
        );
      }),
    );

    render(
      <GuardAuthZ
        relation="update"
        object="dashboard:123"
        fallbackOnNoPermissions={NoPermissionFallbackWithSuggestions}
      >
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(
        screen.getByText(/Access denied. Required permission:/),
      ).toBeInTheDocument();
    });

    expect(screen.getByText(new RegExp(permission))).toBeInTheDocument();
    expect(screen.queryByText('Protected Content')).not.toBeInTheDocument();
  });

  it('should handle different relation and object combinations', async () => {
    const permission1 = buildPermission('read', 'dashboard:*');
    const permission2 = buildPermission('delete', 'dashboard:456');

    server.use(
      rest.post(`${BASE_URL}/api/v1/permissions/check`, (req, res, ctx) => {
        const body = req.body as {
          permissions: Array<{ relation: string; object: string }>;
        };
        const perm = body.permissions[0];
        const permKey = `${perm.relation}/${perm.object}`;

        if (permKey === permission1) {
          return res(
            ctx.status(200),
            ctx.json({
              [permission1]: true,
            }),
          );
        }
        return res(
          ctx.status(200),
          ctx.json({
            [permission2]: true,
          }),
        );
      }),
    );

    const { rerender } = render(
      <GuardAuthZ relation="read" object="dashboard:*">
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(screen.getByText('Protected Content')).toBeInTheDocument();
    });

    rerender(
      <GuardAuthZ relation="delete" object="dashboard:456">
        <TestChild />
      </GuardAuthZ>,
    );

    await waitFor(() => {
      expect(screen.getByText('Protected Content')).toBeInTheDocument();
    });
  });
});
frontend/src/components/GuardAuthZ/GuardAuthZ.tsx (new file, 50 lines)
@@ -0,0 +1,50 @@
import { ReactElement } from 'react';
import { useAuthZ } from 'hooks/useAuthZ/useAuthZ';
import {
  AuthZObject,
  AuthZRelation,
  BrandedPermission,
  buildPermission,
} from 'hooks/useAuthZ/utils';

export type GuardAuthZProps<R extends AuthZRelation> = {
  children: ReactElement;
  relation: R;
  object: AuthZObject<R>;
  fallbackOnLoading?: JSX.Element;
  fallbackOnError?: (error: Error) => JSX.Element;
  fallbackOnNoPermissions?: (response: {
    requiredPermissionName: BrandedPermission;
  }) => JSX.Element;
};

export function GuardAuthZ<R extends AuthZRelation>({
  children,
  relation,
  object,
  fallbackOnLoading,
  fallbackOnError,
  fallbackOnNoPermissions,
}: GuardAuthZProps<R>): JSX.Element | null {
  const permission = buildPermission<R>(relation, object);

  const { permissions, isLoading, error } = useAuthZ([permission]);

  if (isLoading) {
    return fallbackOnLoading ?? null;
  }

  if (error) {
    return fallbackOnError?.(error) ?? null;
  }

  if (!permissions?.[permission]?.isGranted) {
    return (
      fallbackOnNoPermissions?.({
        requiredPermissionName: permission,
      }) ?? null
    );
  }

  return children;
}
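For reference, a minimal usage sketch of the component above, assuming the repository's absolute-import aliases; EditDashboardButton and its fallback copy are illustrative and not part of this change.

import { ReactElement } from 'react';

import { GuardAuthZ } from 'components/GuardAuthZ';
import { buildObjectString } from 'hooks/useAuthZ/utils';

// Renders its child only when the viewer holds `update/dashboard:<id>`.
// While the check is in flight a small placeholder is shown; a denial
// falls back to a short inline notice instead of the button.
export function EditDashboardButton({ id }: { id: string }): ReactElement {
  return (
    <GuardAuthZ
      relation="update"
      object={buildObjectString('dashboard', id)}
      fallbackOnLoading={<span>Checking access...</span>}
      fallbackOnNoPermissions={({ requiredPermissionName }): JSX.Element => (
        <span>Missing permission: {requiredPermissionName}</span>
      )}
    >
      <button type="button">Edit dashboard</button>
    </GuardAuthZ>
  );
}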
frontend/src/components/GuardAuthZ/createGuardedRoute.styles.scss (new file, 46 lines)
@@ -0,0 +1,46 @@
.guard-authz-error-no-authz {
  display: flex;
  align-items: center;
  justify-content: center;

  width: 100%;
  height: 100%;

  padding: 24px;

  .guard-authz-error-no-authz-content {
    display: flex;
    flex-direction: column;
    justify-content: flex-start;
    gap: 8px;
    max-width: 500px;
  }

  img {
    width: 32px;
    height: 32px;
  }

  h3 {
    font-size: 18px;
    color: var(--bg-vanilla-100);
    line-height: 18px;
  }

  p {
    font-size: 14px;
    color: var(--bg-vanilla-400);
    line-height: 18px;

    pre {
      display: flex;
      align-items: center;
      background-color: var(--bg-slate-400);
      cursor: pointer;

      svg {
        margin-left: 2px;
      }
    }
  }
}
frontend/src/components/GuardAuthZ/createGuardedRoute.test.tsx (new file, 489 lines)
@@ -0,0 +1,489 @@
|
||||
import { ReactElement } from 'react';
|
||||
import type { RouteComponentProps } from 'react-router-dom';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { buildPermission } from 'hooks/useAuthZ/utils';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { render, screen, waitFor } from 'tests/test-utils';
|
||||
|
||||
import { createGuardedRoute } from './createGuardedRoute';
|
||||
|
||||
const BASE_URL = ENVIRONMENT.baseURL || '';
|
||||
|
||||
describe('createGuardedRoute', () => {
|
||||
const TestComponent = ({ testProp }: { testProp: string }): ReactElement => (
|
||||
<div>Test Component: {testProp}</div>
|
||||
);
|
||||
|
||||
it('should render component when permission is granted', async () => {
|
||||
const permission = buildPermission('read', 'dashboard:*');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should substitute route parameters in object string', async () => {
|
||||
const permission = buildPermission('read', 'dashboard:123');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '123' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/123',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle multiple route parameters', async () => {
|
||||
const permission = buildPermission('update', 'dashboard:123:456');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'update',
|
||||
'dashboard:{id}:{version}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '123', version: '456' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id/:version',
|
||||
url: '/dashboard/123/456',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should keep placeholder when route parameter is missing', async () => {
|
||||
const permission = buildPermission('read', 'dashboard:{id}');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should render loading fallback when loading', () => {
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(ctx.status(200), ctx.json({}));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
expect(screen.getByText('SigNoz')).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText('Test Component: test-value'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render error fallback when API error occurs', async () => {
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText(/Something went wrong/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(
|
||||
screen.queryByText('Test Component: test-value'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render no permissions fallback when permission is denied', async () => {
|
||||
const permission = buildPermission('update', 'dashboard:123');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: false,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'update',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '123' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/123',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
const heading = document.querySelector('h3');
|
||||
expect(heading).toBeInTheDocument();
|
||||
expect(heading?.textContent).toMatch(/permission to view/i);
|
||||
});
|
||||
|
||||
expect(screen.getByText(permission, { exact: false })).toBeInTheDocument();
|
||||
expect(
|
||||
screen.queryByText('Test Component: test-value'),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should pass all props to wrapped component', async () => {
|
||||
const permission = buildPermission('read', 'dashboard:*');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const ComponentWithMultipleProps = ({
|
||||
prop1,
|
||||
prop2,
|
||||
prop3,
|
||||
}: {
|
||||
prop1: string;
|
||||
prop2: number;
|
||||
prop3: boolean;
|
||||
}): ReactElement => (
|
||||
<div>
|
||||
{prop1} - {prop2} - {prop3.toString()}
|
||||
</div>
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
ComponentWithMultipleProps,
|
||||
'read',
|
||||
'dashboard:*',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: {},
|
||||
isExact: true,
|
||||
path: '/dashboard',
|
||||
url: '/dashboard',
|
||||
};
|
||||
|
||||
const props = {
|
||||
prop1: 'value1',
|
||||
prop2: 42,
|
||||
prop3: true,
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('value1 - 42 - true')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should memoize resolved object based on route params', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:123');
|
||||
const permission2 = buildPermission('read', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
const requestedPermissions: string[] = [];
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const body = req.body as {
|
||||
permissions: Array<{ relation: string; object: string }>;
|
||||
};
|
||||
const perm = body.permissions[0];
|
||||
requestedPermissions.push(`${perm.relation}/${perm.object}`);
|
||||
|
||||
if (perm.object === 'dashboard:123') {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission1]: true,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission2]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'read',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch1 = {
|
||||
params: { id: '123' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/123',
|
||||
};
|
||||
|
||||
const props1 = {
|
||||
testProp: 'test-value-1',
|
||||
match: mockMatch1,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
const { unmount } = render(<GuardedComponent {...props1} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value-1')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(requestedPermissions).toContain('read/dashboard:123');
|
||||
|
||||
unmount();
|
||||
|
||||
const mockMatch2 = {
|
||||
params: { id: '456' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/456',
|
||||
};
|
||||
|
||||
const props2 = {
|
||||
testProp: 'test-value-2',
|
||||
match: mockMatch2,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props2} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value-2')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(requestedPermissions).toContain('read/dashboard:456');
|
||||
});
|
||||
|
||||
it('should handle different relation types', async () => {
|
||||
const permission = buildPermission('delete', 'dashboard:789');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const GuardedComponent = createGuardedRoute(
|
||||
TestComponent,
|
||||
'delete',
|
||||
'dashboard:{id}',
|
||||
);
|
||||
|
||||
const mockMatch = {
|
||||
params: { id: '789' },
|
||||
isExact: true,
|
||||
path: '/dashboard/:id',
|
||||
url: '/dashboard/789',
|
||||
};
|
||||
|
||||
const props = {
|
||||
testProp: 'test-value',
|
||||
match: mockMatch,
|
||||
location: ({} as unknown) as RouteComponentProps['location'],
|
||||
history: ({} as unknown) as RouteComponentProps['history'],
|
||||
};
|
||||
|
||||
render(<GuardedComponent {...props} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Component: test-value')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
frontend/src/components/GuardAuthZ/createGuardedRoute.tsx (new file, 79 lines)
@@ -0,0 +1,79 @@
import { ComponentType, ReactElement, useMemo } from 'react';
import { RouteComponentProps } from 'react-router-dom';
import { toast } from '@signozhq/sonner';
import {
  AuthZObject,
  AuthZRelation,
  BrandedPermission,
} from 'hooks/useAuthZ/utils';
import { Copy } from 'lucide-react';

import { useCopyToClipboard } from '../../hooks/useCopyToClipboard';
import ErrorBoundaryFallback from '../../pages/ErrorBoundaryFallback/ErrorBoundaryFallback';
import AppLoading from '../AppLoading/AppLoading';
import { GuardAuthZ } from './GuardAuthZ';

import './createGuardedRoute.styles.scss';

const onErrorFallback = (): JSX.Element => <ErrorBoundaryFallback />;

function OnNoPermissionsFallback(response: {
  requiredPermissionName: BrandedPermission;
}): ReactElement {
  const { copyToClipboard } = useCopyToClipboard();
  const onClickToCopy = (): void => {
    copyToClipboard(response.requiredPermissionName, 'required-permission');

    toast.success('Permission copied to clipboard');
  };

  return (
    <div className="guard-authz-error-no-authz">
      <div className="guard-authz-error-no-authz-content">
        <img src="/Icons/no-data.svg" alt="No permission" />
        <h3>Uh-oh! You don’t have permission to view this page.</h3>
        <p>
          You need the following permission to view this page:
          <br />
          <pre onClick={onClickToCopy}>
            {response.requiredPermissionName} <Copy size={12} />
          </pre>
          Ask your SigNoz administrator to grant access.
        </p>
      </div>
    </div>
  );
}

// eslint-disable-next-line @typescript-eslint/ban-types
export function createGuardedRoute<P extends object, R extends AuthZRelation>(
  Component: ComponentType<P>,
  relation: R,
  object: AuthZObject<R>,
): ComponentType<P & RouteComponentProps<Record<string, string>>> {
  return function GuardedRouteComponent(
    props: P & RouteComponentProps<Record<string, string>>,
  ): ReactElement {
    const resolvedObject = useMemo(() => {
      const paramPattern = /\{([^}]+)\}/g;
      return object.replace(paramPattern, (match, paramName) => {
        const paramValue = props.match?.params?.[paramName];
        return paramValue !== undefined ? paramValue : match;
      }) as AuthZObject<R>;
    }, [props.match?.params]);

    return (
      <GuardAuthZ
        relation={relation}
        object={resolvedObject}
        fallbackOnLoading={<AppLoading />}
        fallbackOnError={onErrorFallback}
        fallbackOnNoPermissions={(response): ReactElement => (
          <OnNoPermissionsFallback {...response} />
        )}
      >
        <Component {...props} />
      </GuardAuthZ>
    );
  };
}
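A sketch of how the factory above could be wired into routing, assuming react-router v5 as already imported in this diff; DashboardPage and the /dashboard/:dashboardId path are illustrative placeholders.

import { ReactElement } from 'react';
import { Route, RouteComponentProps } from 'react-router-dom';

import { createGuardedRoute } from 'components/GuardAuthZ';

// Placeholder page; any component receiving RouteComponentProps works here.
function DashboardPage({
  match,
}: RouteComponentProps<{ dashboardId: string }>): ReactElement {
  return <div>Dashboard {match.params.dashboardId}</div>;
}

// `{dashboardId}` is substituted from match.params at render time, so visiting
// /dashboard/42 checks the `read/dashboard:42` permission before rendering.
const GuardedDashboardPage = createGuardedRoute(
  DashboardPage,
  'read',
  'dashboard:{dashboardId}',
);

export function DashboardRoutes(): ReactElement {
  return (
    <Route path="/dashboard/:dashboardId" component={GuardedDashboardPage} />
  );
}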
frontend/src/components/GuardAuthZ/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export { createGuardedRoute } from './createGuardedRoute';
export type { GuardAuthZProps } from './GuardAuthZ';
export { GuardAuthZ } from './GuardAuthZ';
frontend/src/hooks/useAuthZ/useAuthZ.test.tsx (new file, 444 lines)
@@ -0,0 +1,444 @@
|
||||
import { ReactElement } from 'react';
|
||||
import { renderHook, waitFor } from '@testing-library/react';
|
||||
import { ENVIRONMENT } from 'constants/env';
|
||||
import { server } from 'mocks-server/server';
|
||||
import { rest } from 'msw';
|
||||
import { AllTheProviders } from 'tests/test-utils';
|
||||
|
||||
import { useAuthZ } from './useAuthZ';
|
||||
import { BrandedPermission, buildPermission } from './utils';
|
||||
|
||||
const BASE_URL = ENVIRONMENT.baseURL || '';
|
||||
|
||||
const wrapper = ({ children }: { children: ReactElement }): ReactElement => (
|
||||
<AllTheProviders>{children}</AllTheProviders>
|
||||
);
|
||||
|
||||
describe('useAuthZ', () => {
|
||||
it('should fetch and return permissions successfully', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
const mockApiResponse = {
|
||||
[permission1]: true,
|
||||
[permission2]: false,
|
||||
};
|
||||
|
||||
const expectedResponse = {
|
||||
[permission1]: {
|
||||
isGranted: true,
|
||||
},
|
||||
[permission2]: {
|
||||
isGranted: false,
|
||||
},
|
||||
};
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(ctx.status(200), ctx.json(mockApiResponse));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([permission1, permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
expect(result.current.isLoading).toBe(true);
|
||||
expect(result.current.permissions).toBeNull();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.error).toBeNull();
|
||||
expect(result.current.permissions).toEqual(expectedResponse);
|
||||
});
|
||||
|
||||
it('should handle API errors', async () => {
|
||||
const permission = buildPermission('read', 'dashboard:*');
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(ctx.status(500), ctx.json({ error: 'Internal Server Error' }));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([permission]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.error).not.toBeNull();
|
||||
expect(result.current.permissions).toBeNull();
|
||||
});
|
||||
|
||||
it('should refetch when permissions array changes', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const body = req.body as {
|
||||
permissions: Array<{ relation: string; object: string }>;
|
||||
};
|
||||
|
||||
if (body.permissions.length === 1) {
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission1]: true,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission1]: true,
|
||||
[permission2]: false,
|
||||
[permission3]: true,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result, rerender } = renderHook<
|
||||
ReturnType<typeof useAuthZ>,
|
||||
BrandedPermission[]
|
||||
>((permissions) => useAuthZ(permissions), {
|
||||
wrapper,
|
||||
initialProps: [permission1],
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(result.current.permissions).toEqual({
|
||||
[permission1]: {
|
||||
isGranted: true,
|
||||
},
|
||||
});
|
||||
|
||||
rerender([permission1, permission2, permission3]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(result.current.permissions).toEqual({
|
||||
[permission1]: {
|
||||
isGranted: true,
|
||||
},
|
||||
[permission2]: {
|
||||
isGranted: false,
|
||||
},
|
||||
[permission3]: {
|
||||
isGranted: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should not refetch when permissions array order changes but content is the same', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
let requestCount = 0;
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
requestCount++;
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission1]: true,
|
||||
[permission2]: false,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result, rerender } = renderHook<
|
||||
ReturnType<typeof useAuthZ>,
|
||||
BrandedPermission[]
|
||||
>((permissions) => useAuthZ(permissions), {
|
||||
wrapper,
|
||||
initialProps: [permission1, permission2],
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
|
||||
rerender([permission2, permission1]);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
});
|
||||
|
||||
it('should handle empty permissions array', async () => {
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, (_req, res, ctx) => {
|
||||
return res(ctx.status(200), ctx.json({}));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
expect(result.current.error).toBeNull();
|
||||
expect(result.current.permissions).toEqual({});
|
||||
});
|
||||
|
||||
it('should send correct payload format to API', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
|
||||
let receivedPayload: any = null;
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
|
||||
receivedPayload = await req.json();
|
||||
return res(
|
||||
ctx.status(200),
|
||||
ctx.json({
|
||||
[permission1]: true,
|
||||
[permission2]: false,
|
||||
}),
|
||||
);
|
||||
}),
|
||||
);
|
||||
|
||||
const { result } = renderHook(() => useAuthZ([permission1, permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(receivedPayload).toEqual({
|
||||
permissions: [
|
||||
{ relation: 'read', object: 'dashboard:*' },
|
||||
{ relation: 'update', object: 'dashboard:123' },
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should batch multiple hooks into single flight request', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
const permission4 = buildPermission('create', 'dashboards');
|
||||
|
||||
let requestCount = 0;
|
||||
const receivedPayloads: any[] = [];
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
receivedPayloads.push(payload);
|
||||
|
||||
const allPermissions = [permission1, permission2, permission3, permission4];
|
||||
|
||||
const response: Record<string, boolean> = {};
|
||||
payload.permissions.forEach((p: { relation: string; object: string }) => {
|
||||
const perm = `${p.relation}/${p.object}` as BrandedPermission;
|
||||
if (allPermissions.includes(perm)) {
|
||||
response[perm] = perm === permission1 || perm === permission3;
|
||||
}
|
||||
});
|
||||
|
||||
return res(ctx.status(200), ctx.json(response));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
const { result: result3 } = renderHook(() => useAuthZ([permission3]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result1.current.isLoading).toBe(false);
|
||||
expect(result2.current.isLoading).toBe(false);
|
||||
expect(result3.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(receivedPayloads).toHaveLength(1);
|
||||
expect(receivedPayloads[0].permissions).toHaveLength(3);
|
||||
expect(receivedPayloads[0].permissions).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ relation: 'read', object: 'dashboard:*' },
|
||||
{ relation: 'update', object: 'dashboard:123' },
|
||||
{ relation: 'delete', object: 'dashboard:456' },
|
||||
]),
|
||||
);
|
||||
|
||||
expect(result1.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
});
|
||||
expect(result2.current.permissions).toEqual({
|
||||
[permission2]: { isGranted: false },
|
||||
});
|
||||
expect(result3.current.permissions).toEqual({
|
||||
[permission3]: { isGranted: true },
|
||||
});
|
||||
});
|
||||
|
||||
it('should create separate batches for calls after single flight window', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
const receivedPayloads: any[] = [];
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
receivedPayloads.push(payload);
|
||||
|
||||
const response: Record<string, boolean> = {};
|
||||
payload.permissions.forEach((p: { relation: string; object: string }) => {
|
||||
const perm = `${p.relation}/${p.object}` as BrandedPermission;
|
||||
response[perm] = perm === permission1 || perm === permission3;
|
||||
});
|
||||
|
||||
return res(ctx.status(200), ctx.json(response));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result1.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(receivedPayloads[0].permissions).toHaveLength(1);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
const { result: result3 } = renderHook(() => useAuthZ([permission3]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result2.current.isLoading).toBe(false);
|
||||
expect(result3.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(receivedPayloads).toHaveLength(2);
|
||||
expect(receivedPayloads[1].permissions).toHaveLength(2);
|
||||
expect(receivedPayloads[1].permissions).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ relation: 'update', object: 'dashboard:123' },
|
||||
{ relation: 'delete', object: 'dashboard:456' },
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should not leak state between separate batches', async () => {
|
||||
const permission1 = buildPermission('read', 'dashboard:*');
|
||||
const permission2 = buildPermission('update', 'dashboard:123');
|
||||
const permission3 = buildPermission('delete', 'dashboard:456');
|
||||
|
||||
let requestCount = 0;
|
||||
|
||||
server.use(
|
||||
rest.post(`${BASE_URL}/api/v1/permissions/check`, async (req, res, ctx) => {
|
||||
requestCount++;
|
||||
const payload = await req.json();
|
||||
|
||||
const response: Record<string, boolean> = {};
|
||||
payload.permissions.forEach((p: { relation: string; object: string }) => {
|
||||
const perm = `${p.relation}/${p.object}` as BrandedPermission;
|
||||
response[perm] = perm === permission1 || perm === permission3;
|
||||
});
|
||||
|
||||
return res(ctx.status(200), ctx.json(response));
|
||||
}),
|
||||
);
|
||||
|
||||
const { result: result1 } = renderHook(() => useAuthZ([permission1]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result1.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(1);
|
||||
expect(result1.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
});
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
const { result: result2 } = renderHook(() => useAuthZ([permission2]), {
|
||||
wrapper,
|
||||
});
|
||||
|
||||
await waitFor(
|
||||
() => {
|
||||
expect(result2.current.isLoading).toBe(false);
|
||||
},
|
||||
{ timeout: 200 },
|
||||
);
|
||||
|
||||
expect(requestCount).toBe(2);
|
||||
expect(result1.current.permissions).toEqual({
|
||||
[permission1]: { isGranted: true },
|
||||
});
|
||||
expect(result2.current.permissions).toEqual({
|
||||
[permission2]: { isGranted: false },
|
||||
});
|
||||
expect(result1.current.permissions).not.toHaveProperty(permission2);
|
||||
expect(result2.current.permissions).not.toHaveProperty(permission1);
|
||||
});
|
||||
});
|
||||
frontend/src/hooks/useAuthZ/useAuthZ.tsx (new file, 136 lines)
@@ -0,0 +1,136 @@
import { useMemo } from 'react';
import { useQueries } from 'react-query';
import api from 'api';

import { BrandedPermission } from './utils';

export type UseAuthZPermissionResult = {
  isGranted: boolean;
};

export type AuthZCheckResponse = Record<
  BrandedPermission,
  UseAuthZPermissionResult
>;

export type UseAuthZResult = {
  isLoading: boolean;
  error: Error | null;
  permissions: AuthZCheckResponse | null;
};

let ctx: Promise<AuthZCheckResponse> | null;
let pendingPermissions: BrandedPermission[] = [];
const SINGLE_FLIGHT_WAIT_TIME_MS = 50;
const AUTHZ_CACHE_TIME = 20_000;

function dispatchPermission(
  permission: BrandedPermission,
): Promise<AuthZCheckResponse> {
  pendingPermissions.push(permission);

  if (!ctx) {
    let resolve: (v: AuthZCheckResponse) => void, reject: (reason?: any) => void;
    ctx = new Promise<AuthZCheckResponse>((r, re) => {
      resolve = r;
      reject = re;
    });

    setTimeout(() => {
      const copiedPermissions = pendingPermissions.slice();
      pendingPermissions = [];
      ctx = null;

      fetchManyPermissions(copiedPermissions).then(resolve).catch(reject);
    }, SINGLE_FLIGHT_WAIT_TIME_MS);
  }

  return ctx;
}

async function fetchManyPermissions(
  permissions: BrandedPermission[],
): Promise<AuthZCheckResponse> {
  const permissionsPayload = permissions.map((permission) => {
    const [relation, object] = permission.split('/');

    return {
      relation,
      object,
    };
  });

  const permissionsCheckResponse = await api
    .post<Record<string, boolean>>('/permissions/check', {
      permissions: permissionsPayload,
    })
    .then((response) => response.data);

  return Object.keys(permissionsCheckResponse).reduce<AuthZCheckResponse>(
    (acc, permission): AuthZCheckResponse => {
      acc[permission as BrandedPermission] = {
        isGranted: !!permissionsCheckResponse[permission],
      };
      return acc;
    },
    {} as AuthZCheckResponse,
  );
}

export function useAuthZ(permissions: BrandedPermission[]): UseAuthZResult {
  const queryResults = useQueries(
    permissions.map((permission) => {
      return {
        queryKey: ['authz', permission],
        cacheTime: AUTHZ_CACHE_TIME,
        refetchOnMount: false,
        refetchIntervalInBackground: false,
        refetchOnWindowFocus: false,
        refetchOnReconnect: true,
        queryFn: async (): Promise<AuthZCheckResponse> => {
          const response = await dispatchPermission(permission);

          return {
            [permission]: {
              isGranted: response[permission].isGranted,
            },
          };
        },
      };
    }),
  );

  const isLoading = useMemo(() => queryResults.some((q) => q.isLoading), [
    queryResults,
  ]);
  const error = useMemo(
    () =>
      !isLoading
        ? (queryResults.find((q) => !!q.error)?.error as Error) || null
        : null,
    [isLoading, queryResults],
  );
  const data = useMemo(() => {
    if (isLoading || error) {
      return null;
    }

    return queryResults.reduce((acc, q) => {
      if (!q.data) {
        return acc;
      }

      for (const [key, value] of Object.entries(q.data)) {
        acc[key as BrandedPermission] = value;
      }

      return acc;
    }, {} as AuthZCheckResponse);
  }, [isLoading, error, queryResults]);

  return {
    isLoading,
    error,
    permissions: data ?? null,
  };
}
frontend/src/hooks/useAuthZ/utils.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
export type AuthZRelation =
  | 'list'
  | 'create'
  | 'read'
  | 'update'
  | 'delete'
  | 'assignee';
export type AuthZResource = 'dashboard';
export type AuthZObject<R extends AuthZRelation> = R extends 'list' | 'create'
  ? `${AuthZResource}s`
  : `${AuthZResource}:${string}`;

export type AuthZPermissionCheck<R extends AuthZRelation> = {
  object: AuthZObject<R>;
  relation: R;
};

export type BrandedPermission = string & { __brandedPermission: true };

export function buildPermission<R extends AuthZRelation>(
  relation: R,
  object: AuthZObject<R>,
): BrandedPermission {
  return `${relation}/${object}` as BrandedPermission;
}

export function buildObjectString(
  resource: AuthZResource,
  objectId: string,
): `${AuthZResource}:${string}` {
  return `${resource}:${objectId}`;
}
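A small sketch tying the helpers above to the useAuthZ hook for a component that checks two permissions at once; DashboardToolbar is illustrative. Both checks land in the same /permissions/check request thanks to the single-flight batching in useAuthZ.

import { ReactElement } from 'react';

import { useAuthZ } from 'hooks/useAuthZ/useAuthZ';
import { buildObjectString, buildPermission } from 'hooks/useAuthZ/utils';

export function DashboardToolbar({ id }: { id: string }): ReactElement | null {
  // 'create' targets the collection ('dashboards'), 'delete' a single object.
  const createPermission = buildPermission('create', 'dashboards');
  const deletePermission = buildPermission(
    'delete',
    buildObjectString('dashboard', id),
  );

  const { permissions, isLoading, error } = useAuthZ([
    createPermission,
    deletePermission,
  ]);

  if (isLoading || error || !permissions) {
    return null;
  }

  return (
    <div>
      {permissions[createPermission]?.isGranted && (
        <button type="button">New dashboard</button>
      )}
      {permissions[deletePermission]?.isGranted && (
        <button type="button">Delete</button>
      )}
    </div>
  );
}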
go.mod (287 lines changed)
@@ -3,22 +3,23 @@ module github.com/SigNoz/signoz
|
||||
go 1.24.0
|
||||
|
||||
require (
|
||||
dario.cat/mergo v1.0.2
|
||||
dario.cat/mergo v1.0.1
|
||||
github.com/AfterShip/clickhouse-sql-parser v0.4.16
|
||||
github.com/ClickHouse/clickhouse-go/v2 v2.40.1
|
||||
github.com/DATA-DOG/go-sqlmock v1.5.2
|
||||
github.com/SigNoz/govaluate v0.0.0-20240203125216-988004ccc7fd
|
||||
github.com/SigNoz/signoz-otel-collector v0.142.1-rc.1
|
||||
github.com/SigNoz/signoz-otel-collector v0.129.10-rc.9
|
||||
github.com/antlr4-go/antlr/v4 v4.13.1
|
||||
github.com/antonmedv/expr v1.15.3
|
||||
github.com/bytedance/sonic v1.14.1
|
||||
github.com/cespare/xxhash/v2 v2.3.0
|
||||
github.com/coreos/go-oidc/v3 v3.16.0
|
||||
github.com/coreos/go-oidc/v3 v3.14.1
|
||||
github.com/dgraph-io/ristretto/v2 v2.3.0
|
||||
github.com/dustin/go-humanize v1.0.1
|
||||
github.com/gin-gonic/gin v1.11.0
|
||||
github.com/go-co-op/gocron v1.30.1
|
||||
github.com/go-openapi/runtime v0.28.0
|
||||
github.com/go-openapi/strfmt v0.24.0
|
||||
github.com/go-openapi/strfmt v0.23.0
|
||||
github.com/go-redis/redismock/v9 v9.2.0
|
||||
github.com/go-viper/mapstructure/v2 v2.4.0
|
||||
github.com/gojek/heimdall/v7 v7.0.3
|
||||
@@ -29,22 +30,22 @@ require (
|
||||
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674
|
||||
github.com/huandu/go-sqlbuilder v1.35.0
|
||||
github.com/jackc/pgx/v5 v5.7.6
|
||||
github.com/json-iterator/go v1.1.13-0.20220915233716-71ac16282d12
|
||||
github.com/json-iterator/go v1.1.12
|
||||
github.com/knadh/koanf v1.5.0
|
||||
github.com/knadh/koanf/v2 v2.3.0
|
||||
github.com/mailru/easyjson v0.9.0
|
||||
github.com/open-telemetry/opamp-go v0.22.0
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.142.0
|
||||
github.com/knadh/koanf/v2 v2.2.0
|
||||
github.com/mailru/easyjson v0.7.7
|
||||
github.com/open-telemetry/opamp-go v0.19.0
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza v0.128.0
|
||||
github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67
|
||||
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe
|
||||
github.com/opentracing/opentracing-go v1.2.0
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/prometheus/alertmanager v0.28.1
|
||||
github.com/prometheus/client_golang v1.23.2
|
||||
github.com/prometheus/common v0.67.4
|
||||
github.com/prometheus/prometheus v0.308.0
|
||||
github.com/prometheus/common v0.66.1
|
||||
github.com/prometheus/prometheus v0.304.1
|
||||
github.com/redis/go-redis/extra/redisotel/v9 v9.15.1
|
||||
github.com/redis/go-redis/v9 v9.17.2
|
||||
github.com/redis/go-redis/v9 v9.15.1
|
||||
github.com/rs/cors v1.11.1
|
||||
github.com/russellhaering/gosaml2 v0.9.0
|
||||
github.com/russellhaering/goxmldsig v1.2.0
|
||||
@@ -53,7 +54,7 @@ require (
|
||||
github.com/sethvargo/go-password v0.2.0
|
||||
github.com/smartystreets/goconvey v1.8.1
|
||||
github.com/soheilhy/cmux v0.1.5
|
||||
github.com/spf13/cobra v1.10.2
|
||||
github.com/spf13/cobra v1.10.1
|
||||
github.com/srikanthccv/ClickHouse-go-mock v0.13.0
|
||||
github.com/stretchr/testify v1.11.1
|
||||
github.com/swaggest/jsonschema-go v0.3.78
|
||||
@@ -63,59 +64,43 @@ require (
|
||||
github.com/uptrace/bun/dialect/pgdialect v1.2.9
|
||||
github.com/uptrace/bun/dialect/sqlitedialect v1.2.9
|
||||
github.com/uptrace/bun/extra/bunotel v1.2.9
|
||||
go.opentelemetry.io/collector/confmap v1.48.0
|
||||
go.opentelemetry.io/collector/otelcol v0.142.0
|
||||
go.opentelemetry.io/collector/pdata v1.48.0
|
||||
go.opentelemetry.io/collector/confmap v1.34.0
|
||||
go.opentelemetry.io/collector/otelcol v0.128.0
|
||||
go.opentelemetry.io/collector/pdata v1.34.0
|
||||
go.opentelemetry.io/contrib/config v0.10.0
|
||||
go.opentelemetry.io/contrib/instrumentation/github.com/gorilla/mux/otelmux v0.63.0
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.64.0
|
||||
go.opentelemetry.io/otel v1.39.0
|
||||
go.opentelemetry.io/otel/metric v1.39.0
|
||||
go.opentelemetry.io/otel/sdk v1.39.0
|
||||
go.opentelemetry.io/otel/trace v1.39.0
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0
|
||||
go.opentelemetry.io/otel v1.38.0
|
||||
go.opentelemetry.io/otel/metric v1.38.0
|
||||
go.opentelemetry.io/otel/sdk v1.38.0
|
||||
go.opentelemetry.io/otel/trace v1.38.0
|
||||
go.uber.org/multierr v1.11.0
|
||||
go.uber.org/zap v1.27.1
|
||||
go.uber.org/zap v1.27.0
|
||||
golang.org/x/crypto v0.46.0
|
||||
golang.org/x/exp v0.0.0-20251113190631-e25ba8c21ef6
|
||||
golang.org/x/net v0.48.0
|
||||
golang.org/x/oauth2 v0.33.0
|
||||
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b
|
||||
golang.org/x/net v0.47.0
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
golang.org/x/sync v0.19.0
|
||||
golang.org/x/text v0.32.0
|
||||
google.golang.org/protobuf v1.36.10
|
||||
google.golang.org/protobuf v1.36.9
|
||||
gopkg.in/yaml.v2 v2.4.0
|
||||
gopkg.in/yaml.v3 v3.0.1
|
||||
k8s.io/apimachinery v0.35.0-alpha.0
|
||||
k8s.io/apimachinery v0.34.0
|
||||
modernc.org/sqlite v1.39.1
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/config v1.32.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.14 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.14 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/signin v1.0.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.30.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.9 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.41.1 // indirect
|
||||
github.com/aws/smithy-go v1.23.2 // indirect
|
||||
github.com/bytedance/gopkg v0.1.3 // indirect
|
||||
github.com/bytedance/sonic v1.14.1 // indirect
|
||||
github.com/bytedance/sonic/loader v0.3.0 // indirect
|
||||
github.com/cloudwego/base64x v0.1.6 // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 // indirect
|
||||
github.com/go-playground/locales v0.14.1 // indirect
|
||||
github.com/go-playground/universal-translator v0.18.1 // indirect
|
||||
github.com/go-playground/validator/v10 v10.27.0 // indirect
|
||||
github.com/goccy/go-yaml v1.19.0 // indirect
|
||||
github.com/goccy/go-yaml v1.18.0 // indirect
|
||||
github.com/leodido/go-urn v1.4.0 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/prometheus/client_golang/exp v0.0.0-20250914183048-a974e0d45e0a // indirect
|
||||
github.com/redis/go-redis/extra/rediscmd/v9 v9.15.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/swaggest/refl v1.4.0 // indirect
|
||||
@@ -123,27 +108,24 @@ require (
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
|
||||
github.com/ugorji/go/codec v1.3.0 // indirect
|
||||
github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect
|
||||
go.opentelemetry.io/collector/client v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configoptional v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configretry v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/exporterhelper v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/xpdata v0.142.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.3 // indirect
|
||||
go.opentelemetry.io/collector/config/configretry v1.34.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
golang.org/x/arch v0.20.0 // indirect
|
||||
golang.org/x/tools/godoc v0.1.0-deprecated // indirect
|
||||
modernc.org/libc v1.66.10 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
cel.dev/expr v0.25.1 // indirect
|
||||
cloud.google.com/go/auth v0.17.0 // indirect
|
||||
cel.dev/expr v0.24.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.1 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.9.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.20.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.13.1 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.2 // indirect
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.6.0 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.8.2 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
|
||||
github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect
|
||||
github.com/ClickHouse/ch-go v0.67.0 // indirect
|
||||
github.com/Masterminds/squirrel v1.5.4 // indirect
|
||||
github.com/Yiling-J/theine-go v0.6.2 // indirect
|
||||
@@ -151,35 +133,35 @@ require (
|
||||
github.com/andybalholm/brotli v1.2.0 // indirect
|
||||
github.com/armon/go-metrics v0.4.1 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/aws/aws-sdk-go v1.55.8 // indirect
|
||||
github.com/aws/aws-sdk-go v1.55.7 // indirect
|
||||
github.com/beevik/etree v1.1.0 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
|
||||
github.com/coder/quartz v0.1.2 // indirect
|
||||
github.com/coreos/go-systemd/v22 v22.6.0 // indirect
|
||||
github.com/coreos/go-systemd/v22 v22.5.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dennwc/varint v1.0.0 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/docker/go-units v0.5.0 // indirect
|
||||
github.com/ebitengine/purego v0.9.1 // indirect
|
||||
github.com/ebitengine/purego v0.8.4 // indirect
|
||||
github.com/edsrzf/mmap-go v1.2.0 // indirect
|
||||
github.com/elastic/lunes v0.2.0 // indirect
|
||||
github.com/elastic/lunes v0.1.0 // indirect
|
||||
github.com/emirpasic/gods v1.18.1 // indirect
|
||||
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
|
||||
github.com/expr-lang/expr v1.17.7
|
||||
github.com/expr-lang/expr v1.17.5
|
||||
github.com/facette/natsort v0.0.0-20181210072756-2cd4dd1e2dcb // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/fsnotify/fsnotify v1.9.0 // indirect
|
||||
github.com/go-faster/city v1.0.1 // indirect
|
||||
github.com/go-faster/errors v0.7.1 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.1.1 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-ole/go-ole v1.3.0 // indirect
|
||||
github.com/go-openapi/analysis v0.23.0 // indirect
|
||||
github.com/go-openapi/errors v0.22.3 // indirect
|
||||
github.com/go-openapi/errors v0.22.0 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.21.0 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.0 // indirect
|
||||
github.com/go-openapi/loads v0.22.0 // indirect
|
||||
@@ -196,19 +178,19 @@ require (
|
||||
github.com/google/btree v1.1.3 // indirect
|
||||
github.com/google/cel-go v0.26.1 // indirect
|
||||
github.com/google/s2a-go v0.1.9 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.7 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gopherjs/gopherjs v1.17.2 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20250905093917-f7b3be9d1853 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.2 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
|
||||
github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-sockaddr v1.0.7 // indirect
|
||||
github.com/hashicorp/go-version v1.8.0 // indirect
|
||||
github.com/hashicorp/go-version v1.7.0 // indirect
|
||||
github.com/hashicorp/golang-lru v1.0.2 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/memberlist v0.5.1 // indirect
|
||||
@@ -224,21 +206,21 @@ require (
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/jpillora/backoff v1.0.0 // indirect
|
||||
github.com/jtolds/gls v4.20.0+incompatible // indirect
|
||||
github.com/klauspost/compress v1.18.2 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
|
||||
github.com/leodido/go-syslog/v4 v4.3.0 // indirect
|
||||
github.com/leodido/go-syslog/v4 v4.2.0 // indirect
|
||||
github.com/leodido/ragel-machinery v0.0.0-20190525184631-5f46317e436b // indirect
|
||||
github.com/lufia/plan9stats v0.0.0-20250317134145-8bc96cf8fc35 // indirect
|
||||
github.com/magefile/mage v1.15.0 // indirect
|
||||
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
|
||||
github.com/mdlayher/socket v0.5.1 // indirect
|
||||
github.com/mdlayher/socket v0.4.1 // indirect
|
||||
github.com/mdlayher/vsock v1.2.1 // indirect
|
||||
github.com/mfridman/interpolate v0.0.2 // indirect
|
||||
github.com/miekg/dns v1.1.68 // indirect
|
||||
github.com/miekg/dns v1.1.65 // indirect
|
||||
github.com/mitchellh/copystructure v1.2.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c // indirect
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
@@ -247,14 +229,14 @@ require (
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
|
||||
github.com/natefinch/wrap v0.2.0 // indirect
|
||||
github.com/oklog/run v1.2.0 // indirect
|
||||
github.com/oklog/run v1.1.0 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/oklog/ulid/v2 v2.1.1
|
||||
github.com/open-feature/go-sdk v1.17.0
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.142.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/coreinternal v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.128.0 // indirect
|
||||
github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.128.0 // indirect
|
||||
github.com/openfga/openfga v1.10.1
|
||||
github.com/paulmach/orb v0.11.1 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
|
||||
@@ -264,10 +246,10 @@ require (
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect
|
||||
github.com/pressly/goose/v3 v3.25.0 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/exporter-toolkit v0.15.0 // indirect
|
||||
github.com/prometheus/otlptranslator v1.0.0 // indirect
|
||||
github.com/prometheus/procfs v0.19.2 // indirect
|
||||
github.com/prometheus/sigv4 v0.3.0 // indirect
|
||||
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
|
||||
github.com/prometheus/otlptranslator v0.0.0-20250320144820-d800c8b0eb07 // indirect
|
||||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/prometheus/sigv4 v0.1.2 // indirect
|
||||
github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect
|
||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
||||
github.com/sagikazarmark/locafero v0.9.0 // indirect
|
||||
@@ -275,7 +257,7 @@ require (
|
||||
github.com/segmentio/asm v1.2.0 // indirect
|
||||
github.com/segmentio/backo-go v1.0.1 // indirect
|
||||
github.com/sethvargo/go-retry v0.3.0 // indirect
|
||||
github.com/shirou/gopsutil/v4 v4.25.11 // indirect
|
||||
github.com/shirou/gopsutil/v4 v4.25.5 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c // indirect
|
||||
github.com/shurcooL/vfsgen v0.0.0-20230704071429-0000e147ea92 // indirect
|
||||
@@ -290,9 +272,9 @@ require (
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/swaggest/openapi-go v0.2.60
|
||||
github.com/tidwall/match v1.1.1 // indirect
|
||||
github.com/tidwall/pretty v1.2.1 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.16 // indirect
|
||||
github.com/tklauser/numcpus v0.11.0 // indirect
|
||||
github.com/tidwall/pretty v1.2.0 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.15 // indirect
|
||||
github.com/tklauser/numcpus v0.10.0 // indirect
|
||||
github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect
|
||||
github.com/trivago/tgo v1.0.7 // indirect
|
||||
github.com/valyala/fastjson v1.6.4 // indirect
|
||||
@@ -301,82 +283,83 @@ require (
|
||||
github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect
|
||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||
github.com/zeebo/xxh3 v1.0.2 // indirect
|
||||
go.mongodb.org/mongo-driver v1.17.4 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/collector/component v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componentstatus v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componenttest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configtelemetry v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/envprovider v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/xconfmap v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/connector v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/connectortest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/xconnector v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumererror v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumertest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/xconsumer v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/exportertest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/xexporter v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/extension v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensioncapabilities v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensiontest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/xextension v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/featuregate v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/fanoutconsumer v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/telemetry v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/pprofile v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/testdata v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline/xpipeline v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/processor v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processorhelper v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processortest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/xprocessor v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver v1.48.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receiverhelper v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receivertest v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/xreceiver v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/semconv v0.128.1-0.20250610090210-188191247685
|
||||
go.opentelemetry.io/collector/service v0.142.0 // indirect
|
||||
go.opentelemetry.io/collector/service/hostcapabilities v0.142.0 // indirect
|
||||
go.opentelemetry.io/contrib/bridges/otelzap v0.13.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect
|
||||
go.opentelemetry.io/contrib/otelconf v0.18.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/prometheus v0.60.0
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/log v0.15.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/log v0.14.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/metric v1.39.0
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
||||
go.mongodb.org/mongo-driver v1.17.1 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.1.0 // indirect
|
||||
go.opentelemetry.io/collector/component v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componentstatus v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/component/componenttest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/config/configtelemetry v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/envprovider v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/provider/fileprovider v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/confmap/xconfmap v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/connector v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/connectortest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/connector/xconnector v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumererror v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/consumertest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/consumer/xconsumer v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/exportertest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/exporter/xexporter v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/extension v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensioncapabilities v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/extensiontest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/extension/xextension v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/featuregate v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/fanoutconsumer v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/internal/telemetry v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/pprofile v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pdata/testdata v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/pipeline/xpipeline v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/processor v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processorhelper v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/processortest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/processor/xprocessor v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver v1.34.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receiverhelper v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/receivertest v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/receiver/xreceiver v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/semconv v0.128.0
|
||||
go.opentelemetry.io/collector/service v0.128.0 // indirect
|
||||
go.opentelemetry.io/collector/service/hostcapabilities v0.128.0 // indirect
|
||||
go.opentelemetry.io/contrib/bridges/otelzap v0.11.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 // indirect
|
||||
go.opentelemetry.io/contrib/otelconf v0.16.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploggrpc v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/prometheus v0.58.0
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutlog v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.38.0 // indirect
|
||||
go.opentelemetry.io/otel/log v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/sdk/log v0.12.2 // indirect
|
||||
go.opentelemetry.io/otel/sdk/metric v1.38.0
|
||||
go.opentelemetry.io/proto/otlp v1.8.0 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/mock v0.6.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/mod v0.30.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/time v0.11.0 // indirect
|
||||
golang.org/x/tools v0.39.0 // indirect
|
||||
gonum.org/v1/gonum v0.16.0 // indirect
|
||||
google.golang.org/api v0.257.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217 // indirect
|
||||
google.golang.org/grpc v1.77.0 // indirect
|
||||
google.golang.org/api v0.236.0
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5 // indirect
|
||||
google.golang.org/grpc v1.75.1 // indirect
|
||||
gopkg.in/telebot.v3 v3.3.8 // indirect
|
||||
k8s.io/client-go v0.34.2 // indirect
|
||||
k8s.io/client-go v0.34.0 // indirect
|
||||
k8s.io/klog/v2 v2.130.1 // indirect
|
||||
k8s.io/utils v0.0.0-20250604170112-4c0f3b243397 // indirect
|
||||
sigs.k8s.io/yaml v1.6.0 // indirect
|
||||
)
|
||||
|
||||
replace github.com/expr-lang/expr => github.com/SigNoz/expr v1.17.7-beta
|
||||
|
||||
@@ -76,7 +76,7 @@ func (m *module) ListPromotedAndIndexedPaths(ctx context.Context) ([]promotetype
|
||||
|
||||
// add the paths that are not promoted but have indexes
|
||||
for path, indexes := range aggr {
|
||||
path := strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
|
||||
path := strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
|
||||
path = telemetrytypes.BodyJSONStringSearchPrefix + path
|
||||
response = append(response, promotetypes.PromotePath{
|
||||
Path: path,
|
||||
@@ -156,7 +156,7 @@ func (m *module) PromoteAndIndexPaths(
|
||||
}
|
||||
}
|
||||
if len(it.Indexes) > 0 {
|
||||
parentColumn := telemetrylogs.LogsV2BodyV2Column
|
||||
parentColumn := telemetrylogs.LogsV2BodyJSONColumn
|
||||
// if the path is already promoted or is being promoted, add it to the promoted column
|
||||
if _, promoted := existingPromotedPaths[it.Path]; promoted || it.Promote {
|
||||
parentColumn = telemetrylogs.LogsV2BodyPromotedColumn
|
||||
|
||||
@@ -2,7 +2,6 @@ package prometheus
|
||||
|
||||
import (
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
)
|
||||
|
||||
@@ -16,24 +15,30 @@ func RemoveExtraLabels(res *promql.Result, labelsToRemove ...string) error {
|
||||
toRemove[l] = struct{}{}
|
||||
}
|
||||
|
||||
dropLabels := func(metric labels.Labels) labels.Labels {
|
||||
b := labels.NewBuilder(metric)
|
||||
for name := range toRemove {
|
||||
b.Del(name)
|
||||
}
|
||||
return b.Labels()
|
||||
}
|
||||
|
||||
switch res.Value.(type) {
|
||||
case promql.Vector:
|
||||
value := res.Value.(promql.Vector)
|
||||
for i := range value {
|
||||
(value)[i].Metric = dropLabels((value)[i].Metric)
|
||||
series := &(value)[i]
|
||||
dst := series.Metric[:0]
|
||||
for _, lbl := range series.Metric {
|
||||
if _, drop := toRemove[lbl.Name]; !drop {
|
||||
dst = append(dst, lbl)
|
||||
}
|
||||
}
|
||||
series.Metric = dst
|
||||
}
|
||||
case promql.Matrix:
|
||||
value := res.Value.(promql.Matrix)
|
||||
for i := range value {
|
||||
(value)[i].Metric = dropLabels((value)[i].Metric)
|
||||
series := &(value)[i]
|
||||
dst := series.Metric[:0]
|
||||
for _, lbl := range series.Metric {
|
||||
if _, drop := toRemove[lbl.Name]; !drop {
|
||||
dst = append(dst, lbl)
|
||||
}
|
||||
}
|
||||
series.Metric = dst
|
||||
}
|
||||
case promql.Scalar:
|
||||
return nil
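
Since the old and new versions of this hunk are interleaved above, here is the new in-place filtering on its own, as a minimal sketch. It assumes labels.Labels behaves as a plain slice of labels.Label, which the new code's use of series.Metric[:0] implies, and it reuses the backing array to avoid per-series allocations:

	// toRemove is the set of label names to drop, built earlier in RemoveExtraLabels.
	dst := series.Metric[:0]
	for _, lbl := range series.Metric {
		if _, drop := toRemove[lbl.Name]; !drop {
			dst = append(dst, lbl) // keep labels that are not in the drop set
		}
	}
	series.Metric = dst // same backing array, filtered in place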
|
||||
|
||||
@@ -10,9 +10,11 @@ import (
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrylogs"
|
||||
"github.com/SigNoz/signoz/pkg/telemetrystore"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/bytedance/sonic"
|
||||
)
|
||||
|
||||
type builderQuery[T any] struct {
|
||||
@@ -248,6 +250,40 @@ func (q *builderQuery[T]) executeWithContext(ctx context.Context, query string,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// merge body_json and promoted into body
if q.spec.Signal == telemetrytypes.SignalLogs {
	switch typedPayload := payload.(type) {
	case *qbtypes.RawData:
		for _, rr := range typedPayload.Rows {
			seeder := func() error {
				body, ok := rr.Data[telemetrylogs.LogsV2BodyJSONColumn].(map[string]any)
				if !ok {
					return nil
				}
				promoted, ok := rr.Data[telemetrylogs.LogsV2BodyPromotedColumn].(map[string]any)
				if !ok {
					return nil
				}
				seed(promoted, body)
				str, err := sonic.MarshalString(body)
				if err != nil {
					return errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "failed to marshal body")
				}
				rr.Data["body"] = str
				return nil
			}
			err := seeder()
			if err != nil {
				return nil, err
			}

			delete(rr.Data, telemetrylogs.LogsV2BodyJSONColumn)
			delete(rr.Data, telemetrylogs.LogsV2BodyPromotedColumn)
		}
		payload = typedPayload
	}
}
|
||||
|
||||
return &qbtypes.Result{
|
||||
Type: q.kind,
|
||||
Value: payload,
|
||||
@@ -375,3 +411,18 @@ func decodeCursor(cur string) (int64, error) {
|
||||
}
|
||||
return strconv.ParseInt(string(b), 10, 64)
|
||||
}
|
||||
|
||||
func seed(promoted map[string]any, body map[string]any) {
	for key, fromValue := range promoted {
		if toValue, ok := body[key]; !ok {
			body[key] = fromValue
		} else {
			if fromValue, ok := fromValue.(map[string]any); ok {
				if toValue, ok := toValue.(map[string]any); ok {
					seed(fromValue, toValue)
					body[key] = toValue
				}
			}
		}
	}
}
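
A small usage sketch for the seed helper above (the field names and values are hypothetical, chosen only to illustrate the merge semantics): keys already present in body win, missing keys are copied over from promoted, and nested maps are merged recursively.

	body := map[string]any{"level": "info", "ctx": map[string]any{"a": 1}}
	promoted := map[string]any{"level": "debug", "ctx": map[string]any{"b": 2}}
	seed(promoted, body)
	// body is now map[string]any{"level": "info", "ctx": map[string]any{"a": 1, "b": 2}}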
|
||||
|
||||
@@ -14,6 +14,7 @@ import (
|
||||
"github.com/ClickHouse/clickhouse-go/v2/lib/driver"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/bytedance/sonic"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -393,11 +394,17 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {

// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
// Post-process JSON columns: normalize into String value

// Post-process JSON columns: normalize into structured values
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
val = string(x)
if len(x) > 0 {
var v any
if err := sonic.Unmarshal(x, &v); err == nil {
val = v
}
}
default:
// already a structured type (map[string]any, []any, etc.)
}
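
A short sketch of what this normalization does to a single JSON cell (the payload below is made up): the raw bytes are decoded with sonic into structured Go values, so later post-processing, such as the body merge in executeWithContext, can treat the cell as a map instead of a string.

	raw := []byte(`{"message":"disk full","code":507}`)
	var v any
	if err := sonic.Unmarshal(raw, &v); err == nil {
		// v now holds a map[string]any with "message" and "code" entries
	}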
|
||||
|
||||
@@ -16,7 +16,6 @@ import (
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbv5 "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
@@ -241,13 +240,13 @@ func (q *promqlQuery) Execute(ctx context.Context) (*qbv5.Result, error) {
|
||||
var series []*qbv5.TimeSeries
|
||||
for _, v := range matrix {
|
||||
var s qbv5.TimeSeries
|
||||
lbls := make([]*qbv5.Label, 0, v.Metric.Len())
|
||||
v.Metric.Range(func(l labels.Label) {
|
||||
lbls := make([]*qbv5.Label, 0, len(v.Metric))
|
||||
for name, value := range v.Metric.Copy().Map() {
|
||||
lbls = append(lbls, &qbv5.Label{
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: l.Name},
|
||||
Value: l.Value,
|
||||
Key: telemetrytypes.TelemetryFieldKey{Name: name},
|
||||
Value: value,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
s.Labels = lbls
|
||||
|
||||
|
||||
@@ -19,7 +19,6 @@ import (
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/ruletypes"
|
||||
"github.com/SigNoz/signoz/pkg/valuer"
|
||||
"github.com/prometheus/prometheus/model/labels"
|
||||
"github.com/prometheus/prometheus/promql"
|
||||
)
|
||||
|
||||
@@ -462,12 +461,12 @@ func toCommonSeries(series promql.Series) v3.Series {
|
||||
Points: make([]v3.Point, 0),
|
||||
}
|
||||
|
||||
series.Metric.Range(func(l labels.Label) {
|
||||
commonSeries.Labels[l.Name] = l.Value
|
||||
for _, lbl := range series.Metric {
|
||||
commonSeries.Labels[lbl.Name] = lbl.Value
|
||||
commonSeries.LabelsArray = append(commonSeries.LabelsArray, map[string]string{
|
||||
l.Name: l.Value,
|
||||
lbl.Name: lbl.Value,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
for _, f := range series.Floats {
|
||||
commonSeries.Points = append(commonSeries.Points, v3.Point{
|
||||
|
||||
@@ -204,10 +204,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
|
||||
// While we expect the user not to send mixed data types, it inevitably happens,
// so we handle the data type collisions here
|
||||
switch key.FieldDataType {
|
||||
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString, telemetrytypes.FieldDataTypeJSON:
|
||||
if key.FieldDataType == telemetrytypes.FieldDataTypeJSON {
|
||||
tblFieldName = fmt.Sprintf("toString(%s)", tblFieldName)
|
||||
}
|
||||
case telemetrytypes.FieldDataTypeString, telemetrytypes.FieldDataTypeArrayString:
|
||||
switch v := value.(type) {
|
||||
case float64:
|
||||
// try to convert the string value to a number
|
||||
@@ -222,6 +219,7 @@ func DataTypeCollisionHandledFieldName(key *telemetrytypes.TelemetryFieldKey, va
|
||||
// we don't have a toBoolOrNull in ClickHouse, so we need to convert the bool to a string
|
||||
value = fmt.Sprintf("%t", v)
|
||||
}
|
||||
|
||||
case telemetrytypes.FieldDataTypeInt64,
|
||||
telemetrytypes.FieldDataTypeArrayInt64,
|
||||
telemetrytypes.FieldDataTypeNumber,
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"slices"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
@@ -384,7 +383,6 @@ func (v *filterExpressionVisitor) VisitComparison(ctx *grammar.ComparisonContext
|
||||
for _, key := range keys {
|
||||
condition, err := v.conditionBuilder.ConditionFor(context.Background(), key, op, nil, v.builder, v.startNs, v.endNs)
|
||||
if err != nil {
|
||||
v.errors = append(v.errors, fmt.Sprintf("failed to build condition: %s", err.Error()))
|
||||
return ""
|
||||
}
|
||||
conds = append(conds, condition)
|
||||
@@ -909,13 +907,6 @@ func (v *filterExpressionVisitor) VisitKey(ctx *grammar.KeyContext) any {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(Piyush): Handle this better; Use constants for the strings later
|
||||
if BodyJSONQueryEnabled && fieldKey.Name == "body" && slices.ContainsFunc(fieldKeysForName, func(key *telemetrytypes.TelemetryFieldKey) bool {
|
||||
return key.Name == "message" && key.FieldDataType == telemetrytypes.FieldDataTypeString && key.FieldContext == telemetrytypes.FieldContextBody
|
||||
}) {
|
||||
v.warnings = append(v.warnings, "You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search.")
|
||||
}
|
||||
|
||||
if len(fieldKeysForName) > 1 {
|
||||
warnMsg := fmt.Sprintf(
|
||||
"Key `%s` is ambiguous, found %d different combinations of field context / data type: %v.",
|
||||
|
||||
@@ -35,19 +35,13 @@ func (c *conditionBuilder) conditionFor(
|
||||
return "", err
|
||||
}
|
||||
|
||||
if column.Type.GetType() == schema.ColumnTypeEnumJSON {
|
||||
// If the field's data type is not the JSON column itself, we need to build a JSON condition
|
||||
if querybuilder.BodyJSONQueryEnabled && key.MustBuildJSONCondition() {
|
||||
valueType, value := InferDataType(value, operator, key)
|
||||
cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return cond, nil
|
||||
} else if key.FieldDataType == telemetrytypes.FieldDataTypeJSON && !operator.IsOpValidForJSON() {
|
||||
// Skip building condition for invalid operators on JSON columns
|
||||
return "", nil
|
||||
if column.IsJSONColumn() && querybuilder.BodyJSONQueryEnabled {
|
||||
valueType, value := InferDataType(value, operator, key)
|
||||
cond, err := NewJSONConditionBuilder(key, valueType).buildJSONCondition(operator, value, sb)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return cond, nil
|
||||
}
|
||||
|
||||
if operator.IsStringSearchOperator() {
|
||||
@@ -114,6 +108,7 @@ func (c *conditionBuilder) conditionFor(
|
||||
return sb.ILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
|
||||
case qbtypes.FilterOperatorNotContains:
|
||||
return sb.NotILike(tblFieldName, fmt.Sprintf("%%%s%%", value)), nil
|
||||
|
||||
case qbtypes.FilterOperatorRegexp:
|
||||
// Note: Escape $$ to $$$$ to avoid sqlbuilder interpreting materialized $ signs
|
||||
// Only needed because we are using sprintf instead of sb.Match (not implemented in sqlbuilder)
|
||||
@@ -181,16 +176,9 @@ func (c *conditionBuilder) conditionFor(
|
||||
var value any
|
||||
switch column.Type.GetType() {
|
||||
case schema.ColumnTypeEnumJSON:
|
||||
switch key.FieldDataType {
|
||||
case telemetrytypes.FieldDataTypeJSON:
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), false), nil
|
||||
}
|
||||
return sb.EQ(fmt.Sprintf("empty(%s)", tblFieldName), true), nil
|
||||
default:
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return sb.IsNotNull(tblFieldName), nil
|
||||
}
|
||||
if operator == qbtypes.FilterOperatorExists {
|
||||
return sb.IsNotNull(tblFieldName), nil
|
||||
} else {
|
||||
return sb.IsNull(tblFieldName), nil
|
||||
}
|
||||
case schema.ColumnTypeEnumLowCardinality:
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
package telemetrylogs
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/SigNoz/signoz-otel-collector/constants"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
|
||||
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
|
||||
)
|
||||
@@ -20,7 +17,7 @@ const (
|
||||
LogsV2TimestampColumn = "timestamp"
|
||||
LogsV2ObservedTimestampColumn = "observed_timestamp"
|
||||
LogsV2BodyColumn = "body"
|
||||
LogsV2BodyV2Column = constants.BodyV2Column
|
||||
LogsV2BodyJSONColumn = constants.BodyJSONColumn
|
||||
LogsV2BodyPromotedColumn = constants.BodyPromotedColumn
|
||||
LogsV2TraceIDColumn = "trace_id"
|
||||
LogsV2SpanIDColumn = "span_id"
|
||||
@@ -37,20 +34,11 @@ const (
|
||||
LogsV2ResourcesStringColumn = "resources_string"
|
||||
LogsV2ScopeStringColumn = "scope_string"
|
||||
|
||||
BodyV2ColumnPrefix = constants.BodyV2ColumnPrefix
|
||||
BodyJSONColumnPrefix = constants.BodyJSONColumnPrefix
|
||||
BodyPromotedColumnPrefix = constants.BodyPromotedColumnPrefix
|
||||
MessageSubColumn = "message"
|
||||
)
|
||||
|
||||
var (
|
||||
// mapping for the body logical field to the message sub-column
|
||||
BodyLogicalFieldJSONMapping = &telemetrytypes.TelemetryFieldKey{
|
||||
Name: MessageSubColumn,
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
FieldContext: telemetrytypes.FieldContextBody,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
JSONDataType: &telemetrytypes.String,
|
||||
}
|
||||
DefaultFullTextColumn = &telemetrytypes.TelemetryFieldKey{
|
||||
Name: "body",
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
@@ -130,29 +118,3 @@ var (
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
func bodyAliasExpression() string {
|
||||
if !querybuilder.BodyJSONQueryEnabled {
|
||||
return LogsV2BodyColumn
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s as body", LogsV2BodyV2Column)
|
||||
}
|
||||
|
||||
func init() {
|
||||
// the body logical field is mapped to the message field in the body context, and only with the String data type
|
||||
err := BodyLogicalFieldJSONMapping.SetJSONAccessPlan(telemetrytypes.JSONColumnMetadata{
|
||||
BaseColumn: LogsV2BodyV2Column,
|
||||
PromotedColumn: LogsV2BodyPromotedColumn,
|
||||
}, map[string][]telemetrytypes.JSONDataType{
|
||||
MessageSubColumn: {telemetrytypes.String},
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
DefaultFullTextColumn = BodyLogicalFieldJSONMapping
|
||||
IntrinsicFields["body"] = *BodyLogicalFieldJSONMapping
|
||||
}
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ var (
|
||||
"severity_text": {Name: "severity_text", Type: schema.LowCardinalityColumnType{ElementType: schema.ColumnTypeString}},
|
||||
"severity_number": {Name: "severity_number", Type: schema.ColumnTypeUInt8},
|
||||
"body": {Name: "body", Type: schema.ColumnTypeString},
|
||||
LogsV2BodyV2Column: {Name: LogsV2BodyV2Column, Type: schema.JSONColumnType{
|
||||
LogsV2BodyJSONColumn: {Name: LogsV2BodyJSONColumn, Type: schema.JSONColumnType{
|
||||
MaxDynamicTypes: utils.ToPointer(uint(32)),
|
||||
MaxDynamicPaths: utils.ToPointer(uint(0)),
|
||||
}},
|
||||
@@ -89,9 +89,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
|
||||
}
|
||||
case telemetrytypes.FieldContextBody:
|
||||
// Body context is for JSON body fields
|
||||
// Use body_v2 if feature flag is enabled
|
||||
// Use body_json if feature flag is enabled
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
return logsV2Columns[LogsV2BodyV2Column], nil
|
||||
return logsV2Columns[LogsV2BodyJSONColumn], nil
|
||||
}
|
||||
// Fall back to legacy body column
|
||||
return logsV2Columns["body"], nil
|
||||
@@ -100,9 +100,9 @@ func (m *fieldMapper) getColumn(_ context.Context, key *telemetrytypes.Telemetry
|
||||
if !ok {
|
||||
// check if the key has body JSON search
|
||||
if strings.HasPrefix(key.Name, telemetrytypes.BodyJSONStringSearchPrefix) {
|
||||
// Use body_v2 if feature flag is enabled and we have a body condition builder
|
||||
// Use body_json if feature flag is enabled and we have a body condition builder
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
return logsV2Columns[LogsV2BodyV2Column], nil
|
||||
return logsV2Columns[LogsV2BodyJSONColumn], nil
|
||||
}
|
||||
// Fall back to legacy body column
|
||||
return logsV2Columns["body"], nil
|
||||
@@ -147,9 +147,6 @@ func (m *fieldMapper) FieldFor(ctx context.Context, key *telemetrytypes.Telemetr
|
||||
}
|
||||
|
||||
return m.buildFieldForJSON(key)
|
||||
case telemetrytypes.FieldContextLog:
|
||||
// return the column name as is for log context fields
|
||||
return column.Name, nil
|
||||
default:
|
||||
return "", errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "only resource/body context fields are supported for json columns, got %s", key.FieldContext.String)
|
||||
}
|
||||
|
||||
@@ -30,7 +30,7 @@ func NewJSONConditionBuilder(key *telemetrytypes.TelemetryFieldKey, valueType te
|
||||
return &jsonConditionBuilder{key: key, valueType: telemetrytypes.MappingFieldDataTypeToJSONDataType[valueType]}
|
||||
}
|
||||
|
||||
// BuildCondition builds the full WHERE condition for body_v2 JSON paths
|
||||
// BuildCondition builds the full WHERE condition for body_json JSON paths
|
||||
func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperator, value any, sb *sqlbuilder.SelectBuilder) (string, error) {
|
||||
conditions := []string{}
|
||||
for _, node := range c.key.JSONPlan {
|
||||
@@ -40,7 +40,6 @@ func (c *jsonConditionBuilder) buildJSONCondition(operator qbtypes.FilterOperato
|
||||
}
|
||||
conditions = append(conditions, condition)
|
||||
}
|
||||
|
||||
return sb.Or(conditions...), nil
|
||||
}
|
||||
|
||||
@@ -289,9 +288,9 @@ func (c *jsonConditionBuilder) applyOperator(sb *sqlbuilder.SelectBuilder, field
|
||||
}
|
||||
return sb.NotIn(fieldExpr, values...), nil
|
||||
case qbtypes.FilterOperatorExists:
|
||||
return sb.IsNotNull(fieldExpr), nil
|
||||
return fmt.Sprintf("%s IS NOT NULL", fieldExpr), nil
|
||||
case qbtypes.FilterOperatorNotExists:
|
||||
return sb.IsNull(fieldExpr), nil
|
||||
return fmt.Sprintf("%s IS NULL", fieldExpr), nil
|
||||
// between and not between
|
||||
case qbtypes.FilterOperatorBetween, qbtypes.FilterOperatorNotBetween:
|
||||
values, ok := value.([]any)
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -203,6 +203,7 @@ func (b *logQueryStatementBuilder) adjustKeys(ctx context.Context, keys map[stri
|
||||
}
|
||||
|
||||
func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldKey, keys map[string][]*telemetrytypes.TelemetryFieldKey) []string {
|
||||
|
||||
// First check if it matches any intrinsic fields
|
||||
var intrinsicOrCalculatedField telemetrytypes.TelemetryFieldKey
|
||||
if _, ok := IntrinsicFields[key.Name]; ok {
|
||||
@@ -211,6 +212,7 @@ func (b *logQueryStatementBuilder) adjustKey(key *telemetrytypes.TelemetryFieldK
|
||||
}
|
||||
|
||||
return querybuilder.AdjustKey(key, keys, nil)
|
||||
|
||||
}
|
||||
|
||||
// buildListQuery builds a query for list panel type
|
||||
@@ -247,7 +249,11 @@ func (b *logQueryStatementBuilder) buildListQuery(
|
||||
sb.SelectMore(LogsV2SeverityNumberColumn)
|
||||
sb.SelectMore(LogsV2ScopeNameColumn)
|
||||
sb.SelectMore(LogsV2ScopeVersionColumn)
|
||||
sb.SelectMore(bodyAliasExpression())
|
||||
sb.SelectMore(LogsV2BodyColumn)
|
||||
if querybuilder.BodyJSONQueryEnabled {
|
||||
sb.SelectMore(LogsV2BodyJSONColumn)
|
||||
sb.SelectMore(LogsV2BodyPromotedColumn)
|
||||
}
|
||||
sb.SelectMore(LogsV2AttributesStringColumn)
|
||||
sb.SelectMore(LogsV2AttributesNumberColumn)
|
||||
sb.SelectMore(LogsV2AttributesBoolColumn)
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/SigNoz/signoz/pkg/errors"
|
||||
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder"
|
||||
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
|
||||
@@ -887,154 +886,3 @@ func TestAdjustKey(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestStmtBuilderBodyField(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
requestType qbtypes.RequestType
|
||||
query qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]
|
||||
enableBodyJSONQuery bool
|
||||
expected qbtypes.Statement
|
||||
expectedErr error
|
||||
}{
|
||||
{
|
||||
name: "body_exists",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body Exists"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableBodyJSONQuery: true,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') IS NOT NULL) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search."},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "body_exists_disabled",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body Exists"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableBodyJSONQuery: false,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "body_empty",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body == ''"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableBodyJSONQuery: true,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (dynamicElement(body_v2.`message`, 'String') = ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search."},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "body_empty_disabled",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body == ''"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableBodyJSONQuery: false,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "body_contains",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableBodyJSONQuery: true,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (LOWER(dynamicElement(body_v2.`message`, 'String')) LIKE LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
Warnings: []string{"You searched for body. Query Builder now defaults this to body.message:string. Check that this matches what you want to search."},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
{
|
||||
name: "body_contains_disabled",
|
||||
requestType: qbtypes.RequestTypeRaw,
|
||||
query: qbtypes.QueryBuilderQuery[qbtypes.LogAggregation]{
|
||||
Signal: telemetrytypes.SignalLogs,
|
||||
Filter: &qbtypes.Filter{Expression: "body CONTAINS 'error'"},
|
||||
Limit: 10,
|
||||
},
|
||||
enableBodyJSONQuery: false,
|
||||
expected: qbtypes.Statement{
|
||||
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
|
||||
Args: []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
|
||||
},
|
||||
expectedErr: nil,
|
||||
},
|
||||
}
|
||||
|
||||
fm := NewFieldMapper()
|
||||
cb := NewConditionBuilder(fm)
|
||||
|
||||
enable, disable := jsonQueryTestUtil(t)
|
||||
defer disable()
|
||||
for _, c := range cases {
|
||||
t.Run(c.name, func(t *testing.T) {
|
||||
if c.enableBodyJSONQuery {
|
||||
enable()
|
||||
} else {
|
||||
disable()
|
||||
}
|
||||
// build the key map after enabling/disabling body JSON query
|
||||
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
|
||||
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
|
||||
|
||||
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
|
||||
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
|
||||
statementBuilder := NewLogQueryStatementBuilder(
|
||||
instrumentationtest.New().ToProviderSettings(),
|
||||
mockMetadataStore,
|
||||
fm,
|
||||
cb,
|
||||
resourceFilterStmtBuilder,
|
||||
aggExprRewriter,
|
||||
DefaultFullTextColumn,
|
||||
GetBodyJSONKey,
|
||||
)
|
||||
|
||||
q, err := statementBuilder.Build(context.Background(), 1747947419000, 1747983448000, c.requestType, c.query, nil)
|
||||
if c.expectedErr != nil {
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), c.expectedErr.Error())
|
||||
} else {
|
||||
if err != nil {
|
||||
_, _, _, _, _, add := errors.Unwrapb(err)
|
||||
t.Logf("error additionals: %v", add)
|
||||
}
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, c.expected.Query, q.Query)
|
||||
require.Equal(t, c.expected.Args, q.Args)
|
||||
require.Equal(t, c.expected.Warnings, q.Warnings)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,6 +27,13 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
"body": {
|
||||
{
|
||||
Name: "body",
|
||||
FieldContext: telemetrytypes.FieldContextLog,
|
||||
FieldDataType: telemetrytypes.FieldDataTypeString,
|
||||
},
|
||||
},
|
||||
"http.status_code": {
|
||||
{
|
||||
Name: "http.status_code",
|
||||
@@ -938,11 +945,6 @@ func buildCompleteFieldKeyMap() map[string][]*telemetrytypes.TelemetryFieldKey {
|
||||
key.Signal = telemetrytypes.SignalLogs
|
||||
}
|
||||
}
|
||||
|
||||
// add intrinsic fields to the map
|
||||
for fieldName, key := range IntrinsicFields {
|
||||
keysMap[fieldName] = append(keysMap[fieldName], &key)
|
||||
}
|
||||
return keysMap
|
||||
}
|
||||
|
||||
|
||||
@@ -254,7 +254,7 @@ func buildListLogsJSONIndexesQuery(cluster string, filters ...string) (string, [
|
||||
sb.Where(sb.Equal("database", telemetrylogs.DBName))
|
||||
sb.Where(sb.Equal("table", telemetrylogs.LogsV2LocalTableName))
|
||||
sb.Where(sb.Or(
|
||||
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix))),
|
||||
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix))),
|
||||
sb.ILike("expr", fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix))),
|
||||
))
|
||||
|
||||
@@ -338,7 +338,7 @@ func (t *telemetryMetaStore) ListJSONValues(ctx context.Context, path string, li
|
||||
if promoted {
|
||||
path = telemetrylogs.BodyPromotedColumnPrefix + path
|
||||
} else {
|
||||
path = telemetrylogs.BodyV2ColumnPrefix + path
|
||||
path = telemetrylogs.BodyJSONColumnPrefix + path
|
||||
}
|
||||
|
||||
from := fmt.Sprintf("%s.%s", telemetrylogs.DBName, telemetrylogs.LogsV2TableName)
|
||||
@@ -529,7 +529,7 @@ func (t *telemetryMetaStore) GetPromotedPaths(ctx context.Context, paths ...stri
|
||||
// TODO(Piyush): Remove this function
|
||||
func CleanPathPrefixes(path string) string {
|
||||
path = strings.TrimPrefix(path, telemetrytypes.BodyJSONStringSearchPrefix)
|
||||
path = strings.TrimPrefix(path, telemetrylogs.BodyV2ColumnPrefix)
|
||||
path = strings.TrimPrefix(path, telemetrylogs.BodyJSONColumnPrefix)
|
||||
path = strings.TrimPrefix(path, telemetrylogs.BodyPromotedColumnPrefix)
|
||||
return path
|
||||
}
|
||||
|
||||
@@ -117,7 +117,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
|
||||
expectedArgs: []any{
|
||||
telemetrylogs.DBName,
|
||||
telemetrylogs.LogsV2LocalTableName,
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
|
||||
},
|
||||
},
|
||||
@@ -130,7 +130,7 @@ func TestBuildListLogsJSONIndexesQuery(t *testing.T) {
|
||||
expectedArgs: []any{
|
||||
telemetrylogs.DBName,
|
||||
telemetrylogs.LogsV2LocalTableName,
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyV2ColumnPrefix)),
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyJSONColumnPrefix)),
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains(constants.BodyPromotedColumnPrefix)),
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("foo")),
|
||||
fmt.Sprintf("%%%s%%", querybuilder.FormatValueForContains("bar")),
|
||||
|
||||
@@ -100,7 +100,7 @@ func NewTelemetryMetaStore(
|
||||
jsonColumnMetadata: map[telemetrytypes.Signal]map[telemetrytypes.FieldContext]telemetrytypes.JSONColumnMetadata{
|
||||
telemetrytypes.SignalLogs: {
|
||||
telemetrytypes.FieldContextBody: telemetrytypes.JSONColumnMetadata{
|
||||
BaseColumn: telemetrylogs.LogsV2BodyV2Column,
|
||||
BaseColumn: telemetrylogs.LogsV2BodyJSONColumn,
|
||||
PromotedColumn: telemetrylogs.LogsV2BodyPromotedColumn,
|
||||
},
|
||||
},
|
||||
|
||||
@@ -7,6 +7,6 @@ const (
	AttributesMetadataTableName      = "distributed_attributes_metadata"
	AttributesMetadataLocalTableName = "attributes_metadata"
	PathTypesTableName               = otelcollectorconst.DistributedPathTypesTable
-	PromotedPathsTableName           = "distributed_json_promoted_paths"
+	PromotedPathsTableName           = otelcollectorconst.DistributedPromotedPathsTable
	SkipIndexTableName               = "system.data_skipping_indices"
)
@@ -36,8 +36,8 @@ func (i *PromotePath) ValidateAndSetDefaults() error {
		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "array paths can not be promoted or indexed")
	}

-	if strings.HasPrefix(i.Path, constants.BodyV2ColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
-		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyV2ColumnPrefix, constants.BodyPromotedColumnPrefix)
+	if strings.HasPrefix(i.Path, constants.BodyJSONColumnPrefix) || strings.HasPrefix(i.Path, constants.BodyPromotedColumnPrefix) {
+		return errors.Newf(errors.TypeInvalidInput, errors.CodeInvalidInput, "`%s`, `%s` don't add these prefixes to the path", constants.BodyJSONColumnPrefix, constants.BodyPromotedColumnPrefix)
	}

	if !strings.HasPrefix(i.Path, telemetrytypes.BodyJSONStringSearchPrefix) {
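A compact sketch of this validation in isolation. The prefix values are assumed, and the truncated branch at the end of the hunk is assumed to reject paths that lack the user-facing body prefix.

package main

import (
	"errors"
	"fmt"
	"strings"
)

// Assumed prefix values, for illustration only.
const (
	bodyJSONColumnPrefix     = "body_json."
	bodyPromotedColumnPrefix = "body_json_promoted."
	bodyStringSearchPrefix   = "body."
)

// validatePromotePath rejects paths that already carry a storage-column prefix
// and requires the user-facing body prefix, echoing ValidateAndSetDefaults.
func validatePromotePath(path string) error {
	if strings.HasPrefix(path, bodyJSONColumnPrefix) || strings.HasPrefix(path, bodyPromotedColumnPrefix) {
		return fmt.Errorf("`%s`, `%s` don't add these prefixes to the path", bodyJSONColumnPrefix, bodyPromotedColumnPrefix)
	}
	if !strings.HasPrefix(path, bodyStringSearchPrefix) {
		return errors.New("path must use the body prefix")
	}
	return nil
}

func main() {
	fmt.Println(validatePromotePath("body.user.name"))             // <nil>
	fmt.Println(validatePromotePath("body_json.user.name") != nil) // true
}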
@@ -197,15 +197,6 @@ func (f FilterOperator) IsArrayOperator() bool {
	}
}

-func (f FilterOperator) IsOpValidForJSON() bool {
-	switch f {
-	case FilterOperatorExists, FilterOperatorNotExists, FilterOperatorContains, FilterOperatorNotContains:
-		return true
-	default:
-		return false
-	}
-}
-
type OrderDirection struct {
	valuer.String
}
@@ -62,10 +62,6 @@ func (f *TelemetryFieldKey) ArrayParentPaths() []string {
	return paths
}

-func (f *TelemetryFieldKey) MustBuildJSONCondition() bool {
-	return f.FieldDataType != FieldDataTypeJSON && f.JSONDataType != nil
-}
-
func (f *TelemetryFieldKey) ArrayParentSelectors() []*FieldKeySelector {
	paths := f.ArrayParentPaths()
	selectors := make([]*FieldKeySelector, 0, len(paths))
@@ -21,7 +21,6 @@
	// int64 and number are synonyms for float64
	FieldDataTypeInt64       = FieldDataType{valuer.NewString("int64")}
	FieldDataTypeNumber      = FieldDataType{valuer.NewString("number")}
-	FieldDataTypeJSON        = FieldDataType{valuer.NewString("json")}
	FieldDataTypeUnspecified = FieldDataType{valuer.NewString("")}

	FieldDataTypeArrayString = FieldDataType{valuer.NewString("[]string")}
@@ -40,7 +40,7 @@ type JSONAccessNode struct {
	// Node information
	Name       string
	IsTerminal bool
-	isRoot     bool // marked true for only body_v2 and body_promoted
+	isRoot     bool // marked true for only body_json and body_json_promoted

	// Precomputed type information (single source of truth)
	AvailableTypes []JSONDataType
@@ -1,19 +1,12 @@
package telemetrytypes

import (
	"fmt"
	"testing"

-	otelconstants "github.com/SigNoz/signoz-otel-collector/constants"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

-const (
-	bodyV2Column       = otelconstants.BodyV2Column
-	bodyPromotedColumn = otelconstants.BodyPromotedColumn
-)
-
// ============================================================================
// Helper Functions for Test Data Creation
// ============================================================================
@@ -116,8 +109,8 @@ func TestNode_Alias(t *testing.T) {
	}{
		{
			name:     "Root node returns name as-is",
-			node:     NewRootJSONAccessNode(bodyV2Column, 32, 0),
-			expected: bodyV2Column,
+			node:     NewRootJSONAccessNode("body_json", 32, 0),
+			expected: "body_json",
		},
		{
			name: "Node without parent returns backticked name",
@@ -131,9 +124,9 @@ func TestNode_Alias(t *testing.T) {
			name: "Node with root parent uses dot separator",
			node: &JSONAccessNode{
				Name:   "age",
-				Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+				Parent: NewRootJSONAccessNode("body_json", 32, 0),
			},
-			expected: "`" + bodyV2Column + ".age`",
+			expected: "`" + "body_json" + ".age`",
		},
		{
			name: "Node with non-root parent uses array separator",
@@ -141,10 +134,10 @@ func TestNode_Alias(t *testing.T) {
				Name: "name",
				Parent: &JSONAccessNode{
					Name:   "education",
-					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
-			expected: "`" + bodyV2Column + ".education[].name`",
+			expected: "`" + "body_json" + ".education[].name`",
		},
		{
			name: "Nested array path with multiple levels",
@@ -154,11 +147,11 @@ func TestNode_Alias(t *testing.T) {
					Name: "awards",
					Parent: &JSONAccessNode{
						Name:   "education",
-						Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+						Parent: NewRootJSONAccessNode("body_json", 32, 0),
					},
				},
			},
-			expected: "`" + bodyV2Column + ".education[].awards[].type`",
+			expected: "`" + "body_json" + ".education[].awards[].type`",
		},
	}
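Reading the expectations above, the alias rule appears to be: a root node is referenced by its bare column name, a child of the root joins with a dot, and deeper levels join with an array separator, with non-root aliases wrapped in backticks. A standalone sketch of that rule as inferred from the tests, not the repository's implementation:

package main

import "fmt"

// node is a simplified stand-in for JSONAccessNode, carrying just enough to
// illustrate the alias rule inferred from the test expectations.
type node struct {
	name   string
	parent *node
	isRoot bool
}

// alias mirrors the expected outputs: root -> bare name, child of root ->
// `root.child`, deeper levels -> parent alias joined with a "[]." separator.
func alias(n *node) string {
	if n.isRoot {
		return n.name
	}
	if n.parent == nil {
		return "`" + n.name + "`"
	}
	if n.parent.isRoot {
		return "`" + n.parent.name + "." + n.name + "`"
	}
	inner := alias(n.parent)
	inner = inner[1 : len(inner)-1] // drop the surrounding backticks
	return "`" + inner + "[]." + n.name + "`"
}

func main() {
	root := &node{name: "body_json", isRoot: true}
	education := &node{name: "education", parent: root}
	name := &node{name: "name", parent: education}
	fmt.Println(alias(root))      // body_json
	fmt.Println(alias(education)) // `body_json.education`
	fmt.Println(alias(name))      // `body_json.education[].name`
}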
@@ -180,18 +173,18 @@ func TestNode_FieldPath(t *testing.T) {
			name: "Simple field path from root",
			node: &JSONAccessNode{
				Name:   "user",
-				Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+				Parent: NewRootJSONAccessNode("body_json", 32, 0),
			},
			// FieldPath() always wraps the field name in backticks
-			expected: bodyV2Column + ".`user`",
+			expected: "body_json" + ".`user`",
		},
		{
			name: "Field path with backtick-required key",
			node: &JSONAccessNode{
				Name:   "user-name", // requires backtick
-				Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+				Parent: NewRootJSONAccessNode("body_json", 32, 0),
			},
-			expected: bodyV2Column + ".`user-name`",
+			expected: "body_json" + ".`user-name`",
		},
		{
			name: "Nested field path",
@@ -199,11 +192,11 @@ func TestNode_FieldPath(t *testing.T) {
				Name: "age",
				Parent: &JSONAccessNode{
					Name:   "user",
-					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
-			expected: "`" + bodyV2Column + ".user`.`age`",
+			expected: "`" + "body_json" + ".user`.`age`",
		},
		{
			name: "Array element field path",
@@ -211,11 +204,11 @@ func TestNode_FieldPath(t *testing.T) {
				Name: "name",
				Parent: &JSONAccessNode{
					Name:   "education",
-					Parent: NewRootJSONAccessNode(bodyV2Column, 32, 0),
+					Parent: NewRootJSONAccessNode("body_json", 32, 0),
				},
			},
			// FieldPath() always wraps the field name in backticks
-			expected: "`" + bodyV2Column + ".education`.`name`",
+			expected: "`" + "body_json" + ".education`.`name`",
		},
	}
@@ -243,36 +236,36 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
		{
			name: "Simple path not promoted",
			key:  makeKey("user.name", String, false),
-			expectedYAML: fmt.Sprintf(`
+			expectedYAML: `
  - name: user.name
-    column: %s
+    column: body_json
    availableTypes:
    - String
    maxDynamicTypes: 16
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
		{
			name: "Simple path promoted",
			key:  makeKey("user.name", String, true),
-			expectedYAML: fmt.Sprintf(`
+			expectedYAML: `
  - name: user.name
-    column: %s
+    column: body_json
    availableTypes:
    - String
    maxDynamicTypes: 16
    isTerminal: true
    elemType: String
  - name: user.name
-    column: %s
+    column: body_json_promoted
    availableTypes:
    - String
    maxDynamicTypes: 16
    maxDynamicPaths: 256
    isTerminal: true
    elemType: String
-`, bodyV2Column, bodyPromotedColumn),
+`,
		},
		{
			name: "Empty path returns error",
@@ -285,8 +278,8 @@ func TestPlanJSON_BasicStructure(t *testing.T) {
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := tt.key.SetJSONAccessPlan(JSONColumnMetadata{
-				BaseColumn:     bodyV2Column,
-				PromotedColumn: bodyPromotedColumn,
+				BaseColumn:     "body_json",
+				PromotedColumn: "body_json_promoted",
			}, types)
			if tt.expectErr {
				require.Error(t, err)
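Each entry in the expected plans serializes one node of the JSON access plan. Below is a hedged sketch of a struct whose YAML form uses the same keys seen in these tests (name, column, availableTypes, maxDynamicTypes, maxDynamicPaths, isTerminal, elemType); the repository's actual type may differ.

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// planNode is an illustrative type whose YAML rendering matches the keys that
// appear in the expected plans. It is an assumption, not the repository's
// JSONAccessNode.
type planNode struct {
	Name            string   `yaml:"name"`
	Column          string   `yaml:"column"`
	AvailableTypes  []string `yaml:"availableTypes"`
	MaxDynamicTypes int      `yaml:"maxDynamicTypes"`
	MaxDynamicPaths int      `yaml:"maxDynamicPaths,omitempty"`
	IsTerminal      bool     `yaml:"isTerminal"`
	ElemType        string   `yaml:"elemType"`
}

func main() {
	plan := []planNode{{
		Name:            "user.name",
		Column:          "body_json",
		AvailableTypes:  []string{"String"},
		MaxDynamicTypes: 16,
		IsTerminal:      true,
		ElemType:        "String",
	}}
	out, err := yaml.Marshal(plan)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}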
@@ -311,9 +304,9 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
		{
			name: "Single array level - JSON branch only",
			path: "education[].name",
-			expectedYAML: fmt.Sprintf(`
+			expectedYAML: `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -325,14 +318,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
    maxDynamicTypes: 8
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
		{
			name: "Single array level - both JSON and Dynamic branches",
			path: "education[].awards[].type",
-			expectedYAML: fmt.Sprintf(`
+			expectedYAML: `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -359,14 +352,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
    maxDynamicPaths: 256
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
		{
			name: "Deeply nested array path",
			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
-			expectedYAML: fmt.Sprintf(`
+			expectedYAML: `
  - name: interests
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -406,14 +399,14 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
    - String
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
		{
			name: "ArrayAnyIndex replacement [*] to []",
			path: "education[*].name",
-			expectedYAML: fmt.Sprintf(`
+			expectedYAML: `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -425,7 +418,7 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
    maxDynamicTypes: 8
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
	}
@@ -433,8 +426,8 @@ func TestPlanJSON_ArrayPaths(t *testing.T) {
		t.Run(tt.name, func(t *testing.T) {
			key := makeKey(tt.path, String, false)
			err := key.SetJSONAccessPlan(JSONColumnMetadata{
-				BaseColumn:     bodyV2Column,
-				PromotedColumn: bodyPromotedColumn,
+				BaseColumn:     "body_json",
+				PromotedColumn: "body_json_promoted",
			}, types)
			require.NoError(t, err)
			require.NotNil(t, key.JSONPlan)
@@ -452,15 +445,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
	t.Run("Non-promoted plan", func(t *testing.T) {
		key := makeKey(path, String, false)
		err := key.SetJSONAccessPlan(JSONColumnMetadata{
-			BaseColumn:     bodyV2Column,
-			PromotedColumn: bodyPromotedColumn,
+			BaseColumn:     "body_json",
+			PromotedColumn: "body_json_promoted",
		}, types)
		require.NoError(t, err)
		require.Len(t, key.JSONPlan, 1)

-		expectedYAML := fmt.Sprintf(`
+		expectedYAML := `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -487,7 +480,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
    maxDynamicPaths: 256
    isTerminal: true
    elemType: String
-`, bodyV2Column)
+`
		got := plansToYAML(t, key.JSONPlan)
		require.YAMLEq(t, expectedYAML, got)
	})
@@ -495,15 +488,15 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
	t.Run("Promoted plan", func(t *testing.T) {
		key := makeKey(path, String, true)
		err := key.SetJSONAccessPlan(JSONColumnMetadata{
-			BaseColumn:     bodyV2Column,
-			PromotedColumn: bodyPromotedColumn,
+			BaseColumn:     "body_json",
+			PromotedColumn: "body_json_promoted",
		}, types)
		require.NoError(t, err)
		require.Len(t, key.JSONPlan, 2)

-		expectedYAML := fmt.Sprintf(`
+		expectedYAML := `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -531,7 +524,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
    isTerminal: true
    elemType: String
  - name: education
-    column: %s
+    column: body_json_promoted
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -561,7 +554,7 @@ func TestPlanJSON_PromotedVsNonPromoted(t *testing.T) {
    maxDynamicPaths: 256
    isTerminal: true
    elemType: String
-`, bodyV2Column, bodyPromotedColumn)
+`
		got := plansToYAML(t, key.JSONPlan)
		require.YAMLEq(t, expectedYAML, got)
	})
@@ -582,11 +575,11 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
			expectErr: true,
		},
		{
-			name: "Very deep nesting - validates progression doesn't go negative",
-			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
-			expectedYAML: fmt.Sprintf(`
+			name: "Very deep nesting - validates progression doesn't go negative",
+			path: "interests[].entities[].reviews[].entries[].metadata[].positions[].name",
+			expectedYAML: `
  - name: interests
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -626,14 +619,14 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
    - String
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
		{
-			name: "Path with mixed scalar and array types",
-			path: "education[].type",
-			expectedYAML: fmt.Sprintf(`
+			name: "Path with mixed scalar and array types",
+			path: "education[].type",
+			expectedYAML: `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -646,20 +639,20 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
    maxDynamicTypes: 8
    isTerminal: true
    elemType: String
-`, bodyV2Column),
+`,
		},
		{
-			name: "Exists with only array types available",
-			path: "education",
-			expectedYAML: fmt.Sprintf(`
+			name: "Exists with only array types available",
+			path: "education",
+			expectedYAML: `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
    isTerminal: true
    elemType: Array(JSON)
-`, bodyV2Column),
+`,
		},
	}
@@ -675,8 +668,8 @@ func TestPlanJSON_EdgeCases(t *testing.T) {
			}
			key := makeKey(tt.path, keyType, false)
			err := key.SetJSONAccessPlan(JSONColumnMetadata{
-				BaseColumn:     bodyV2Column,
-				PromotedColumn: bodyPromotedColumn,
+				BaseColumn:     "body_json",
+				PromotedColumn: "body_json_promoted",
			}, types)
			if tt.expectErr {
				require.Error(t, err)
@@ -694,15 +687,15 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
	path := "education[].awards[].participated[].team[].branch"
	key := makeKey(path, String, false)
	err := key.SetJSONAccessPlan(JSONColumnMetadata{
-		BaseColumn:     bodyV2Column,
-		PromotedColumn: bodyPromotedColumn,
+		BaseColumn:     "body_json",
+		PromotedColumn: "body_json_promoted",
	}, types)
	require.NoError(t, err)
	require.Len(t, key.JSONPlan, 1)

-	expectedYAML := fmt.Sprintf(`
+	expectedYAML := `
  - name: education
-    column: %s
+    column: body_json
    availableTypes:
    - Array(JSON)
    maxDynamicTypes: 16
@@ -787,7 +780,7 @@ func TestPlanJSON_TreeStructure(t *testing.T) {
    maxDynamicPaths: 64
    isTerminal: true
    elemType: String
-`, bodyV2Column)
+`

	got := plansToYAML(t, key.JSONPlan)
	require.YAMLEq(t, expectedYAML, got)
@@ -2,7 +2,6 @@ package telemetrytypestest

import (
	"context"
-	"slices"
	"strings"

	schemamigrator "github.com/SigNoz/signoz-otel-collector/cmd/signozschemamigrator/schema_migrator"
@@ -177,9 +176,8 @@ func matchesKey(selector *telemetrytypes.FieldKeySelector, key *telemetrytypes.T
		return true
	}

-	matchNameExceptions := []string{"body"}
	// Check name (already checked in matchesName, but double-check here)
-	if selector.Name != "" && !matchesName(selector, key.Name) && slices.Contains(matchNameExceptions, key.Name) {
+	if selector.Name != "" && !matchesName(selector, key.Name) {
		return false
	}
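A minimal sketch of the simplified check, with a stubbed matchesName; the selector and key types here are simplified assumptions, not the telemetrytypes definitions.

package main

import (
	"fmt"
	"strings"
)

// Assumed, simplified stand-ins for the selector and key types used by matchesKey.
type fieldKeySelector struct{ Name string }
type telemetryFieldKey struct{ Name string }

// matchesName is a stub: here it just does a case-insensitive prefix match.
func matchesName(selector *fieldKeySelector, name string) bool {
	return strings.HasPrefix(strings.ToLower(name), strings.ToLower(selector.Name))
}

// matchesKey mirrors the simplified condition after the diff: a non-empty
// selector name that does not match the key name rejects the key, with no
// special-case list of exception names.
func matchesKey(selector *fieldKeySelector, key *telemetryFieldKey) bool {
	if selector.Name != "" && !matchesName(selector, key.Name) {
		return false
	}
	return true
}

func main() {
	sel := &fieldKeySelector{Name: "body"}
	fmt.Println(matchesKey(sel, &telemetryFieldKey{Name: "body.user"}))     // true
	fmt.Println(matchesKey(sel, &telemetryFieldKey{Name: "resource.name"})) // false
}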