Compare commits

..

2 Commits

Author SHA1 Message Date
Srikanth Chekuri
1eb27d0bc4 chore: add schema version specific validations (#10808)
Some checks are pending
build-staging / prepare (push) Waiting to run
build-staging / js-build (push) Blocked by required conditions
build-staging / go-build (push) Blocked by required conditions
build-staging / staging (push) Blocked by required conditions
Release Drafter / update_release_draft (push) Waiting to run
* chore: add schema version specific validations

* chore: address review comments

* chore: additional checks

* chore: address lint

* chore: more lint
2026-04-06 10:45:41 +00:00
Vikrant Gupta
55cce3c708 feat(authz): accept singular roles for user and service accounts (#10827)
* feat(authz): accept singular roles for user and service accounts

* feat(authz): update integration tests

* feat(authz): update integration tests

* feat: move role management to a single select flow on members and service account pages(temporarily)

* feat(authz): enable stats reporter for service accounts

* feat(authz): identity call for activating/deleting user

---------

Co-authored-by: SagarRajput-7 <sagar@signoz.io>
2026-04-06 10:41:56 +00:00
53 changed files with 3081 additions and 1015 deletions

View File

@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"log/slog"
"strings"
"sync"
"time"
@@ -64,12 +63,12 @@ func NewAnomalyRule(
BaseRule: baseRule,
}
switch strings.ToLower(p.RuleCondition.Seasonality) {
case "hourly":
switch p.RuleCondition.Seasonality {
case ruletypes.SeasonalityHourly:
t.seasonality = anomaly.SeasonalityHourly
case "daily":
case ruletypes.SeasonalityDaily:
t.seasonality = anomaly.SeasonalityDaily
case "weekly":
case ruletypes.SeasonalityWeekly:
t.seasonality = anomaly.SeasonalityWeekly
default:
t.seasonality = anomaly.SeasonalityDaily

View File

@@ -67,7 +67,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
}},
},
SelectedQuery: "A",
Seasonality: "daily",
Seasonality: ruletypes.SeasonalityDaily,
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{
@@ -170,7 +170,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
}},
},
SelectedQuery: "A",
Seasonality: "daily",
Seasonality: ruletypes.SeasonalityDaily,
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{

View File

@@ -83,37 +83,6 @@
opacity: 0.6;
}
&__role-select {
width: 100%;
.ant-select-selector {
background-color: var(--l2-background) !important;
border-color: var(--border) !important;
border-radius: 2px;
padding: var(--padding-1) var(--padding-2) !important;
display: flex;
align-items: center;
flex-wrap: wrap;
min-height: 32px;
height: auto !important;
}
.ant-select-selection-item {
font-size: var(--font-size-sm);
color: var(--l1-foreground);
line-height: 22px;
letter-spacing: -0.07px;
}
.ant-select-arrow {
color: var(--foreground);
}
&:not(.ant-select-disabled):hover .ant-select-selector {
border-color: var(--foreground);
}
}
&__meta {
display: flex;
flex-direction: column;

View File

@@ -61,10 +61,6 @@ function toSaveApiError(err: unknown): APIError {
);
}
/**
 * Order-insensitive equality check for two string arrays.
 * Sorted copies are compared element-by-element, so neither input is mutated.
 */
function areSortedArraysEqual(a: string[], b: string[]): boolean {
  if (a.length !== b.length) {
    return false;
  }
  const sortedA = [...a].sort();
  const sortedB = [...b].sort();
  return sortedA.every((item, index) => item === sortedB[index]);
}
export interface EditMemberDrawerProps {
member: MemberRow | null;
open: boolean;
@@ -83,7 +79,7 @@ function EditMemberDrawer({
const { user: currentUser } = useAppContext();
const [localDisplayName, setLocalDisplayName] = useState('');
const [localRoles, setLocalRoles] = useState<string[]>([]);
const [localRole, setLocalRole] = useState('');
const [isSaving, setIsSaving] = useState(false);
const [saveErrors, setSaveErrors] = useState<SaveError[]>([]);
const [isGeneratingLink, setIsGeneratingLink] = useState(false);
@@ -133,14 +129,14 @@ function EditMemberDrawer({
}, [fetchedUserId, fetchedUserDisplayName, member?.name]);
useEffect(() => {
setLocalRoles(fetchedRoleIds);
setLocalRole(fetchedRoleIds[0] ?? '');
}, [fetchedRoleIds]);
const isDirty =
member !== null &&
fetchedUser != null &&
(localDisplayName !== fetchedDisplayName ||
!areSortedArraysEqual(localRoles, fetchedRoleIds));
localRole !== (fetchedRoleIds[0] ?? ''));
const { mutateAsync: updateMyUser } = useUpdateMyUserV2();
const { mutateAsync: updateUser } = useUpdateUser();
@@ -224,7 +220,7 @@ function EditMemberDrawer({
setIsSaving(true);
try {
const nameChanged = localDisplayName !== fetchedDisplayName;
const rolesChanged = !areSortedArraysEqual(localRoles, fetchedRoleIds);
const rolesChanged = localRole !== (fetchedRoleIds[0] ?? '');
const namePromise = nameChanged
? isSelf
@@ -237,7 +233,9 @@ function EditMemberDrawer({
const [nameResult, rolesResult] = await Promise.allSettled([
namePromise,
rolesChanged ? applyDiff(localRoles, availableRoles) : Promise.resolve([]),
rolesChanged
? applyDiff([localRole].filter(Boolean), availableRoles)
: Promise.resolve([]),
]);
const errors: SaveError[] = [];
@@ -255,7 +253,10 @@ function EditMemberDrawer({
context: 'Roles update',
apiError: toSaveApiError(rolesResult.reason),
onRetry: async (): Promise<void> => {
const failures = await applyDiff(localRoles, availableRoles);
const failures = await applyDiff(
[localRole].filter(Boolean),
availableRoles,
);
setSaveErrors((prev) => {
const rest = prev.filter((e) => e.context !== 'Roles update');
return [
@@ -303,7 +304,7 @@ function EditMemberDrawer({
isDirty,
isSelf,
localDisplayName,
localRoles,
localRole,
fetchedDisplayName,
fetchedRoleIds,
updateMyUser,
@@ -445,15 +446,10 @@ function EditMemberDrawer({
>
<div className="edit-member-drawer__input-wrapper edit-member-drawer__input-wrapper--disabled">
<div className="edit-member-drawer__disabled-roles">
{localRoles.length > 0 ? (
localRoles.map((roleId) => {
const role = availableRoles.find((r) => r.id === roleId);
return (
<Badge key={roleId} color="vanilla">
{role?.name ?? roleId}
</Badge>
);
})
{localRole ? (
<Badge color="vanilla">
{availableRoles.find((r) => r.id === localRole)?.name ?? localRole}
</Badge>
) : (
<span className="edit-member-drawer__email-text"></span>
)}
@@ -464,15 +460,14 @@ function EditMemberDrawer({
) : (
<RolesSelect
id="member-role"
mode="multiple"
roles={availableRoles}
loading={rolesLoading}
isError={rolesError}
error={rolesErrorObj}
onRefetch={refetchRoles}
value={localRoles}
onChange={(roles): void => {
setLocalRoles(roles);
value={localRole}
onChange={(role): void => {
setLocalRole(role);
setSaveErrors((prev) =>
prev.filter(
(err) =>
@@ -480,8 +475,7 @@ function EditMemberDrawer({
),
);
}}
className="edit-member-drawer__role-select"
placeholder="Select roles"
placeholder="Select role"
/>
)}
</div>

View File

@@ -5,7 +5,6 @@ import {
getResetPasswordToken,
useDeleteUser,
useGetUser,
useRemoveUserRoleByUserIDAndRoleID,
useSetRoleByUserID,
useUpdateMyUserV2,
useUpdateUser,
@@ -56,7 +55,6 @@ jest.mock('api/generated/services/users', () => ({
useUpdateUser: jest.fn(),
useUpdateMyUserV2: jest.fn(),
useSetRoleByUserID: jest.fn(),
useRemoveUserRoleByUserIDAndRoleID: jest.fn(),
getResetPasswordToken: jest.fn(),
}));
@@ -171,10 +169,6 @@ describe('EditMemberDrawer', () => {
mutateAsync: jest.fn().mockResolvedValue({}),
isLoading: false,
});
(useRemoveUserRoleByUserIDAndRoleID as jest.Mock).mockReturnValue({
mutateAsync: jest.fn().mockResolvedValue({}),
isLoading: false,
});
(useDeleteUser as jest.Mock).mockReturnValue({
mutate: mockDeleteMutate,
isLoading: false,
@@ -248,7 +242,7 @@ describe('EditMemberDrawer', () => {
expect(onClose).not.toHaveBeenCalled();
});
it('calls setRole when a new role is added', async () => {
it('selecting a different role calls setRole with the new role name', async () => {
const onComplete = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
const mockSet = jest.fn().mockResolvedValue({});
@@ -277,32 +271,30 @@ describe('EditMemberDrawer', () => {
});
});
it('calls removeRole when an existing role is removed', async () => {
it('does not call removeRole when the role is changed', async () => {
const onComplete = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
const mockRemove = jest.fn().mockResolvedValue({});
const mockSet = jest.fn().mockResolvedValue({});
(useRemoveUserRoleByUserIDAndRoleID as jest.Mock).mockReturnValue({
mutateAsync: mockRemove,
(useSetRoleByUserID as jest.Mock).mockReturnValue({
mutateAsync: mockSet,
isLoading: false,
});
renderDrawer({ onComplete });
// Wait for the signoz-admin tag to appear, then click its remove button
const adminTag = await screen.findByTitle('signoz-admin');
const removeBtn = adminTag.querySelector(
'.ant-select-selection-item-remove',
) as Element;
await user.click(removeBtn);
// Switch from signoz-admin to signoz-viewer using single-select
await user.click(screen.getByLabelText('Roles'));
await user.click(await screen.findByTitle('signoz-viewer'));
const saveBtn = screen.getByRole('button', { name: /save member details/i });
await waitFor(() => expect(saveBtn).not.toBeDisabled());
await user.click(saveBtn);
await waitFor(() => {
expect(mockRemove).toHaveBeenCalledWith({
pathParams: { id: 'user-1', roleId: managedRoles[0].id },
expect(mockSet).toHaveBeenCalledWith({
pathParams: { id: 'user-1' },
data: { name: 'signoz-viewer' },
});
expect(onComplete).toHaveBeenCalled();
});

View File

@@ -88,3 +88,13 @@
color: var(--destructive);
}
}
.roles-single-select {
.ant-select-selector {
min-height: 32px;
background-color: var(--l2-background) !important;
border: 1px solid var(--border) !important;
border-radius: 2px;
padding: 2px var(--padding-2) !important;
}
}

View File

@@ -158,10 +158,10 @@ function RolesSelect(props: RolesSelectProps): JSX.Element {
return (
<Select
id={id}
value={value}
value={value || undefined}
onChange={onChange}
placeholder={placeholder}
className={cx('roles-select', className)}
className={cx('roles-single-select', className)}
loading={loading}
notFoundContent={notFoundContent}
options={options}

View File

@@ -16,8 +16,8 @@ interface OverviewTabProps {
account: ServiceAccountRow;
localName: string;
onNameChange: (v: string) => void;
localRoles: string[];
onRolesChange: (v: string[]) => void;
localRole: string;
onRoleChange: (v: string) => void;
isDisabled: boolean;
availableRoles: AuthtypesRoleDTO[];
rolesLoading?: boolean;
@@ -31,8 +31,8 @@ function OverviewTab({
account,
localName,
onNameChange,
localRoles,
onRolesChange,
localRole,
onRoleChange,
isDisabled,
availableRoles,
rolesLoading,
@@ -96,15 +96,10 @@ function OverviewTab({
{isDisabled ? (
<div className="sa-drawer__input-wrapper sa-drawer__input-wrapper--disabled">
<div className="sa-drawer__disabled-roles">
{localRoles.length > 0 ? (
localRoles.map((roleId) => {
const role = availableRoles.find((r) => r.id === roleId);
return (
<Badge key={roleId} color="vanilla">
{role?.name ?? roleId}
</Badge>
);
})
{localRole ? (
<Badge color="vanilla">
{availableRoles.find((r) => r.id === localRole)?.name ?? localRole}
</Badge>
) : (
<span className="sa-drawer__input-text"></span>
)}
@@ -114,15 +109,14 @@ function OverviewTab({
) : (
<RolesSelect
id="sa-roles"
mode="multiple"
roles={availableRoles}
loading={rolesLoading}
isError={rolesError}
error={rolesErrorObj}
onRefetch={onRefetchRoles}
value={localRoles}
onChange={onRolesChange}
placeholder="Select roles"
value={localRole}
onChange={onRoleChange}
placeholder="Select role"
/>
)}
</div>

View File

@@ -80,7 +80,7 @@ function ServiceAccountDrawer({
parseAsBoolean.withDefault(false),
);
const [localName, setLocalName] = useState('');
const [localRoles, setLocalRoles] = useState<string[]>([]);
const [localRole, setLocalRole] = useState('');
const [isSaving, setIsSaving] = useState(false);
const [saveErrors, setSaveErrors] = useState<SaveError[]>([]);
@@ -116,7 +116,7 @@ function ServiceAccountDrawer({
}, [account?.id, account?.name, setKeysPage]);
useEffect(() => {
setLocalRoles(currentRoles.map((r) => r.id).filter(Boolean) as string[]);
setLocalRole(currentRoles[0]?.id ?? '');
}, [currentRoles]);
const isDeleted =
@@ -125,8 +125,7 @@ function ServiceAccountDrawer({
const isDirty =
account !== null &&
(localName !== (account.name ?? '') ||
JSON.stringify([...localRoles].sort()) !==
JSON.stringify([...currentRoles.map((r) => r.id).filter(Boolean)].sort()));
localRole !== (currentRoles[0]?.id ?? ''));
const {
roles: availableRoles,
@@ -216,7 +215,10 @@ function ServiceAccountDrawer({
const retryRolesUpdate = useCallback(async (): Promise<void> => {
try {
const failures = await applyDiff(localRoles, availableRoles);
const failures = await applyDiff(
[localRole].filter(Boolean),
availableRoles,
);
if (failures.length === 0) {
setSaveErrors((prev) => prev.filter((e) => e.context !== 'Roles update'));
} else {
@@ -240,7 +242,7 @@ function ServiceAccountDrawer({
),
);
}
}, [localRoles, availableRoles, applyDiff, toSaveApiError, makeRoleRetry]);
}, [localRole, availableRoles, applyDiff, toSaveApiError, makeRoleRetry]);
const handleSave = useCallback(async (): Promise<void> => {
if (!account || !isDirty) {
@@ -259,7 +261,7 @@ function ServiceAccountDrawer({
const [nameResult, rolesResult] = await Promise.allSettled([
namePromise,
applyDiff(localRoles, availableRoles),
applyDiff([localRole].filter(Boolean), availableRoles),
]);
const errors: SaveError[] = [];
@@ -308,7 +310,7 @@ function ServiceAccountDrawer({
account,
isDirty,
localName,
localRoles,
localRole,
availableRoles,
updateMutateAsync,
applyDiff,
@@ -410,8 +412,8 @@ function ServiceAccountDrawer({
account={account}
localName={localName}
onNameChange={handleNameChange}
localRoles={localRoles}
onRolesChange={setLocalRoles}
localRole={localRole}
onRoleChange={setLocalRole}
isDisabled={isDeleted}
availableRoles={availableRoles}
rolesLoading={rolesLoading}

View File

@@ -139,20 +139,20 @@ describe('ServiceAccountDrawer', () => {
});
});
it('changing roles enables Save; clicking Save sends updated roles in payload', async () => {
const updateSpy = jest.fn();
it('changing roles enables Save; clicking Save sends role add request without delete', async () => {
const roleSpy = jest.fn();
const deleteSpy = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.put(SA_ENDPOINT, async (req, res, ctx) => {
updateSpy(await req.json());
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
rest.post(SA_ROLES_ENDPOINT, async (req, res, ctx) => {
roleSpy(await req.json());
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
rest.delete(SA_ROLE_DELETE_ENDPOINT, (_, res, ctx) => {
deleteSpy();
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
);
renderDrawer();
@@ -167,12 +167,12 @@ describe('ServiceAccountDrawer', () => {
await user.click(saveBtn);
await waitFor(() => {
expect(updateSpy).not.toHaveBeenCalled();
expect(roleSpy).toHaveBeenCalledWith(
expect.objectContaining({
id: '019c24aa-2248-7585-a129-4188b3473c27',
}),
);
expect(deleteSpy).not.toHaveBeenCalled();
});
});
@@ -350,7 +350,7 @@ describe('ServiceAccountDrawer save-error UX', () => {
).toBeInTheDocument();
});
it('role update failure shows SaveErrorItem with the role name context', async () => {
it('role add failure shows SaveErrorItem with the role name context', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(

View File

@@ -1,10 +1,6 @@
import { useCallback, useMemo } from 'react';
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
import {
useGetUser,
useRemoveUserRoleByUserIDAndRoleID,
useSetRoleByUserID,
} from 'api/generated/services/users';
import { useGetUser, useSetRoleByUserID } from 'api/generated/services/users';
export interface MemberRoleUpdateFailure {
roleName: string;
@@ -43,7 +39,6 @@ export function useMemberRoleManager(
);
const { mutateAsync: setRole } = useSetRoleByUserID();
const { mutateAsync: removeRole } = useRemoveUserRoleByUserIDAndRoleID();
const applyDiff = useCallback(
async (
@@ -53,25 +48,12 @@ export function useMemberRoleManager(
const currentRoleIdSet = new Set(fetchedRoleIds);
const desiredRoleIdSet = new Set(localRoleIds.filter(Boolean));
const toRemove = currentUserRoles.filter((ur) => {
const id = ur.role?.id ?? ur.roleId;
return id && !desiredRoleIdSet.has(id);
});
const toAdd = availableRoles.filter(
(r) => r.id && desiredRoleIdSet.has(r.id) && !currentRoleIdSet.has(r.id),
);
/// TODO: re-enable deletes once BE for this is streamlined
const allOps = [
...toRemove.map((ur) => ({
roleName: ur.role?.name ?? 'unknown',
run: (): ReturnType<typeof removeRole> =>
removeRole({
pathParams: {
id: userId,
roleId: ur.role?.id ?? ur.roleId ?? '',
},
}),
})),
...toAdd.map((role) => ({
roleName: role.name ?? 'unknown',
run: (): ReturnType<typeof setRole> =>
@@ -94,7 +76,7 @@ export function useMemberRoleManager(
return failures;
},
[userId, fetchedRoleIds, currentUserRoles, setRole, removeRole],
[userId, fetchedRoleIds, setRole],
);
return { fetchedRoleIds, isLoading, applyDiff };

View File

@@ -3,7 +3,6 @@ import { useQueryClient } from 'react-query';
import {
getGetServiceAccountRolesQueryKey,
useCreateServiceAccountRole,
useDeleteServiceAccountRole,
useGetServiceAccountRoles,
} from 'api/generated/services/serviceaccount';
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
@@ -36,7 +35,6 @@ export function useServiceAccountRoleManager(
// the retry for these mutations is safe due to being idempotent on backend
const { mutateAsync: createRole } = useCreateServiceAccountRole();
const { mutateAsync: deleteRole } = useDeleteServiceAccountRole();
const invalidateRoles = useCallback(
() =>
@@ -62,21 +60,13 @@ export function useServiceAccountRoleManager(
(r) => r.id && desiredRoleIds.has(r.id) && !currentRoleIds.has(r.id),
);
const removedRoles = currentRoles.filter(
(r) => r.id && !desiredRoleIds.has(r.id),
);
// TODO: re-enable deletes once BE for this is streamlined
const allOperations = [
...addedRoles.map((role) => ({
role,
run: (): ReturnType<typeof createRole> =>
createRole({ pathParams: { id: accountId }, data: { id: role.id } }),
})),
...removedRoles.map((role) => ({
role,
run: (): ReturnType<typeof deleteRole> =>
deleteRole({ pathParams: { id: accountId, rid: role.id } }),
})),
];
const results = await Promise.allSettled(
@@ -102,7 +92,7 @@ export function useServiceAccountRoleManager(
return failures;
},
[accountId, currentRoles, createRole, deleteRole, invalidateRoles],
[accountId, currentRoles, createRole, invalidateRoles],
);
return {

View File

@@ -240,6 +240,26 @@ func (m *MockNotificationManager) DeleteAllRoutePoliciesByName(ctx context.Conte
return nil
}
// GetRoutePoliciesByChannel returns every stored route policy belonging to
// orgID whose Channels list contains channelName. Mock implementation backed
// by the in-memory m.routes slice; returns an invalid-input error when orgID
// is empty and otherwise never fails.
func (m *MockNotificationManager) GetRoutePoliciesByChannel(ctx context.Context, orgID string, channelName string) ([]*alertmanagertypes.RoutePolicy, error) {
if orgID == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "orgID cannot be empty")
}
var matched []*alertmanagertypes.RoutePolicy
for _, route := range m.routes {
// Skip routes that belong to a different org.
if route.OrgID != orgID {
continue
}
for _, ch := range route.Channels {
if ch == channelName {
matched = append(matched, route)
// break: a route is appended at most once even if the
// channel name appears multiple times in its Channels list
break
}
}
}
return matched, nil
}
func (m *MockNotificationManager) Match(ctx context.Context, orgID string, ruleID string, set model.LabelSet) ([]string, error) {
key := getKey(orgID, ruleID)
if err := m.errors[key]; err != nil {

View File

@@ -59,6 +59,10 @@ func (m *MockSQLRouteStore) DeleteRouteByName(ctx context.Context, orgID string,
return m.routeStore.DeleteRouteByName(ctx, orgID, name)
}
// GetAll delegates to the wrapped route store so MockSQLRouteStore keeps
// satisfying the route-store interface after GetAll was added to it.
func (m *MockSQLRouteStore) GetAll(ctx context.Context, orgID string) ([]*alertmanagertypes.RoutePolicy, error) {
return m.routeStore.GetAll(ctx, orgID)
}
func (m *MockSQLRouteStore) ExpectGetByID(orgID, id string, route *alertmanagertypes.RoutePolicy) {
rows := sqlmock.NewRows([]string{"id", "org_id", "name", "expression", "kind", "description", "enabled", "tags", "channels", "created_at", "updated_at", "created_by", "updated_by"})

View File

@@ -83,6 +83,18 @@ func (store *store) GetAllByName(ctx context.Context, orgID string, name string)
return routes, nil
}
// GetAll fetches every routing policy stored for orgID, regardless of kind.
// Returns (nil, nil) when no rows exist; any other database failure is
// wrapped as an internal error carrying the orgID for context.
func (store *store) GetAll(ctx context.Context, orgID string) ([]*routeTypes.RoutePolicy, error) {
var routes []*routeTypes.RoutePolicy
err := store.sqlstore.BunDBCtx(ctx).NewSelect().Model(&routes).Where("org_id = ?", orgID).Scan(ctx)
if err != nil {
// NOTE(review): scanning into a slice usually yields an empty slice
// rather than sql.ErrNoRows — confirm this guard is actually reachable.
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch routing policies for orgID: %s", orgID)
}
return routes, nil
}
func (store *store) DeleteRouteByName(ctx context.Context, orgID string, name string) error {
_, err := store.sqlstore.BunDBCtx(ctx).NewDelete().Model((*routeTypes.RoutePolicy)(nil)).Where("org_id = ?", orgID).Where("name = ?", name).Exec(ctx)
if err != nil {

View File

@@ -23,6 +23,10 @@ type NotificationManager interface {
DeleteRoutePolicy(ctx context.Context, orgID string, routeID string) error
DeleteAllRoutePoliciesByName(ctx context.Context, orgID string, name string) error
// GetRoutePoliciesByChannel returns all route policies (both rule-based and policy-based)
// that reference the given channel name.
GetRoutePoliciesByChannel(ctx context.Context, orgID string, channelName string) ([]*alertmanagertypes.RoutePolicy, error)
// Route matching
Match(ctx context.Context, orgID string, ruleID string, set model.LabelSet) ([]string, error)
}

View File

@@ -155,6 +155,28 @@ func (r *provider) GetAllRoutePolicies(ctx context.Context, orgID string) ([]*al
return r.routeStore.GetAllByKind(ctx, orgID, alertmanagertypes.PolicyBasedExpression)
}
// GetRoutePoliciesByChannel returns all route policies (rule-based and
// policy-based) in orgID that reference channelName in their Channels slice.
// It loads every route for the org from the route store and filters in memory.
func (r *provider) GetRoutePoliciesByChannel(ctx context.Context, orgID string, channelName string) ([]*alertmanagertypes.RoutePolicy, error) {
if orgID == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "orgID cannot be empty")
}
allRoutes, err := r.routeStore.GetAll(ctx, orgID)
if err != nil {
return nil, err
}
var matched []*alertmanagertypes.RoutePolicy
for _, route := range allRoutes {
for _, ch := range route.Channels {
if ch == channelName {
matched = append(matched, route)
// break: append each matching route once, even with
// duplicate channel entries
break
}
}
}
return matched, nil
}
func (r *provider) DeleteRoutePolicy(ctx context.Context, orgID string, routeID string) error {
if routeID == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "routeID cannot be empty")

View File

@@ -169,6 +169,21 @@ func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, c
return err
}
// Check if channel is referenced by any route policy (rule-based or policy-based)
policies, err := provider.notificationManager.GetRoutePoliciesByChannel(ctx, orgID, channel.Name)
if err != nil {
return err
}
if len(policies) > 0 {
names := make([]string, 0, len(policies))
for _, p := range policies {
names = append(names, p.Name)
}
return errors.NewInvalidInputf(errors.CodeInvalidInput,
"channel %q cannot be deleted because it is used by the following routing policies: %v",
channel.Name, names)
}
config, err := provider.configStore.Get(ctx, orgID)
if err != nil {
return err

View File

@@ -376,7 +376,7 @@ func (module *module) getOrGetSetIdentity(ctx context.Context, serviceAccountID
}
func (module *module) setRole(ctx context.Context, orgID valuer.UUID, id valuer.UUID, role *authtypes.Role) error {
serviceAccount, err := module.Get(ctx, orgID, id)
serviceAccount, err := module.GetWithRoles(ctx, orgID, id)
if err != nil {
return err
}
@@ -386,12 +386,24 @@ func (module *module) setRole(ctx context.Context, orgID valuer.UUID, id valuer.
return err
}
err = module.authz.Grant(ctx, orgID, []string{role.Name}, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, id.String(), orgID, nil))
err = module.authz.ModifyGrant(ctx, orgID, serviceAccount.RoleNames(), []string{role.Name}, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, id.String(), orgID, nil))
if err != nil {
return err
}
err = module.store.CreateServiceAccountRole(ctx, serviceAccountRole)
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.store.DeleteServiceAccountRoles(ctx, serviceAccount.ID)
if err != nil {
return err
}
err = module.store.CreateServiceAccountRole(ctx, serviceAccountRole)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}

View File

@@ -170,6 +170,21 @@ func (store *store) CreateServiceAccountRole(ctx context.Context, serviceAccount
return nil
}
// DeleteServiceAccountRoles removes every role mapping for the given service
// account in a single DELETE. Deleting zero rows is not an error; only a
// database failure is propagated.
func (store *store) DeleteServiceAccountRoles(ctx context.Context, serviceAccountID valuer.UUID) error {
_, err := store.
sqlstore.
BunDBCtx(ctx).
NewDelete().
Model(new(serviceaccounttypes.ServiceAccountRole)).
Where("service_account_id = ?", serviceAccountID).
Exec(ctx)
if err != nil {
return err
}
return nil
}
func (store *store) DeleteServiceAccountRole(ctx context.Context, serviceAccountID valuer.UUID, roleID valuer.UUID) error {
_, err := store.
sqlstore.

View File

@@ -5,6 +5,7 @@ import (
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -66,6 +67,8 @@ type Module interface {
GetIdentity(context.Context, string) (*authtypes.Identity, error)
Config() Config
statsreporter.StatsCollector
}
type Handler interface {

View File

@@ -383,6 +383,11 @@ func (module *setter) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot self delete")
}
err = user.UpdateStatus(types.UserStatusDeleted)
if err != nil {
return err
}
userRoles, err := module.getter.GetRolesByUserID(ctx, user.ID)
if err != nil {
return err
@@ -406,6 +411,8 @@ func (module *setter) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
traitsOrProperties := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Deleted", map[string]any{
"deleted_by": deletedBy,
})
@@ -568,8 +575,13 @@ func (module *setter) UpdatePasswordByResetPasswordToken(ctx context.Context, to
roleNames := roleNamesFromUserRoles(userRoles)
isPendingInviteUser := user.Status == types.UserStatusPendingInvite
// since grant is idempotent, multiple calls won't cause issues in case of retries
if user.Status == types.UserStatusPendingInvite {
if isPendingInviteUser {
if err := user.UpdateStatus(types.UserStatusActive); err != nil {
return err
}
if err = module.authz.Grant(
ctx,
user.OrgID,
@@ -580,15 +592,14 @@ func (module *setter) UpdatePasswordByResetPasswordToken(ctx context.Context, to
}
traitsOrProperties := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Activated", traitsOrProperties)
}
return module.store.RunInTx(ctx, func(ctx context.Context) error {
if user.Status == types.UserStatusPendingInvite {
if err := user.UpdateStatus(types.UserStatusActive); err != nil {
return err
}
if err := module.store.UpdateUser(ctx, user.OrgID, user); err != nil {
if isPendingInviteUser {
err := module.store.UpdateUser(ctx, user.OrgID, user)
if err != nil {
return err
}
}
@@ -817,6 +828,7 @@ func (module *setter) activatePendingUser(ctx context.Context, user *types.User,
}
traitsOrProperties := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Activated", traitsOrProperties)
return nil
@@ -866,16 +878,17 @@ func (module *setter) AddUserRole(ctx context.Context, orgID, userID valuer.UUID
if err != nil {
return err
}
for _, userRole := range existingUserRoles {
if userRole.Role != nil && userRole.Role.Name == roleName {
return nil // role already assigned no-op
}
existingRoles := make([]string, len(existingUserRoles))
for idx, role := range existingUserRoles {
existingRoles[idx] = role.Role.Name
}
// grant via authz (idempotent)
if err := module.authz.Grant(
if err := module.authz.ModifyGrant(
ctx,
orgID,
existingRoles,
[]string{roleName},
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), existingUser.OrgID, nil),
); err != nil {
@@ -884,7 +897,20 @@ func (module *setter) AddUserRole(ctx context.Context, orgID, userID valuer.UUID
// create user_role entry
userRoles := authtypes.NewUserRoles(userID, foundRoles)
if err := module.userRoleStore.CreateUserRoles(ctx, userRoles); err != nil {
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.userRoleStore.DeleteUserRoles(ctx, existingUser.ID)
if err != nil {
return err
}
err := module.userRoleStore.CreateUserRoles(ctx, userRoles)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}

View File

@@ -121,9 +121,7 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
)
for idx, ptr := range slots {
key := telemetrytypes.GetFieldKeyFromCanonicalName(colNames[idx])
// Remove the data type and field context to make it generic
key = telemetrytypes.TelemetryFieldKey{Name: key.Name}
name := colNames[idx]
switch v := ptr.(type) {
case *time.Time:
@@ -131,20 +129,20 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
case *float64, *float32, *int64, *int32, *uint64, *uint32:
val := numericAsFloat(reflect.ValueOf(ptr).Elem().Interface())
if m := aggRe.FindStringSubmatch(key.Name); m != nil {
if m := aggRe.FindStringSubmatch(name); m != nil {
id, _ := strconv.Atoi(m[1])
aggValues[id] = val
} else if numericColsCount == 1 { // classic single-value query
fallbackValue = val
fallbackSeen = true
} else if slices.Contains(legacyReservedColumnTargetAliases, key.Name) {
} else if slices.Contains(legacyReservedColumnTargetAliases, name) {
fallbackValue = val
fallbackSeen = true
} else {
// numeric label
lblVals = append(lblVals, fmt.Sprint(val))
lblObjs = append(lblObjs, &qbtypes.Label{
Key: key,
Key: telemetrytypes.TelemetryFieldKey{Name: name},
Value: val,
})
}
@@ -153,20 +151,20 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
tempVal := reflect.ValueOf(ptr)
if tempVal.IsValid() && !tempVal.IsNil() && !tempVal.Elem().IsNil() {
val := numericAsFloat(tempVal.Elem().Elem().Interface())
if m := aggRe.FindStringSubmatch(key.Name); m != nil {
if m := aggRe.FindStringSubmatch(name); m != nil {
id, _ := strconv.Atoi(m[1])
aggValues[id] = val
} else if numericColsCount == 1 { // classic single-value query
fallbackValue = val
fallbackSeen = true
} else if slices.Contains(legacyReservedColumnTargetAliases, key.Name) {
} else if slices.Contains(legacyReservedColumnTargetAliases, name) {
fallbackValue = val
fallbackSeen = true
} else {
// numeric label
lblVals = append(lblVals, fmt.Sprint(val))
lblObjs = append(lblObjs, &qbtypes.Label{
Key: key,
Key: telemetrytypes.TelemetryFieldKey{Name: name},
Value: val,
})
}
@@ -175,7 +173,7 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
case *string:
lblVals = append(lblVals, *v)
lblObjs = append(lblObjs, &qbtypes.Label{
Key: key,
Key: telemetrytypes.TelemetryFieldKey{Name: name},
Value: *v,
})
@@ -187,7 +185,7 @@ func readAsTimeSeries(rows driver.Rows, queryWindow *qbtypes.TimeRange, step qbt
}
lblVals = append(lblVals, *val)
lblObjs = append(lblObjs, &qbtypes.Label{
Key: key,
Key: telemetrytypes.TelemetryFieldKey{Name: name},
Value: *val,
})
@@ -287,16 +285,12 @@ func readAsScalar(rows driver.Rows, queryName string) (*qbtypes.ScalarData, erro
var aggIndex int64
for i, name := range colNames {
key := telemetrytypes.GetFieldKeyFromCanonicalName(name)
// Remove the data type and field context to make it generic
key = telemetrytypes.TelemetryFieldKey{Name: key.Name}
colType := qbtypes.ColumnTypeGroup
if aggRe.MatchString(key.Name) {
if aggRe.MatchString(name) {
colType = qbtypes.ColumnTypeAggregation
}
cd[i] = &qbtypes.ColumnDescriptor{
TelemetryFieldKey: key,
TelemetryFieldKey: telemetrytypes.TelemetryFieldKey{Name: name},
QueryName: queryName,
AggregationIndex: aggIndex,
Type: colType,
@@ -379,8 +373,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
var outRows []*qbtypes.RawRow
translator := newTranslationMap(colNames, colTypes)
for rows.Next() {
// fresh copy of the scan slice (otherwise the driver reuses pointers)
scan := make([]any, colCnt)
@@ -397,27 +389,36 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
}
for i, cellPtr := range scan {
name := colNames[i]
// de-reference the typed pointer to any
val := reflect.ValueOf(cellPtr).Elem().Interface()
translator, ok := translator[colNames[i]]
if !ok {
continue
// Post-process JSON columns: normalize into String value
if strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON") {
switch x := val.(type) {
case []byte:
val = string(x)
default:
// already a structured type (map[string]any, []any, etc.)
}
}
rr.Data[translator.Name] = translator.Transform(val)
}
if ts, ok := rr.Data["timestamp"]; ok {
if t, ok := ts.(time.Time); ok {
rr.Timestamp = t
// special-case: timestamp column
if name == "timestamp" || name == "timestamp_datetime" {
switch t := val.(type) {
case time.Time:
rr.Timestamp = t
case uint64: // epoch-ns stored as integer
rr.Timestamp = time.Unix(0, int64(t))
case int64:
rr.Timestamp = time.Unix(0, t)
default:
// leave zero time if unrecognised
}
}
}
if ts, ok := rr.Data["timestamp_datetime"]; ok {
if t, ok := ts.(time.Time); ok {
rr.Timestamp = t
}
rr.Data[name] = val
}
outRows = append(outRows, &rr)
}
if err := rows.Err(); err != nil {
@@ -430,78 +431,6 @@ func readAsRaw(rows driver.Rows, queryName string) (*qbtypes.RawData, error) {
}, nil
}
// TranslationMap maps a raw result-set column name (as returned by the
// ClickHouse driver) to the entry describing how that column is exposed
// in the output rows.
type TranslationMap map[string]TranslationEntry

// TranslationEntry pairs a column's simplified output name with a
// transform applied to every scanned cell value before it is stored.
type TranslationEntry struct {
// Name is the key used for this column in the output row data.
Name string
// Transform converts a scanned driver value into its output form
// (e.g. JSON bytes -> string, epoch integers -> time.Time).
Transform func(any) any
}
// newTranslationMap builds a TranslationMap for the given result-set
// columns. It runs two passes:
//
//  1. compute a simplified output name for every column (via
//     GetFieldKeyFromCanonicalName), falling back to the key's full
//     Text() form when two columns would collide on the same simple
//     name; this pass also records whether the result contains the
//     canonical ("log;timestamp;number" / "span;timestamp;number") or
//     legacy ("timestamp" / "timestamp_datetime") timestamp columns.
//  2. attach a per-column transform: JSON-typed columns have []byte
//     normalized to string, the effective timestamp column has epoch
//     integers converted to time.Time, and all other columns pass
//     through unchanged.
func newTranslationMap(colNames []string, colTypes []driver.ColumnType) TranslationMap {
// First pass: compute output names with collision detection.
// If two columns share the same simple name, the later one falls back to key.Text().
seen := make(map[string]bool, len(colNames))
outputNames := make([]string, len(colNames))
foundDefaultTimestampField := false
foundLegacyTimestampField := false
for i, colName := range colNames {
if colName == "log;timestamp;number" || colName == "span;timestamp;number" {
foundDefaultTimestampField = true
}
if colName == "timestamp" || colName == "timestamp_datetime" {
foundLegacyTimestampField = true
}
key := telemetrytypes.GetFieldKeyFromCanonicalName(colName)
name := key.Name
// On a simple-name collision, the later column keeps its fully
// qualified textual form so both columns stay addressable.
if seen[name] {
name = key.Text()
}
seen[name] = true
outputNames[i] = name
}
// Second pass: build per-column transform functions.
tm := make(TranslationMap, len(colNames))
for i, colName := range colNames {
name := outputNames[i]
// DatabaseTypeName prefix check covers JSON and its variants
// (e.g. "JSON", nullable wrappers reported with a JSON prefix).
isJSON := strings.HasPrefix(strings.ToUpper(colTypes[i].DatabaseTypeName()), "JSON")
var transform func(any) any
switch {
case isJSON:
// Normalize raw JSON bytes to a string; structured values
// (map[string]any, []any, ...) are passed through as-is.
transform = func(v any) any {
if b, ok := v.([]byte); ok {
return string(b)
}
return v
}
// The canonical timestamp field wins when present; otherwise the
// legacy timestamp/timestamp_datetime columns get the conversion.
case foundDefaultTimestampField && name == "timestamp",
!foundDefaultTimestampField && foundLegacyTimestampField && (name == "timestamp" || name == "timestamp_datetime"):
transform = func(v any) any {
switch t := v.(type) {
case time.Time:
return t
case uint64: // epoch-ns stored as unsigned integer
return time.Unix(0, int64(t))
case int64: // epoch-ns stored as signed integer
return time.Unix(0, t)
default:
// Unrecognised representation: return unchanged.
return v
}
}
default:
// Identity transform for ordinary columns.
transform = func(v any) any { return v }
}
tm[colName] = TranslationEntry{Name: name, Transform: transform}
}
return tm
}
// numericAsFloat converts numeric types to float64 efficiently.
func numericAsFloat(v any) float64 {
switch x := v.(type) {

View File

@@ -139,9 +139,6 @@ func WithRuleStateHistoryModule(module rulestatehistory.Module) RuleOption {
}
func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, opts ...RuleOption) (*BaseRule, error) {
if p.RuleCondition == nil || !p.RuleCondition.IsValid() {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid rule condition")
}
threshold, err := p.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
return nil, err

View File

@@ -320,6 +320,38 @@ func (m *Manager) Stop(_ context.Context) {
m.logger.Info("rule manager stopped")
}
// validateChannels checks that every channel referenced by the rule
// exists as a notification channel for the given org.
// It returns nil when the rule references no channels, propagates any
// error from listing the org's channels, and otherwise returns an
// invalid-input error naming every referenced channel that does not
// exist in the org.
func (m *Manager) validateChannels(ctx context.Context, orgID string, rule *ruletypes.PostableRule) error {
channels := rule.Channels()
// Nothing to validate when the rule routes to no channels.
if len(channels) == 0 {
return nil
}
orgChannels, err := m.alertmanager.ListChannels(ctx, orgID)
if err != nil {
return err
}
// Index existing channel names for O(1) membership checks.
known := make(map[string]struct{}, len(orgChannels))
for _, ch := range orgChannels {
known[ch.Name] = struct{}{}
}
// Collect every referenced channel missing from the org so the
// error reports all problems at once.
var unknown []string
for _, name := range channels {
if _, ok := known[name]; !ok {
unknown = append(unknown, name)
}
}
if len(unknown) > 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput,
"channels: the following channels do not exist: %v", unknown)
}
return nil
}
// EditRule writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
@@ -336,7 +368,12 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID)
if err != nil {
return err
}
if err := parsedRule.Validate(); err != nil {
return err
}
if err := m.validateChannels(ctx, claims.OrgID, &parsedRule); err != nil {
return err
}
existingRule, err := m.ruleStore.GetStoredRule(ctx, id)
if err != nil {
return err
@@ -533,7 +570,12 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge
if err != nil {
return nil, err
}
if err := parsedRule.Validate(); err != nil {
return nil, err
}
if err := m.validateChannels(ctx, claims.OrgID, &parsedRule); err != nil {
return nil, err
}
now := time.Now()
storedRule := &ruletypes.Rule{
Identifiable: types.Identifiable{
@@ -920,7 +962,12 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID)
m.logger.ErrorContext(ctx, "failed to unmarshal patched rule with given id", slog.String("rule.id", id.StringValue()), errors.Attr(err))
return nil, err
}
if err := storedRule.Validate(); err != nil {
return nil, err
}
if err := m.validateChannels(ctx, claims.OrgID, &storedRule); err != nil {
return nil, err
}
// deploy or un-deploy task according to patched (new) rule state
if err := m.syncRuleStateWithTask(ctx, orgID, taskName, &storedRule); err != nil {
m.logger.ErrorContext(ctx, "failed to sync stored rule state with the task", slog.String("task.name", taskName), errors.Attr(err))
@@ -971,6 +1018,12 @@ func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleS
if err != nil {
return 0, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to unmarshal rule")
}
if err := parsedRule.Validate(); err != nil {
return 0, err
}
if err := m.validateChannels(ctx, orgID.StringValue(), &parsedRule); err != nil {
return 0, err
}
if !parsedRule.NotificationSettings.UsePolicy {
parsedRule.NotificationSettings.GroupBy = append(parsedRule.NotificationSettings.GroupBy, ruletypes.LabelThresholdName)
}

View File

@@ -298,42 +298,3 @@ func AdjustKeysForAliasExpressions[T any](query *qbtypes.QueryBuilderQuery[T], r
}
return actions
}
// MaybeAddDefaultSelectFields prepends default fields (id, timestamp) to query.SelectFields
// if they are not already covered by an existing entry.
// A select field covers a default field when the names match and the select field's
// context is either unspecified or equal to the default field's context.
// 3 cases:
// 1. log.timestamp:number in select fields; we skip
// 2. timestamp is present in select fields; we skip
// 3. attribute.timestamp in select fields; we add it
func MaybeAddDefaultSelectFields[T any](
query *qbtypes.QueryBuilderQuery[T],
defaultSelectFields []telemetrytypes.TelemetryFieldKey,
) {
// Gather only the defaults not already represented in the query.
var toAdd []telemetrytypes.TelemetryFieldKey
for i := range defaultSelectFields {
if !isDefaultSelectFieldCovered(&defaultSelectFields[i], query.SelectFields) {
toAdd = append(toAdd, defaultSelectFields[i])
}
}
// Prepend so defaults appear before user-selected fields; the
// original SelectFields order is preserved after them.
if len(toAdd) > 0 {
query.SelectFields = append(toAdd, query.SelectFields...)
}
}
// isDefaultSelectFieldCovered returns true if any entry in selectFields covers the
// given default field. A field covers a default when the names match and the field's
// context is either unspecified or equal to the default's context.
func isDefaultSelectFieldCovered(defaultField *telemetrytypes.TelemetryFieldKey, selectFields []telemetrytypes.TelemetryFieldKey) bool {
for i := range selectFields {
// Name mismatch: this entry cannot cover the default.
if selectFields[i].Name != defaultField.Name {
continue
}
// An unspecified context matches any default; otherwise the
// contexts must agree exactly.
if selectFields[i].FieldContext == telemetrytypes.FieldContextUnspecified ||
selectFields[i].FieldContext == defaultField.FieldContext {
return true
}
}
return false
}

View File

@@ -60,12 +60,7 @@ func CollisionHandledFinalExpr(
}
fieldExpression, fieldForErr := fm.FieldFor(ctx, startNs, endNs, field)
if fieldForErr != nil {
if !errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
// fm.FieldFor returned an error
return "", nil, fieldForErr
}
if errors.Is(fieldForErr, qbtypes.ErrColumnNotFound) {
// the key didn't have the right context to be added to the query
// we try to use the context we know of
keysForField := keys[field.Name]

View File

@@ -437,6 +437,7 @@ func New(
tokenizer,
config,
modules.AuthDomain,
modules.ServiceAccount,
}
// Initialize stats reporter from the available stats reporter provider factories

View File

@@ -1,7 +1,10 @@
package telemetrylogs
import (
"fmt"
"github.com/SigNoz/signoz-otel-collector/constants"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -32,7 +35,6 @@ const (
LogsV2AttributesNumberColumn = "attributes_number"
LogsV2AttributesBoolColumn = "attributes_bool"
LogsV2ResourcesStringColumn = "resources_string"
LogsV2ResourceColumn = "resource"
LogsV2ScopeStringColumn = "scope_string"
BodyV2ColumnPrefix = constants.BodyV2ColumnPrefix
@@ -126,15 +128,10 @@ var (
}
)
var LogsV2DefaultSelectFields = []telemetrytypes.TelemetryFieldKey{
{
Name: LogsV2IDColumn,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeString,
},
{
Name: LogsV2TimestampColumn,
FieldContext: telemetrytypes.FieldContextLog,
FieldDataType: telemetrytypes.FieldDataTypeNumber,
},
// bodyAliasExpression returns the SELECT expression for the log body
// column: the plain legacy body column when body-JSON querying is
// disabled, otherwise the v2 body column aliased back to "body" so the
// output column name stays stable for consumers.
func bodyAliasExpression() string {
if !querybuilder.BodyJSONQueryEnabled {
return LogsV2BodyColumn
}
return fmt.Sprintf("%s as body", LogsV2BodyV2Column)
}

View File

@@ -395,7 +395,7 @@ func (m *fieldMapper) ColumnExpressionFor(
}
}
return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(fieldExpression), field.CanonicalName()), nil
return fmt.Sprintf("%s AS `%s`", sqlbuilder.Escape(fieldExpression), field.Name), nil
}
// buildFieldForJSON builds the field expression for body JSON fields using arrayConcat pattern.

File diff suppressed because one or more lines are too long

View File

@@ -266,35 +266,33 @@ func (b *logQueryStatementBuilder) buildListQuery(
cteArgs = append(cteArgs, args)
}
// Select timestamp and id by default
sb.Select(LogsV2TimestampColumn)
sb.SelectMore(LogsV2IDColumn)
if len(query.SelectFields) == 0 {
// Select default fields
for index := range LogsV2DefaultSelectFields {
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &LogsV2DefaultSelectFields[index], keys)
if err != nil {
return nil, err
}
sb.SelectMore(colExpr)
}
// Select all columns by default with dummy canonical aliases
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2TraceIDColumn, LogsV2TraceIDColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2SpanIDColumn, LogsV2SpanIDColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2TraceFlagsColumn, LogsV2TraceFlagsColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2SeverityTextColumn, LogsV2SeverityTextColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2SeverityNumberColumn, LogsV2SeverityNumberColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2ScopeNameColumn, LogsV2ScopeNameColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2ScopeVersionColumn, LogsV2ScopeVersionColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2BodyColumn, LogsV2BodyColumn))
if querybuilder.BodyJSONQueryEnabled {
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2BodyV2Column, LogsV2BodyV2Column))
}
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2AttributesStringColumn, LogsV2AttributesStringColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2AttributesNumberColumn, LogsV2AttributesNumberColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2AttributesBoolColumn, LogsV2AttributesBoolColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2ResourcesStringColumn, LogsV2ResourcesStringColumn))
sb.SelectMore(fmt.Sprintf("`%s` AS `;%s;`", LogsV2ScopeStringColumn, LogsV2ScopeStringColumn))
// Select all default columns
sb.SelectMore(LogsV2TraceIDColumn)
sb.SelectMore(LogsV2SpanIDColumn)
sb.SelectMore(LogsV2TraceFlagsColumn)
sb.SelectMore(LogsV2SeverityTextColumn)
sb.SelectMore(LogsV2SeverityNumberColumn)
sb.SelectMore(LogsV2ScopeNameColumn)
sb.SelectMore(LogsV2ScopeVersionColumn)
sb.SelectMore(bodyAliasExpression())
sb.SelectMore(LogsV2AttributesStringColumn)
sb.SelectMore(LogsV2AttributesNumberColumn)
sb.SelectMore(LogsV2AttributesBoolColumn)
sb.SelectMore(LogsV2ResourcesStringColumn)
sb.SelectMore(LogsV2ScopeStringColumn)
} else {
querybuilder.MaybeAddDefaultSelectFields(&query, LogsV2DefaultSelectFields)
// Select specified columns
for index := range query.SelectFields {
if query.SelectFields[index].Name == LogsV2TimestampColumn || query.SelectFields[index].Name == LogsV2IDColumn {
continue
}
// get column expression for the field - use array index directly to avoid pointer to loop variable
colExpr, err := b.fm.ColumnExpressionFor(ctx, start, end, &query.SelectFields[index], keys)
if err != nil {
return nil, err
@@ -385,10 +383,10 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.CanonicalName())
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.TelemetryFieldKey.CanonicalName()))
fieldNames = append(fieldNames, fmt.Sprintf("`%s`", gb.Name))
}
// Aggregations
@@ -465,9 +463,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
} else {
sb.GroupBy("ts")
for _, gb := range query.GroupBy {
sb.GroupBy(fmt.Sprintf("`%s`", gb.TelemetryFieldKey.CanonicalName()))
}
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
if query.Having != nil && query.Having.Expression != "" {
rewriter := querybuilder.NewHavingExpressionRewriter()
rewrittenExpr, err := rewriter.RewriteForLogs(query.Having.Expression, query.Aggregations)
@@ -481,7 +477,7 @@ func (b *logQueryStatementBuilder) buildTimeSeriesQuery(
for _, orderBy := range query.Order {
_, ok := aggOrderBy(orderBy, query)
if !ok {
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.CanonicalName(), orderBy.Direction.StringValue()))
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
}
}
sb.OrderBy("ts desc")
@@ -540,7 +536,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
return nil, err
}
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.TelemetryFieldKey.CanonicalName())
colExpr := fmt.Sprintf("toString(%s) AS `%s`", expr, gb.Name)
allGroupByArgs = append(allGroupByArgs, args...)
sb.SelectMore(colExpr)
}
@@ -575,9 +571,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
}
// Group by dimensions
for _, gb := range query.GroupBy {
sb.GroupBy(fmt.Sprintf("`%s`", gb.TelemetryFieldKey.CanonicalName()))
}
sb.GroupBy(querybuilder.GroupByKeys(query.GroupBy)...)
// Add having clause if needed
if query.Having != nil && query.Having.Expression != "" {
@@ -595,7 +589,7 @@ func (b *logQueryStatementBuilder) buildScalarQuery(
if ok {
sb.OrderBy(fmt.Sprintf("__result_%d %s", idx, orderBy.Direction.StringValue()))
} else {
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.CanonicalName(), orderBy.Direction.StringValue()))
sb.OrderBy(fmt.Sprintf("`%s` %s", orderBy.Key.Name, orderBy.Direction.StringValue()))
}
}

View File

@@ -55,7 +55,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `resource;service.name;string`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `resource;service.name;string` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `resource;service.name;string`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`resource;service.name;string`) GLOBAL IN (SELECT `resource;service.name;string` FROM __limit_cte) GROUP BY ts, `service.name`",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, countDistinct(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
expectedErr: nil,
@@ -127,7 +127,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `resource;service.name;string`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `resource;service.name;string` ORDER BY `resource;service.name;string` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `resource;service.name;string`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`resource;service.name;string`) GLOBAL IN (SELECT `resource;service.name;string` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `service.name` ORDER BY `service.name` desc LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(resource.`service.name`::String IS NOT NULL, resource.`service.name`::String, NULL)) AS `service.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`service.name`) GLOBAL IN (SELECT `service.name` FROM __limit_cte) GROUP BY ts, `service.name` ORDER BY `service.name` desc, ts desc",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
expectedErr: nil,
@@ -160,7 +160,7 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `attribute;materialized.key.name;string`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `attribute;materialized.key.name;string` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `attribute;materialized.key.name;string`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`attribute;materialized.key.name;string`) GLOBAL IN (SELECT `attribute;materialized.key.name;string` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?), __limit_cte AS (SELECT toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? GROUP BY `materialized.key.name` ORDER BY __result_0 DESC LIMIT ?) SELECT toStartOfInterval(fromUnixTimestamp64Nano(timestamp), INTERVAL 30 SECOND) AS ts, toString(multiIf(`attribute_string_materialized$$key$$name_exists` = ?, `attribute_string_materialized$$key$$name`, NULL)) AS `materialized.key.name`, count() AS __result_0 FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? AND (`materialized.key.name`) GLOBAL IN (SELECT `materialized.key.name` FROM __limit_cte) GROUP BY ts, `materialized.key.name`",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1705397400), uint64(1705485600), true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600), 10, true, "1705399200000000000", uint64(1705397400), "1705485600000000000", uint64(1705485600)},
},
},
@@ -249,7 +249,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -277,7 +277,7 @@ func TestStatementBuilderListQuery(t *testing.T) {
},
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY `attribute_string_materialized$$key$$name` AS `attribute;materialized.key.name;string` desc LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE (simpleJSONExtractString(labels, 'service.name') = ? AND labels LIKE ? AND labels LIKE ?) AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND true AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? ORDER BY `attribute_string_materialized$$key$$name` AS `materialized.key.name` desc LIMIT ?",
Args: []any{"cartservice", "%service.name%", "%service.name\":\"cartservice%", uint64(1747945619), uint64(1747983448), "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -370,7 +370,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "hello", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -414,7 +414,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (JSON_VALUE(body, '$.\"status\"') = ? AND JSON_EXISTS(body, '$.\"status\"')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND (JSON_VALUE(body, '$.\"status\"') = ? AND JSON_EXISTS(body, '$.\"status\"')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "success", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -430,7 +430,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND ((JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)') = ?) AND JSON_EXISTS(body, '$.\"user_names\"[*]')) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -446,7 +446,7 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
Limit: 10,
},
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND has(JSONExtract(JSON_QUERY(body, '$.\"user_names\"[*]'), 'Array(String)'), ?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "john_doe", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -900,7 +900,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `body_v2` AS `;body_v2;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body_v2.message <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body_v2.message <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{bodySearchDefaultWarning},
},
@@ -916,7 +916,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body <> ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -931,7 +931,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `body_v2` AS `;body_v2;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body_v2.message = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body_v2.message = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{bodySearchDefaultWarning},
},
@@ -947,7 +947,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND body = ? AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -962,7 +962,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `body_v2` AS `;body_v2;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body_v2.message) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body_v2.message) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
Warnings: []string{bodySearchDefaultWarning},
},
@@ -978,7 +978,7 @@ func TestStmtBuilderBodyField(t *testing.T) {
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND LOWER(body) LIKE LOWER(?) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "%error%", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -1051,7 +1051,7 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
},
enableBodyJSONQuery: true,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `body_v2` AS `;body_v2;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body_v2 as body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body_v2.message), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,
@@ -1066,7 +1066,7 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
},
enableBodyJSONQuery: false,
expected: qbtypes.Statement{
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT `id` AS `;id;`, `timestamp` AS `;timestamp;`, `trace_id` AS `;trace_id;`, `span_id` AS `;span_id;`, `trace_flags` AS `;trace_flags;`, `severity_text` AS `;severity_text;`, `severity_number` AS `;severity_number;`, `scope_name` AS `;scope_name;`, `scope_version` AS `;scope_version;`, `body` AS `;body;`, `attributes_string` AS `;attributes_string;`, `attributes_number` AS `;attributes_number;`, `attributes_bool` AS `;attributes_bool;`, `resources_string` AS `;resources_string;`, `resource` AS `;resource;`, `scope_string` AS `;scope_string;` FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Query: "WITH __resource_filter AS (SELECT fingerprint FROM signoz_logs.distributed_logs_v2_resource WHERE true AND seen_at_ts_bucket_start >= ? AND seen_at_ts_bucket_start <= ?) SELECT timestamp, id, trace_id, span_id, trace_flags, severity_text, severity_number, scope_name, scope_version, body, attributes_string, attributes_number, attributes_bool, resources_string, scope_string FROM signoz_logs.distributed_logs_v2 WHERE resource_fingerprint GLOBAL IN (SELECT fingerprint FROM __resource_filter) AND match(LOWER(body), LOWER(?)) AND timestamp >= ? AND ts_bucket_start >= ? AND timestamp < ? AND ts_bucket_start <= ? LIMIT ?",
Args: []any{uint64(1747945619), uint64(1747983448), "error", "1747947419000000000", uint64(1747945619), "1747983448000000000", uint64(1747983448), 10},
},
expectedErr: nil,

View File

@@ -136,4 +136,5 @@ type RouteStore interface {
GetAllByKind(ctx context.Context, orgID string, kind ExpressionKind) ([]*RoutePolicy, error)
GetAllByName(ctx context.Context, orgID string, name string) ([]*RoutePolicy, error)
DeleteRouteByName(ctx context.Context, orgID string, name string) error
GetAll(ctx context.Context, orgID string) ([]*RoutePolicy, error)
}

View File

@@ -226,7 +226,6 @@ type RawData struct {
// RawRow is a single row of a raw (list) query result: an event timestamp,
// a row identifier, and the remaining values carried as a name -> value map.
type RawRow struct {
	// Timestamp is the event time of the row.
	Timestamp time.Time `json:"timestamp"`
	// ID identifies the row; presumably unique within a result set — confirm with producers.
	ID string `json:"id"`
	// Data holds the remaining columns/fields of the row, keyed by name.
	Data map[string]any `json:"data"`
}

View File

@@ -103,7 +103,7 @@ type RuleCondition struct {
MatchType MatchType `json:"matchType"`
TargetUnit string `json:"targetUnit,omitempty"`
Algorithm string `json:"algorithm,omitempty"`
Seasonality string `json:"seasonality,omitempty"`
Seasonality Seasonality `json:"seasonality,omitzero"`
SelectedQuery string `json:"selectedQueryName,omitempty"`
RequireMinPoints bool `json:"requireMinPoints,omitempty"`
RequiredNumPoints int `json:"requiredNumPoints,omitempty"`
@@ -158,10 +158,6 @@ func (rc *RuleCondition) SelectedQueryName() string {
return keys[len(keys)-1]
}
// IsValid reports whether the rule condition is well-formed. It currently
// accepts every condition unconditionally (always returns true); stricter
// validation is performed elsewhere on the write path.
func (rc *RuleCondition) IsValid() bool {
	return true
}
// ShouldEval checks if the further series should be evaluated at all for alerts.
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
return !rc.RequireMinPoints || len(series.Values) >= rc.RequiredNumPoints

View File

@@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -25,7 +26,8 @@ const (
)
const (
DefaultSchemaVersion = "v1"
DefaultSchemaVersion = "v1"
SchemaVersionV2Alpha1 = "v2alpha1"
)
type RuleDataKind string
@@ -39,9 +41,9 @@ type PostableRule struct {
AlertName string `json:"alert"`
AlertType AlertType `json:"alertType,omitempty"`
Description string `json:"description,omitempty"`
RuleType RuleType `json:"ruleType,omitempty"`
EvalWindow valuer.TextDuration `json:"evalWindow,omitempty"`
Frequency valuer.TextDuration `json:"frequency,omitempty"`
RuleType RuleType `json:"ruleType,omitzero"`
EvalWindow valuer.TextDuration `json:"evalWindow,omitzero"`
Frequency valuer.TextDuration `json:"frequency,omitzero"`
RuleCondition *RuleCondition `json:"condition,omitempty"`
Labels map[string]string `json:"labels,omitempty"`
@@ -64,7 +66,7 @@ type PostableRule struct {
type NotificationSettings struct {
GroupBy []string `json:"groupBy,omitempty"`
Renotify Renotify `json:"renotify,omitempty"`
Renotify Renotify `json:"renotify,omitzero"`
UsePolicy bool `json:"usePolicy,omitempty"`
// NewGroupEvalDelay is the grace period for new series to be excluded from alerts evaluation
NewGroupEvalDelay valuer.TextDuration `json:"newGroupEvalDelay,omitzero"`
@@ -93,6 +95,28 @@ func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanager
return alertmanagertypes.NewNotificationConfig(ns.GroupBy, renotifyInterval, noDataRenotifyInterval, ns.UsePolicy)
}
// Channels returns all unique channel names referenced by the rule's thresholds.
// It returns nil when the rule has no condition, no thresholds, or the
// threshold cannot be resolved.
func (r *PostableRule) Channels() []string {
	cond := r.RuleCondition
	if cond == nil || cond.Thresholds == nil {
		return nil
	}

	ruleThreshold, err := cond.Thresholds.GetRuleThreshold()
	if err != nil {
		// An unresolvable threshold contributes no channels.
		return nil
	}

	var names []string
	dedup := map[string]struct{}{}
	for _, rcv := range ruleThreshold.GetRuleReceivers() {
		for _, name := range rcv.Channels {
			if _, dup := dedup[name]; dup {
				continue
			}
			dedup[name] = struct{}{}
			names = append(names, name)
		}
	}
	return names
}
func (r *PostableRule) GetRuleRouteRequest(ruleID string) ([]*alertmanagertypes.PostableRoutePolicy, error) {
threshold, err := r.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
@@ -185,15 +209,19 @@ func (r *PostableRule) processRuleDefaults() {
r.SchemaVersion = DefaultSchemaVersion
}
if r.EvalWindow.IsZero() {
r.EvalWindow = valuer.MustParseTextDuration("5m")
// v2alpha1 uses the Evaluation envelope for window/frequency;
// only default top-level fields for v1.
if r.SchemaVersion != SchemaVersionV2Alpha1 {
if r.EvalWindow.IsZero() {
r.EvalWindow = valuer.MustParseTextDuration("5m")
}
if r.Frequency.IsZero() {
r.Frequency = valuer.MustParseTextDuration("1m")
}
}
if r.Frequency.IsZero() {
r.Frequency = valuer.MustParseTextDuration("1m")
}
if r.RuleCondition != nil {
if r.RuleCondition != nil && r.RuleCondition.CompositeQuery != nil {
switch r.RuleCondition.CompositeQuery.QueryType {
case QueryTypeBuilder:
if r.RuleType.IsZero() {
@@ -259,6 +287,10 @@ func (r *PostableRule) MarshalJSON() ([]byte, error) {
aux.SchemaVersion = ""
aux.NotificationSettings = nil
return json.Marshal(aux)
case SchemaVersionV2Alpha1:
copyStruct := *r
aux := Alias(copyStruct)
return json.Marshal(aux)
default:
copyStruct := *r
aux := Alias(copyStruct)
@@ -292,23 +324,24 @@ func isValidLabelValue(v string) bool {
return utf8.ValidString(v)
}
// validate runs during UnmarshalJSON (read + write path).
// Preserves the original pre-existing checks only so that stored rules
// continue to load without errors.
func (r *PostableRule) validate() error {
var errs []error
if r.RuleCondition == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "rule condition is required")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition: field is required")
}
if r.Version != "v5" {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "version: only v5 is supported, got %q", r.Version))
}
for k, v := range r.Labels {
if !isValidLabelName(k) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label name: %s", k))
}
if !isValidLabelValue(v) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label value: %s", v))
}
@@ -321,7 +354,196 @@ func (r *PostableRule) validate() error {
}
errs = append(errs, testTemplateParsing(r)...)
return errors.Join(errs...)
joined := errors.Join(errs...)
if joined != nil {
return errors.WrapInvalidInputf(joined, errors.CodeInvalidInput, "validation failed")
}
return nil
}
// Validate enforces all validation rules. For now, this is invoked on the write path
// (create, update, patch, test) before persisting. This is intentionally
// not called from UnmarshalJSON so that existing stored rules can always
// be loaded regardless of new validation rules.
//
// It returns a single joined error (wrapped with CodeInvalidInput) carrying
// every violation found, or nil when the rule is valid.
func (r *PostableRule) Validate() error {
	var errs []error

	if r.AlertName == "" {
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "alert: field is required"))
	}

	if r.RuleCondition == nil {
		// No further checks are possible without a condition, but report the
		// errors collected so far as well instead of silently dropping them.
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "condition: field is required"))
		return errors.WrapInvalidInputf(errors.Join(errs...), errors.CodeInvalidInput, "validation failed")
	}

	if r.Version != "v5" {
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "version: only v5 is supported, got %q", r.Version))
	}

	// Alert type is optional; when set it must be one of the known kinds.
	if r.AlertType != "" {
		switch r.AlertType {
		case AlertTypeMetric, AlertTypeTraces, AlertTypeLogs, AlertTypeExceptions:
		default:
			errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
				"alertType: unsupported value %q; must be one of %q, %q, %q, %q",
				r.AlertType, AlertTypeMetric, AlertTypeTraces, AlertTypeLogs, AlertTypeExceptions))
		}
	}

	if !r.RuleType.IsZero() {
		if err := r.RuleType.Validate(); err != nil {
			errs = append(errs, err)
		}
	}

	// Seasonality is only meaningful for anomaly rules.
	if r.RuleType == RuleTypeAnomaly && !r.RuleCondition.Seasonality.IsZero() {
		if err := r.RuleCondition.Seasonality.Validate(); err != nil {
			errs = append(errs, err)
		}
	}

	if r.RuleCondition.CompositeQuery == nil {
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.compositeQuery: field is required"))
	} else {
		if len(r.RuleCondition.CompositeQuery.Queries) == 0 {
			errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.compositeQuery.queries: must have at least one query"))
		} else {
			cq := &qbtypes.CompositeQuery{Queries: r.RuleCondition.CompositeQuery.Queries}
			if err := cq.Validate(qbtypes.GetValidationOptions(qbtypes.RequestTypeTimeSeries)...); err != nil {
				errs = append(errs, err)
			}
		}
	}

	// The selected query, when set, must name one of the composite queries.
	if r.RuleCondition.SelectedQuery != "" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) > 0 {
		found := false
		for _, query := range r.RuleCondition.CompositeQuery.Queries {
			if query.GetQueryName() == r.RuleCondition.SelectedQuery {
				found = true
				break
			}
		}
		if !found {
			errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
				"condition.selectedQueryName: %q does not match any query in compositeQuery",
				r.RuleCondition.SelectedQuery))
		}
	}

	if r.RuleCondition.RequireMinPoints && r.RuleCondition.RequiredNumPoints <= 0 {
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"condition.requiredNumPoints: must be greater than 0 when requireMinPoints is enabled"))
	}

	// Schema-version specific checks (v1 vs v2alpha1).
	errs = append(errs, r.validateSchemaVersion()...)

	for k, v := range r.Labels {
		if !isValidLabelName(k) {
			errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label name: %s", k))
		}
		if !isValidLabelValue(v) {
			errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label value: %s", v))
		}
	}

	for k := range r.Annotations {
		if !isValidLabelName(k) {
			errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid annotation name: %s", k))
		}
	}

	errs = append(errs, testTemplateParsing(r)...)

	joined := errors.Join(errs...)
	if joined != nil {
		return errors.WrapInvalidInputf(joined, errors.CodeInvalidInput, "validation failed")
	}
	return nil
}
// validateSchemaVersion dispatches to the checks specific to the rule's
// schema version, and rejects any version it does not recognize.
func (r *PostableRule) validateSchemaVersion() []error {
	if r.SchemaVersion == DefaultSchemaVersion {
		return r.validateV1()
	}
	if r.SchemaVersion == SchemaVersionV2Alpha1 {
		return r.validateV2Alpha1()
	}
	return []error{errors.NewInvalidInputf(errors.CodeInvalidInput,
		"schemaVersion: unsupported value %q; must be one of %q, %q",
		r.SchemaVersion, DefaultSchemaVersion, SchemaVersionV2Alpha1)}
}
// validateV1 checks the condition fields that the v1 schema requires:
// a target value, and valid comparison operator and match type.
func (r *PostableRule) validateV1() []error {
	cond := r.RuleCondition
	var errs []error

	if cond.Target == nil {
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"condition.target: field is required for schemaVersion %q", DefaultSchemaVersion))
	}

	switch {
	case cond.CompareOperator.IsZero():
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"condition.op: field is required for schemaVersion %q", DefaultSchemaVersion))
	default:
		if err := cond.CompareOperator.Validate(); err != nil {
			errs = append(errs, err)
		}
	}

	switch {
	case cond.MatchType.IsZero():
		errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"condition.matchType: field is required for schemaVersion %q", DefaultSchemaVersion))
	default:
		if err := cond.MatchType.Validate(); err != nil {
			errs = append(errs, err)
		}
	}

	return errs
}
// validateV2Alpha1 enforces the fields the v2alpha1 schema requires: explicit
// thresholds, an evaluation spec, and notification settings. When
// renotification is enabled, its interval must additionally be a positive
// duration.
//
// TODO(srikanthccv): decide whether v1-only fields (condition.target,
// condition.op, condition.matchType, preferredChannels) should be rejected
// here with pointers to their v2alpha1 replacements (condition.thresholds
// entries).
func (r *PostableRule) validateV2Alpha1() []error {
	var problems []error

	if r.RuleCondition.Thresholds == nil {
		problems = append(problems, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"condition.thresholds: field is required for schemaVersion %q", SchemaVersionV2Alpha1))
	}
	if r.Evaluation == nil {
		problems = append(problems, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"evaluation: field is required for schemaVersion %q", SchemaVersionV2Alpha1))
	}

	if r.NotificationSettings == nil {
		problems = append(problems, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"notificationSettings: field is required for schemaVersion %q", SchemaVersionV2Alpha1))
		return problems
	}

	// Renotify interval is only meaningful (and only validated) when enabled.
	renotify := r.NotificationSettings.Renotify
	if renotify.Enabled && !renotify.ReNotifyInterval.IsPositive() {
		problems = append(problems, errors.NewInvalidInputf(errors.CodeInvalidInput,
			"notificationSettings.renotify.interval: must be a positive duration when renotify is enabled"))
	}

	return problems
}
func testTemplateParsing(rl *PostableRule) (errs []error) {
@@ -393,6 +615,10 @@ func (g *GettableRule) MarshalJSON() ([]byte, error) {
aux.SchemaVersion = ""
aux.NotificationSettings = nil
return json.Marshal(aux)
case SchemaVersionV2Alpha1:
copyStruct := *g
aux := Alias(copyStruct)
return json.Marshal(aux)
default:
copyStruct := *g
aux := Alias(copyStruct)

View File

@@ -34,15 +34,15 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"expression": "A",
"disabled": false,
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 10.0,
"matchType": "1",
@@ -77,14 +77,15 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"disabled": false,
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 5.0,
"matchType": "1",
@@ -112,12 +113,14 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "promql",
"promQueries": {
"A": {
"queries": [{
"type": "promql",
"spec": {
"name": "A",
"query": "rate(http_requests_total[5m])",
"disabled": false
}
}
}]
},
"target": 10.0,
"matchType": "1",
@@ -165,12 +168,13 @@ func TestParseIntoRule(t *testing.T) {
func TestParseIntoRuleSchemaVersioning(t *testing.T) {
tests := []struct {
name string
initRule PostableRule
content []byte
kind RuleDataKind
expectError bool
validate func(*testing.T, *PostableRule)
name string
initRule PostableRule
content []byte
kind RuleDataKind
expectError bool // unmarshal error (read path)
expectValidateError bool // Validate() error (write path only)
validate func(*testing.T, *PostableRule)
}{
{
name: "schema v1 - threshold name from severity label",
@@ -182,13 +186,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "cpu_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "cpu_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
},
}],
"unit": "percent"
},
"target": 85.0,
@@ -271,13 +277,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "memory_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "memory_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 90.0,
"matchType": "1",
@@ -312,13 +320,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "cpu_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "cpu_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
},
}],
"unit": "percent"
},
"target": 80.0,
@@ -394,49 +404,253 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
},
},
{
name: "schema v2 - does not populate thresholds and evaluation",
name: "schema v2alpha1 - uses explicit thresholds and evaluation",
initRule: PostableRule{},
content: []byte(`{
"alert": "V2Test",
"schemaVersion": "v2",
"alert": "V2Alpha1Test",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"thresholds": {
"kind": "basic",
"spec": [{
"name": "critical",
"target": 100.0,
"matchType": "1",
"op": "1"
}]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
}`),
kind: RuleDataKindJson,
expectError: false,
validate: func(t *testing.T, rule *PostableRule) {
if rule.SchemaVersion != SchemaVersionV2Alpha1 {
t.Errorf("Expected schemaVersion %q, got %q", SchemaVersionV2Alpha1, rule.SchemaVersion)
}
if rule.RuleCondition.Thresholds == nil {
t.Error("Expected Thresholds to be present for v2alpha1")
}
if rule.Evaluation == nil {
t.Error("Expected Evaluation to be present for v2alpha1")
}
if rule.NotificationSettings == nil {
t.Error("Expected NotificationSettings to be present for v2alpha1")
}
if rule.RuleType != RuleTypeThreshold {
t.Error("Expected RuleType to be auto-detected")
}
},
},
{
name: "schema v2alpha1 - rejects v1-only fields with suggestions",
initRule: PostableRule{},
content: []byte(`{
"alert": "MixedFieldsTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"preferredChannels": ["slack"],
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 100.0,
"matchType": "1",
"op": "1"
}
}`),
kind: RuleDataKindJson,
expectError: false,
validate: func(t *testing.T, rule *PostableRule) {
if rule.SchemaVersion != "v2" {
t.Errorf("Expected schemaVersion 'v2', got '%s'", rule.SchemaVersion)
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "schema v2alpha1 - requires evaluation",
initRule: PostableRule{},
content: []byte(`{
"alert": "MissingEvalTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"thresholds": {
"kind": "basic",
"spec": [{
"name": "critical",
"target": 100.0,
"matchType": "1",
"op": "1"
}]
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
if rule.RuleCondition.Thresholds != nil {
t.Error("Expected Thresholds to be nil for v2")
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "schema v2alpha1 - requires notificationSettings",
initRule: PostableRule{},
content: []byte(`{
"alert": "MissingNotifTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"thresholds": {
"kind": "basic",
"spec": [{
"name": "critical",
"target": 100.0,
"matchType": "1",
"op": "1"
}]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
}
if rule.Evaluation != nil {
t.Error("Expected Evaluation to be nil for v2")
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "schema v2alpha1 - requires thresholds for non-promql rules",
initRule: PostableRule{},
content: []byte(`{
"alert": "MissingThresholdsTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
if rule.EvalWindow.Duration() != 5*time.Minute {
t.Error("Expected default EvalWindow to be applied")
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "unsupported schema version",
initRule: PostableRule{},
content: []byte(`{
"alert": "BadSchemaTest",
"schemaVersion": "v3",
"version": "v5",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 100.0,
"matchType": "1",
"op": "1"
}
if rule.RuleType != RuleTypeThreshold {
t.Error("Expected RuleType to be auto-detected")
}
},
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "default schema version - defaults to v1 behavior",
@@ -447,13 +661,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 75.0,
"matchType": "1",
@@ -480,13 +696,23 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
rule := tt.initRule
err := json.Unmarshal(tt.content, &rule)
if tt.expectError && err == nil {
t.Errorf("Expected error but got none")
if tt.expectError {
if err == nil {
t.Errorf("Expected unmarshal error but got none")
}
return
}
if !tt.expectError && err != nil {
t.Errorf("Unexpected error: %v", err)
if err != nil {
t.Errorf("Unexpected unmarshal error: %v", err)
return
}
if tt.validate != nil && err == nil {
if tt.expectValidateError {
if err := rule.Validate(); err == nil {
t.Errorf("Expected Validate() error but got none")
}
return
}
if tt.validate != nil {
tt.validate(t, &rule)
}
})
@@ -500,15 +726,15 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"expression": "A",
"disabled": false,
"aggregateAttribute": {
"key": "response_time"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "response_time", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 100.0,
"matchType": "1",
@@ -571,7 +797,7 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
func TestParseIntoRuleMultipleThresholds(t *testing.T) {
content := []byte(`{
"schemaVersion": "v2",
"schemaVersion": "v2alpha1",
"alert": "MultiThresholdAlert",
"ruleType": "threshold_rule",
"version": "v5",
@@ -579,19 +805,16 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
"compositeQuery": {
"queryType": "builder",
"unit": "%",
"builderQueries": {
"A": {
"expression": "A",
"disabled": false,
"aggregateAttribute": {
"key": "cpu_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "cpu_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 90.0,
"matchType": "1",
"op": "1",
"selectedQuery": "A",
"thresholds": {
"kind": "basic",
@@ -616,6 +839,20 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}
]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
}`)
rule := PostableRule{}

View File

@@ -54,6 +54,29 @@ func (CompareOperator) Enum() []any {
}
}
// Normalize collapses every accepted spelling of an operator (numeric,
// literal, or symbol form) into its canonical numeric value so that
// evaluation logic can rely on plain == comparisons. Values that are already
// canonical — and values Normalize does not recognize — are returned
// unchanged.
func (op CompareOperator) Normalize() CompareOperator {
	switch op {
	case ValueIsAboveLiteral, ValueIsAboveSymbol:
		return ValueIsAbove
	case ValueIsBelowLiteral, ValueIsBelowSymbol:
		return ValueIsBelow
	case ValueIsEqLiteral, ValueIsEqLiteralShort, ValueIsEqSymbol:
		return ValueIsEq
	case ValueIsNotEqLiteral, ValueIsNotEqLiteralShort, ValueIsNotEqSymbol:
		return ValueIsNotEq
	case ValueAboveOrEqLiteral, ValueAboveOrEqLiteralShort, ValueAboveOrEqSymbol:
		return ValueAboveOrEq
	case ValueBelowOrEqLiteral, ValueBelowOrEqLiteralShort, ValueBelowOrEqSymbol:
		return ValueBelowOrEq
	case ValueOutsideBoundsLiteral:
		return ValueOutsideBounds
	default:
		// Canonical forms (and unknown values) map to themselves.
		return op
	}
}
func (c CompareOperator) Validate() error {
switch c {
case ValueIsAbove,
@@ -70,10 +93,18 @@ func (c CompareOperator) Validate() error {
ValueIsNotEqLiteral,
ValueIsNotEqLiteralShort,
ValueIsNotEqSymbol,
ValueAboveOrEq,
ValueAboveOrEqLiteral,
ValueAboveOrEqLiteralShort,
ValueAboveOrEqSymbol,
ValueBelowOrEq,
ValueBelowOrEqLiteral,
ValueBelowOrEqLiteralShort,
ValueBelowOrEqSymbol,
ValueOutsideBounds,
ValueOutsideBoundsLiteral:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown comparison operator, known values are: ")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.op: unsupported value %q; must be one of above, below, equal, not_equal, above_or_equal, below_or_equal, outside_bounds", c.StringValue())
}
}

View File

@@ -11,7 +11,7 @@ type MatchType struct {
var (
AtleastOnce = MatchType{valuer.NewString("1")}
AtleastOnceLiteral = MatchType{valuer.NewString("atleast_once")}
AtleastOnceLiteral = MatchType{valuer.NewString("at_least_once")}
AllTheTimes = MatchType{valuer.NewString("2")}
AllTheTimesLiteral = MatchType{valuer.NewString("all_the_times")}
@@ -38,6 +38,24 @@ func (MatchType) Enum() []any {
}
}
// Normalize collapses every accepted spelling of a match type into its
// canonical numeric value so evaluation logic can rely on plain ==
// comparisons. Values that are already canonical — and values Normalize does
// not recognize — are returned unchanged.
func (mt MatchType) Normalize() MatchType {
	switch mt {
	case AtleastOnceLiteral:
		return AtleastOnce
	case AllTheTimesLiteral:
		return AllTheTimes
	case OnAverageLiteral, OnAverageShort:
		return OnAverage
	case InTotalLiteral, InTotalShort:
		return InTotal
	case LastLiteral:
		return Last
	default:
		// Canonical forms (and unknown values) map to themselves.
		return mt
	}
}
func (m MatchType) Validate() error {
switch m {
case
@@ -55,6 +73,6 @@ func (m MatchType) Validate() error {
LastLiteral:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown match type operator, known values are")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.matchType: unsupported value %q; must be one of at_least_once, all_the_times, on_average, in_total, last", m.StringValue())
}
}

View File

@@ -31,6 +31,6 @@ func (r RuleType) Validate() error {
RuleTypeAnomaly:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown rule type, known values are")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "ruleType: unsupported value %q; must be one of threshold_rule, promql_rule, anomaly_rule", r.StringValue())
}
}

View File

@@ -0,0 +1,35 @@
package ruletypes

import (
	"github.com/SigNoz/signoz/pkg/errors"
	"github.com/SigNoz/signoz/pkg/valuer"
)

// Seasonality is a string-valued enum for the repeating period an anomaly
// rule uses as its baseline (hourly, daily, or weekly).
type Seasonality struct {
	valuer.String
}

// Supported seasonality values.
var (
	SeasonalityHourly = Seasonality{valuer.NewString("hourly")}
	SeasonalityDaily  = Seasonality{valuer.NewString("daily")}
	SeasonalityWeekly = Seasonality{valuer.NewString("weekly")}
)

// Enum lists every supported Seasonality value.
func (Seasonality) Enum() []any {
	return []any{
		SeasonalityHourly,
		SeasonalityDaily,
		SeasonalityWeekly,
	}
}

// Validate returns nil for a supported seasonality and an invalid-input
// error (naming the accepted values) for anything else, including the zero
// value.
func (s Seasonality) Validate() error {
	switch s {
	case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
		return nil
	default:
		return errors.NewInvalidInputf(errors.CodeInvalidInput,
			"condition.seasonality: unsupported value %q; must be one of hourly, daily, weekly",
			s.StringValue())
	}
}

View File

@@ -113,6 +113,9 @@ func (r BasicRuleThresholds) GetRuleReceivers() []RuleReceivers {
}
func (r BasicRuleThresholds) Validate() error {
if len(r) == 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.thresholds.spec: must have at least one threshold")
}
var errs []error
for _, basicThreshold := range r {
if err := basicThreshold.Validate(); err != nil {
@@ -189,7 +192,7 @@ func sortThresholds(thresholds []BasicRuleThreshold) {
targetI := thresholds[i].target(thresholds[i].TargetUnit) //for sorting we dont need rule unit
targetJ := thresholds[j].target(thresholds[j].TargetUnit)
switch thresholds[i].CompareOperator {
switch thresholds[i].CompareOperator.Normalize() {
case ValueIsAbove, ValueAboveOrEq, ValueOutsideBounds:
// For "above" operations, sort descending (higher values first)
return targetI > targetJ
@@ -234,16 +237,11 @@ func (b BasicRuleThreshold) Validate() error {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "target value cannot be nil"))
}
switch b.CompareOperator {
case ValueIsAbove, ValueIsBelow, ValueIsEq, ValueIsNotEq, ValueAboveOrEq, ValueBelowOrEq, ValueOutsideBounds:
default:
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid compare operation: %s", b.CompareOperator.StringValue()))
if err := b.CompareOperator.Validate(); err != nil {
errs = append(errs, err)
}
switch b.MatchType {
case AtleastOnce, AllTheTimes, OnAverage, InTotal, Last:
default:
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid match type: %s", b.MatchType.StringValue()))
if err := b.MatchType.Validate(); err != nil {
errs = append(errs, err)
}
return errors.Join(errs...)
@@ -268,6 +266,33 @@ func PrepareSampleLabelsForRule(seriesLabels []*qbtypes.Label, thresholdName str
return lb.Labels()
}
// matchesCompareOp reports whether value satisfies the compare operator op
// against target.
//
// The operator is normalized before comparison, so literal/symbol spellings
// (e.g. "above") behave identically to their canonical numeric forms and
// callers no longer need to remember to call Normalize themselves; for
// already-canonical operators this is a no-op, so existing behavior is
// unchanged. Operators that remain unrecognized after normalization never
// match.
//
// ValueOutsideBounds compares the magnitude of value (math.Abs) against
// target.
func matchesCompareOp(op CompareOperator, value, target float64) bool {
	switch op.Normalize() {
	case ValueIsAbove:
		return value > target
	case ValueIsBelow:
		return value < target
	case ValueIsEq:
		return value == target
	case ValueIsNotEq:
		return value != target
	case ValueAboveOrEq:
		return value >= target
	case ValueBelowOrEq:
		return value <= target
	case ValueOutsideBounds:
		return math.Abs(value) >= target
	default:
		return false
	}
}
// negatesCompareOp checks if a value does NOT match the compare operator against target.
//
// NOTE(review): because matchesCompareOp returns false for operators it does
// not recognize, this wrapper returns true for an unrecognized operator —
// callers should pass a validated (and normalized) operator.
func negatesCompareOp(op CompareOperator, value, target float64) bool {
	return !matchesCompareOp(op, value, target)
}
func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, target float64) (Sample, bool) {
var shouldAlert bool
var alertSmpl Sample
@@ -278,63 +303,35 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
return alertSmpl, false
}
switch b.MatchType {
// Normalize to canonical forms so evaluation uses simple == checks
op := b.CompareOperator.Normalize()
matchType := b.MatchType.Normalize()
switch matchType {
case AtleastOnce:
// If any sample matches the condition, the rule is firing.
if b.CompareOperator == ValueIsAbove {
for _, smpl := range series.Values {
if smpl.Value > target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueIsBelow {
for _, smpl := range series.Values {
if smpl.Value < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueIsEq {
for _, smpl := range series.Values {
if smpl.Value == target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueIsNotEq {
for _, smpl := range series.Values {
if smpl.Value != target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueOutsideBounds {
for _, smpl := range series.Values {
if math.Abs(smpl.Value) >= target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
for _, smpl := range series.Values {
if matchesCompareOp(op, smpl.Value, target) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
case AllTheTimes:
// If all samples match the condition, the rule is firing.
shouldAlert = true
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
if b.CompareOperator == ValueIsAbove {
for _, smpl := range series.Values {
if smpl.Value <= target {
shouldAlert = false
break
}
for _, smpl := range series.Values {
if negatesCompareOp(op, smpl.Value, target) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
break
}
// use min value from the series
if shouldAlert {
}
if shouldAlert {
switch op {
case ValueIsAbove, ValueAboveOrEq, ValueOutsideBounds:
// use min value from the series
var minValue = math.Inf(1)
for _, smpl := range series.Values {
if smpl.Value < minValue {
@@ -342,15 +339,8 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
}
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
}
} else if b.CompareOperator == ValueIsBelow {
for _, smpl := range series.Values {
if smpl.Value >= target {
shouldAlert = false
break
}
}
if shouldAlert {
case ValueIsBelow, ValueBelowOrEq:
// use max value from the series
var maxValue = math.Inf(-1)
for _, smpl := range series.Values {
if smpl.Value > maxValue {
@@ -358,23 +348,8 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
}
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
}
} else if b.CompareOperator == ValueIsEq {
for _, smpl := range series.Values {
if smpl.Value != target {
shouldAlert = false
break
}
}
} else if b.CompareOperator == ValueIsNotEq {
for _, smpl := range series.Values {
if smpl.Value == target {
shouldAlert = false
break
}
}
// use any non-inf or nan value from the series
if shouldAlert {
case ValueIsNotEq:
// use any non-inf and non-nan value from the series
for _, smpl := range series.Values {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
@@ -382,14 +357,6 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
}
}
} else if b.CompareOperator == ValueOutsideBounds {
for _, smpl := range series.Values {
if math.Abs(smpl.Value) < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
break
}
}
}
case OnAverage:
// If the average of all samples matches the condition, the rule is firing.
@@ -403,32 +370,10 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
avg := sum / count
alertSmpl = Sample{Point: Point{V: avg}, Metric: lbls}
switch b.CompareOperator {
case ValueIsAbove:
if avg > target {
shouldAlert = true
}
case ValueIsBelow:
if avg < target {
shouldAlert = true
}
case ValueIsEq:
if avg == target {
shouldAlert = true
}
case ValueIsNotEq:
if avg != target {
shouldAlert = true
}
case ValueOutsideBounds:
if math.Abs(avg) >= target {
shouldAlert = true
}
}
shouldAlert = matchesCompareOp(op, avg, target)
case InTotal:
// If the sum of all samples matches the condition, the rule is firing.
var sum float64
for _, smpl := range series.Values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
@@ -436,50 +381,12 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
sum += smpl.Value
}
alertSmpl = Sample{Point: Point{V: sum}, Metric: lbls}
switch b.CompareOperator {
case ValueIsAbove:
if sum > target {
shouldAlert = true
}
case ValueIsBelow:
if sum < target {
shouldAlert = true
}
case ValueIsEq:
if sum == target {
shouldAlert = true
}
case ValueIsNotEq:
if sum != target {
shouldAlert = true
}
case ValueOutsideBounds:
if math.Abs(sum) >= target {
shouldAlert = true
}
}
shouldAlert = matchesCompareOp(op, sum, target)
case Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
alertSmpl = Sample{Point: Point{V: series.Values[len(series.Values)-1].Value}, Metric: lbls}
switch b.CompareOperator {
case ValueIsAbove:
if series.Values[len(series.Values)-1].Value > target {
shouldAlert = true
}
case ValueIsBelow:
if series.Values[len(series.Values)-1].Value < target {
shouldAlert = true
}
case ValueIsEq:
if series.Values[len(series.Values)-1].Value == target {
shouldAlert = true
}
case ValueIsNotEq:
if series.Values[len(series.Values)-1].Value != target {
shouldAlert = true
}
}
lastValue := series.Values[len(series.Values)-1].Value
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
shouldAlert = matchesCompareOp(op, lastValue, target)
}
return alertSmpl, shouldAlert
}

File diff suppressed because it is too large Load Diff

View File

@@ -243,6 +243,7 @@ type Store interface {
// Service Account Role
CreateServiceAccountRole(context.Context, *ServiceAccountRole) error
DeleteServiceAccountRoles(context.Context, valuer.UUID) error
DeleteServiceAccountRole(context.Context, valuer.UUID, valuer.UUID) error
// Service Account Factor API Key

View File

@@ -112,10 +112,6 @@ func (f TelemetryFieldKey) Text() string {
return TelemetryFieldKeyToText(&f)
}
func (f TelemetryFieldKey) CanonicalName() string {
return TelemetryFieldKeyToCanonicalName(&f)
}
// OverrideMetadataFrom copies the resolved metadata fields from src into f.
// This is used when adjusting user-provided keys to match known field definitions.
func (f *TelemetryFieldKey) OverrideMetadataFrom(src *TelemetryFieldKey) {
@@ -244,34 +240,6 @@ func TelemetryFieldKeyToText(key *TelemetryFieldKey) string {
return sb.String()
}
func TelemetryFieldKeyToCanonicalName(key *TelemetryFieldKey) string {
return fmt.Sprintf("%s;%s;%s", key.FieldContext.StringValue(), key.Name, key.FieldDataType.StringValue())
}
func GetFieldKeyFromCanonicalName(canonicalName string) TelemetryFieldKey {
parts := strings.Split(canonicalName, ";")
switch len(parts) {
case 3:
return TelemetryFieldKey{
FieldContext: FieldContext{valuer.NewString(parts[0])},
Name: parts[1],
FieldDataType: FieldDataType{valuer.NewString(parts[2])},
}
case 2:
return TelemetryFieldKey{
FieldContext: FieldContext{valuer.NewString(parts[0])},
Name: parts[1],
FieldDataType: FieldDataTypeUnspecified,
}
default:
return TelemetryFieldKey{
Name: canonicalName,
}
}
}
func FieldKeyToMaterializedColumnName(key *TelemetryFieldKey) string {
return fmt.Sprintf("`%s_%s_%s`",
key.FieldContext.String,

View File

@@ -74,14 +74,6 @@ def pytest_addoption(parser: pytest.Parser):
default="3.7.1",
help="zookeeper version",
)
parser.addoption(
"--force-recreate",
action="store",
default=None,
help="Force-recreate a cached fixture by deleting the existing one and creating a new one. "
"Pass a comma-separated list of fixture keys (e.g. --force-recreate=clickhouse,zookeeper). "
"Use with --reuse to re-cache the newly created fixture.",
)
parser.addoption(
"--schema-migrator-version",
action="store",

View File

@@ -17,14 +17,6 @@ def teardown(request: pytest.FixtureRequest) -> bool:
return request.config.getoption("--teardown")
def force_recreate(request: pytest.FixtureRequest, key: str) -> bool:
raw = request.config.getoption("--force-recreate")
if not raw:
return False
keys = [k.strip() for k in raw.split(",")]
return key in keys
def get_cached_resource(pytestconfig: pytest.Config, key: str):
"""Get a resource from pytest cache by key."""
return pytestconfig.cache.get(key, None)
@@ -61,21 +53,6 @@ def wrap( # pylint: disable=too-many-arguments,too-many-positional-arguments
"""
resource = empty()
if force_recreate(request, key):
existing_resource = pytestconfig.cache.get(key, None)
if existing_resource:
assert isinstance(existing_resource, dict)
logger.info(
"Force-recreating %s, deleting existing resource(%s)",
key,
existing_resource,
)
old_resource = restore(existing_resource)
delete(old_resource)
pytestconfig.cache.set(key, None)
else:
logger.info("Force-recreating %s, no existing resource found in cache", key)
if reuse(request):
existing_resource = pytestconfig.cache.get(key, None)
if existing_resource:

View File

@@ -14,8 +14,8 @@ QUERY_TIMEOUT = 30 # seconds
@dataclass
class TelemetryFieldKey:
name: str
field_data_type: Optional[str] = None
field_context: Optional[str] = None
field_data_type: str
field_context: str
def to_dict(self) -> Dict:
return {

View File

@@ -133,11 +133,11 @@ def test_get_user_roles(
assert "type" in role
def test_assign_additional_role(
def test_assign_role_replaces_previous(
signoz: types.SigNoz,
get_token: Callable[[str, str], str],
):
"""Verify POST /api/v2/users/{id}/roles assigns an additional role."""
"""Verify POST /api/v2/users/{id}/roles replaces existing role."""
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
@@ -166,8 +166,8 @@ def test_assign_additional_role(
assert response.status_code == HTTPStatus.OK
roles = response.json()["data"]
names = {r["name"] for r in roles}
assert "signoz-admin" in names
assert "signoz-editor" in names
assert "signoz-admin" not in names
def test_get_users_by_role(
@@ -253,9 +253,7 @@ def test_remove_role(
)
assert response.status_code == HTTPStatus.OK
roles_after = response.json()["data"]
names = {r["name"] for r in roles_after}
assert "signoz-editor" not in names
assert "signoz-admin" in names
assert len(roles_after) == 0
def test_user_with_roles_reflects_change(
@@ -282,8 +280,7 @@ def test_user_with_roles_reflects_change(
assert response.status_code == HTTPStatus.OK
data = response.json()["data"]
role_names = {ur["role"]["name"] for ur in data["userRoles"]}
assert "signoz-admin" in role_names
assert "signoz-editor" not in role_names
assert len(role_names) == 0
def test_admin_cannot_assign_role_to_self(

View File

@@ -1,7 +1,6 @@
import json
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
from typing import Callable, List, Any
from typing import Callable, List
import pytest
import requests
@@ -14,14 +13,8 @@ from fixtures.querier import (
find_named_result,
index_series_by_label,
make_query_request,
BuilderQuery,
TelemetryFieldKey,
OrderBy,
)
from src.querier.util import (
assert_identical_query_response,
generate_logs_with_corrupt_metadata,
)
from src.querier.util import assert_identical_query_response
def test_logs_list(
@@ -406,117 +399,172 @@ def test_logs_list(
assert "d-001" in values
@pytest.mark.parametrize(
"query,result",
[
pytest.param(
BuilderQuery(
signal="logs",
name="A",
limit=1,
),
lambda x: _flatten_log(x[2]),
),
pytest.param(
BuilderQuery(
signal="logs",
name="A",
limit=1,
order=[OrderBy(TelemetryFieldKey("timestamp"), "desc")],
),
lambda x: _flatten_log(x[3]),
),
pytest.param(
BuilderQuery(
signal="logs",
name="A",
limit=1,
order=[OrderBy(TelemetryFieldKey("attribute.timestamp"), "desc")],
),
lambda x: _flatten_log(x[0]),
),
pytest.param(
BuilderQuery(
signal="logs",
name="A",
limit=1,
order=[OrderBy(TelemetryFieldKey("id"), "desc")],
),
lambda x: _flatten_log(x[3]),
),
pytest.param(
BuilderQuery(
signal="logs",
name="A",
limit=1,
order=[OrderBy(TelemetryFieldKey("attribute.id"), "desc")],
),
lambda x: _flatten_log(x[2]),
),
pytest.param(
BuilderQuery(
signal="logs",
name="A",
select_fields=[
TelemetryFieldKey("timestamp"),
TelemetryFieldKey("id"),
],
order=[
OrderBy(TelemetryFieldKey("timestamp"), "desc"),
OrderBy(TelemetryFieldKey("id"), "desc"),
],
limit=1,
),
lambda x: [
x[3].id,
x[3].timestamp,
],
),
],
)
def test_logs_list_with_corrupt_data(
signoz: types.SigNoz,
create_user_admin: None, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
insert_logs: Callable[[List[Logs]], None],
query: BuilderQuery,
result: Callable[[List[Logs]], List[Any]],
) -> None:
"""
Setup:
Insert logs with corrupt data
Insert 2 logs with different attributes
Tests:
1. Query logs for the last 10 seconds and check if the logs are returned in the correct order
2. Query values of severity_text attribute from the autocomplete API
3. Query values of severity_text attribute from the fields API
4. Query values of code.file attribute from the autocomplete API
5. Query values of code.file attribute from the fields API
6. Query values of code.line attribute from the autocomplete API
7. Query values of code.line attribute from the fields API
"""
logs = generate_logs_with_corrupt_metadata()
insert_logs(logs)
insert_logs(
[
Logs(
timestamp=datetime.now(tz=timezone.utc) - timedelta(seconds=1),
resources={
"deployment.environment": "production",
"service.name": "java",
"os.type": "linux",
"host.name": "linux-001",
"cloud.provider": "integration",
"cloud.account.id": "001",
"timestamp": "2024-01-01T00:00:00Z",
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"code.line": 120,
"telemetry.sdk.language": "java",
"id": "1",
},
body="This is a log message, coming from a java application",
severity_text="DEBUG",
),
Logs(
timestamp=datetime.now(tz=timezone.utc),
resources={
"deployment.environment": "production",
"service.name": "go",
"os.type": "linux",
"host.name": "linux-001",
"cloud.provider": "integration",
"cloud.account.id": "001",
"id": 2,
},
attributes={
"log.iostream": "stdout",
"logtag": "F",
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"code.line": 120,
"metric.domain_id": "d-001",
"telemetry.sdk.language": "go",
"timestamp": "invalid-timestamp",
},
body="This is a log message, coming from a go application",
severity_text="INFO",
),
]
)
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Query Logs for the last 1 minute and check if the logs are returned in the correct order
response = make_query_request(
signoz,
token,
start_ms=int(
(datetime.now(tz=timezone.utc) - timedelta(minutes=1)).timestamp() * 1000
),
end_ms=int(datetime.now(tz=timezone.utc).timestamp() * 1000),
request_type="raw",
queries=[query.to_dict()],
# Query Logs for the last 10 seconds and check if the logs are returned in the correct order
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v5/query_range"),
timeout=2,
headers={
"authorization": f"Bearer {token}",
},
json={
"schemaVersion": "v1",
"start": int(
(datetime.now(tz=timezone.utc) - timedelta(seconds=10)).timestamp()
* 1000
),
"end": int(datetime.now(tz=timezone.utc).timestamp() * 1000),
"requestType": "raw",
"compositeQuery": {
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "logs",
"disabled": False,
"limit": 100,
"offset": 0,
"order": [
{"key": {"name": "timestamp"}, "direction": "desc"},
{"key": {"name": "id"}, "direction": "desc"},
],
"having": {"expression": ""},
"aggregations": [{"expression": "count()"}],
},
}
]
},
"formatOptions": {"formatTableResultForUI": False, "fillGaps": False},
},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
if response.status_code == HTTPStatus.OK:
if not result(logs):
# No results expected
assert response.json()["data"]["data"]["results"][0]["rows"] is None
else:
data = response.json()["data"]["data"]["results"][0]["rows"][0]["data"]
# Cannot compare values as they are randomly generated
for key, value in zip(list(data.keys()), result(logs)):
assert data[key] == value
results = response.json()["data"]["data"]["results"]
assert len(results) == 1
rows = results[0]["rows"]
assert len(rows) == 2
assert (
rows[0]["data"]["body"] == "This is a log message, coming from a go application"
)
assert rows[0]["data"]["resources_string"] == {
"cloud.account.id": "001",
"cloud.provider": "integration",
"deployment.environment": "production",
"host.name": "linux-001",
"os.type": "linux",
"service.name": "go",
"id": "2",
}
assert rows[0]["data"]["attributes_string"] == {
"code.file": "/opt/integration.go",
"code.function": "com.example.Integration.process",
"log.iostream": "stdout",
"logtag": "F",
"metric.domain_id": "d-001",
"telemetry.sdk.language": "go",
"timestamp": "invalid-timestamp",
}
assert rows[0]["data"]["attributes_number"] == {"code.line": 120}
assert (
rows[1]["data"]["body"]
== "This is a log message, coming from a java application"
)
assert rows[1]["data"]["resources_string"] == {
"cloud.account.id": "001",
"cloud.provider": "integration",
"deployment.environment": "production",
"host.name": "linux-001",
"os.type": "linux",
"service.name": "java",
"timestamp": "2024-01-01T00:00:00Z",
}
assert rows[1]["data"]["attributes_string"] == {
"code.file": "/opt/Integration.java",
"code.function": "com.example.Integration.process",
"id": "1",
"log.iostream": "stdout",
"logtag": "F",
"telemetry.sdk.language": "java",
}
assert rows[1]["data"]["attributes_number"] == {"code.line": 120}
@pytest.mark.parametrize(
@@ -2444,49 +2492,3 @@ def test_logs_fill_zero_formula_with_group_by(
expected_by_ts=expectations[service_name],
context=f"logs/fillZero/F1/{service_name}",
)
def _flatten_log(log: Logs) -> List[Any]:
    """Project a ``Logs`` fixture onto the flat, ordered column list that the
    raw list query returns, so test expectations can be zipped against rows.

    The resources map appears twice: once serialized to the ClickHouse JSON
    string form, then again as the plain mapping.
    """
    # Columns taken verbatim from the fixture, in result order.
    leading_fields = (
        "attributes_bool",
        "attributes_number",
        "attributes_string",
        "body",
        "id",
    )
    trailing_fields = (
        "resources_string",
        "scope_name",
        "scope_string",
        "scope_version",
        "severity_number",
        "severity_text",
        "span_id",
        "timestamp",
        "trace_flags",
        "trace_id",
    )
    row = [getattr(log, field) for field in leading_fields]
    row.append(_convert_dict_to_clickhouse_json_string(log.resources_string))
    row.extend(getattr(log, field) for field in trailing_fields)
    return row
def _convert_dict_to_clickhouse_json_string(data: dict) -> str:
"""
Convert a flat dictionary with dotted keys to a nested ClickHouse JSON string.
Example:
{
"deployment.environment": "production",
"service.name": "http-service",
"os.type": "linux",
"host.name": "linux-000",
"cloud.provider": "integration",
"cloud.account.id": "000",
"body": "corrupt_data",
}
is converted to
'{"body":"corrupt_data","cloud":{"account":{"id":"000"},"provider":"integration"},"deployment":{"environment":"production"},"host":{"name":"linux-000"},"os":{"type":"linux"},"service":{"name":"http-service"}}'
"""
nested = {}
for key, value in data.items():
parts = key.split(".")
current = nested
for part in parts[:-1]:
current = current.setdefault(part, {})
current[parts[-1]] = value
return json.dumps(nested, sort_keys=True, separators=(",", ":"))

View File

@@ -4,7 +4,6 @@ from typing import List
import requests
from fixtures.logs import Logs
from fixtures.traces import TraceIdGenerator, Traces, TracesKind, TracesStatusCode
@@ -39,99 +38,6 @@ def assert_identical_query_response(
), "Response data do not match"
def generate_logs_with_corrupt_metadata() -> List[Logs]:
    """
    Build log fixtures whose resource/attribute maps deliberately reuse the
    names of top-level log columns.

    Specifically, entries with 'id', 'timestamp', 'severity_text',
    'severity_number' and 'body' fields in metadata, so queries must not
    confuse metadata keys with the real columns.
    """
    now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)

    # Resource labels shared by the three http-service entries; each Logs()
    # call below receives its own fresh copy via dict unpacking.
    http_service_resource = {
        "deployment.environment": "production",
        "service.name": "http-service",
        "os.type": "linux",
        "host.name": "linux-000",
        "cloud.provider": "integration",
        "cloud.account.id": "000",
    }

    return [
        # Corrupt 'timestamp' in resources, 'severity_text'/'timestamp' in attributes.
        Logs(
            timestamp=now - timedelta(seconds=4),
            body="POST /integration request received",
            severity_text="INFO",
            resources={**http_service_resource, "timestamp": "corrupt_data"},
            attributes={
                "net.transport": "IP.TCP",
                "http.scheme": "http",
                "http.user_agent": "Integration Test",
                "http.request.method": "POST",
                "http.response.status_code": "200",
                "severity_text": "corrupt_data",
                "timestamp": "corrupt_data",
            },
        ),
        # Corrupt 'severity_number' and 'id' in resources.
        Logs(
            timestamp=now - timedelta(seconds=3),
            body="SELECT query executed",
            severity_text="DEBUG",
            resources={
                **http_service_resource,
                "severity_number": "corrupt_data",
                "id": "corrupt_data",
            },
            attributes={
                "db.name": "integration",
                "db.operation": "SELECT",
                "db.statement": "SELECT * FROM integration",
            },
        ),
        # Corrupt 'body' in resources, plus an 'id' attribute.
        Logs(
            timestamp=now - timedelta(seconds=2),
            body="HTTP PATCH failed with 404",
            severity_text="WARN",
            resources={**http_service_resource, "body": "corrupt_data"},
            attributes={
                "http.request.method": "PATCH",
                "http.status_code": "404",
                "id": "1",
            },
        ),
        # Different service; corrupt 'body' and 'timestamp' in attributes only.
        Logs(
            timestamp=now - timedelta(seconds=1),
            body="topic publish completed",
            severity_text="ERROR",
            resources={
                "deployment.environment": "production",
                "service.name": "topic-service",
                "os.type": "linux",
                "host.name": "linux-001",
                "cloud.provider": "integration",
                "cloud.account.id": "001",
            },
            attributes={
                "message.type": "SENT",
                "messaging.operation": "publish",
                "messaging.message.id": "001",
                "body": "corrupt_data",
                "timestamp": "corrupt_data",
            },
        ),
    ]
def generate_traces_with_corrupt_metadata() -> List[Traces]:
"""
Specifically, entries with 'id', 'timestamp', 'trace_id' and 'duration_nano' fields in metadata

View File

@@ -48,7 +48,7 @@ def test_assign_role_to_service_account(
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""POST /{id}/roles assigns a new role, verify via GET."""
"""POST /{id}/roles replaces existing role, verify via GET."""
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# create service account with viewer role
@@ -56,7 +56,7 @@ def test_assign_role_to_service_account(
signoz, token, "sa-assign-role", role="signoz-viewer"
)
# assign editor role additionally
# assign editor role (replaces viewer)
editor_role_id = find_role_by_name(signoz, token, "signoz-editor")
assign_resp = requests.post(
signoz.self.host_configs["8080"].get(
@@ -68,7 +68,7 @@ def test_assign_role_to_service_account(
)
assert assign_resp.status_code == HTTPStatus.NO_CONTENT, assign_resp.text
# verify both roles are present
# verify only editor role is present (viewer was replaced)
roles_resp = requests.get(
signoz.self.host_configs["8080"].get(
f"{SERVICE_ACCOUNT_BASE}/{service_account_id}/roles"
@@ -78,8 +78,9 @@ def test_assign_role_to_service_account(
)
assert roles_resp.status_code == HTTPStatus.OK, roles_resp.text
role_names = [r["name"] for r in roles_resp.json()["data"]]
assert "signoz-viewer" in role_names
assert len(role_names) == 1
assert "signoz-editor" in role_names
assert "signoz-viewer" not in role_names
def test_assign_role_idempotent(
@@ -87,7 +88,7 @@ def test_assign_role_idempotent(
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""POST same role twice succeeds (store uses ON CONFLICT DO NOTHING)."""
"""POST same role twice succeeds (replace with same role is idempotent)."""
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
service_account_id = create_service_account(
signoz, token, "sa-role-idempotent", role="signoz-viewer"
@@ -118,6 +119,66 @@ def test_assign_role_idempotent(
assert role_names.count("signoz-viewer") == 1
def test_assign_role_replaces_access(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    """After role replacement, SA loses old permissions and gains new ones."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # Start from a viewer service account plus an API key for it.
    service_account_id, api_key = create_service_account_with_key(
        signoz, admin_token, "sa-role-replace-access", role="signoz-viewer"
    )

    sa_key_headers = {"SIGNOZ-API-KEY": api_key}
    admin_headers = {"Authorization": f"Bearer {admin_token}"}
    list_url = signoz.self.host_configs["8080"].get(SERVICE_ACCOUNT_BASE)
    roles_url = signoz.self.host_configs["8080"].get(
        f"{SERVICE_ACCOUNT_BASE}/{service_account_id}/roles"
    )

    # A viewer key must be rejected by the admin-only listing endpoint.
    resp = requests.get(list_url, headers=sa_key_headers, timeout=5)
    assert (
        resp.status_code == HTTPStatus.FORBIDDEN
    ), f"Expected 403 for viewer on admin endpoint, got {resp.status_code}: {resp.text}"

    # Swap the viewer role for admin (assignment replaces, it does not append).
    admin_role_id = find_role_by_name(signoz, admin_token, "signoz-admin")
    assign_resp = requests.post(
        roles_url,
        json={"id": admin_role_id},
        headers=admin_headers,
        timeout=5,
    )
    assert assign_resp.status_code == HTTPStatus.NO_CONTENT, assign_resp.text

    # The same API key must now pass the admin-only endpoint.
    resp = requests.get(list_url, headers=sa_key_headers, timeout=5)
    assert (
        resp.status_code == HTTPStatus.OK
    ), f"Expected 200 for admin on admin endpoint, got {resp.status_code}: {resp.text}"

    # And the role list must contain exactly the admin role.
    roles_resp = requests.get(roles_url, headers=admin_headers, timeout=5)
    assert roles_resp.status_code == HTTPStatus.OK, roles_resp.text
    assigned_roles = [r["name"] for r in roles_resp.json()["data"]]
    assert len(assigned_roles) == 1
    assert "signoz-admin" in assigned_roles
    assert "signoz-viewer" not in assigned_roles
def test_remove_role_from_service_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument