Compare commits

...

8 Commits

Author SHA1 Message Date
Srikanth Chekuri
1eb27d0bc4 chore: add schema version specific validations (#10808)
Some checks are pending
build-staging / prepare (push) Waiting to run
build-staging / js-build (push) Blocked by required conditions
build-staging / go-build (push) Blocked by required conditions
build-staging / staging (push) Blocked by required conditions
Release Drafter / update_release_draft (push) Waiting to run
* chore: add schema version specific validations

* chore: address reivew comments

* chore: additional checks

* chore: adrress lint

* chore: more lint
2026-04-06 10:45:41 +00:00
Vikrant Gupta
55cce3c708 feat(authz): accept singular roles for user and service accounts (#10827)
* feat(authz): accept singular roles for user and service accounts

* feat(authz): update integration tests

* feat(authz): update integration tests

* feat: move role management to a single select flow on members and service account pages(temporarily)

* feat(authz): enable stats reporter for service accounts

* feat(authz): identity call for activating/deleting user

---------

Co-authored-by: SagarRajput-7 <sagar@signoz.io>
2026-04-06 10:41:56 +00:00
Vikrant Gupta
d677973d56 test(integration): add test cases for new user APIs (#10837)
Some checks failed
build-staging / prepare (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
* test(integration): add user_v2 tests

* test(integration): fix fmt

* test(integration): disable delete mode from tests

* test(integration): add response.text when the assertion fails

* test(integration): some renaming
2026-04-04 14:43:55 +00:00
Srikanth Chekuri
b5b89bb678 fix: several panic errors (#10817)
* fix: several panic errors

* fix: add recover for dashboard sql
2026-04-04 10:38:42 +00:00
Pandey
b5eab118dd refactor: move and internalize resource filter statement builder (#10824)
* refactor: move resourcefilter to pkg/telemetryresourcefilter

Move pkg/querybuilder/resourcefilter to pkg/telemetryresourcefilter
to align with the existing telemetry package naming convention
(telemetrylogs, telemetrytraces, telemetrymetrics, telemetrymeter).
The resource filter is a statement builder, not a query builder utility.

* refactor: internalize resource filter construction in statement builders

Each telemetry statement builder (logs, traces) now creates its own
resource filter internally instead of receiving it as an injected
dependency. This makes it impossible to wire the wrong resource table
and simplifies the provider.

Delete telemetryresourcefilter/tables.go — each telemetry package now
owns its resource table constant (LogsResourceV2TableName in
telemetrylogs, TracesResourceV3TableName in telemetrytraces).

* refactor: create field mapper and condition builder inside resource filter New

Remove fieldMapper and conditionBuilder params from
telemetryresourcefilter.New — they are always the same
(NewFieldMapper + NewConditionBuilder) so create them internally.
2026-04-04 10:27:46 +00:00
Vishal Sharma
98d0bdfe49 chore(frontend): sync pylon chat widget theme with app theme (#10830)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
2026-04-04 09:29:08 +00:00
Vikrant Gupta
a71006b662 chore(user): add partial index for email,org_id (#10828)
Some checks failed
build-staging / prepare (push) Has been cancelled
build-staging / js-build (push) Has been cancelled
build-staging / go-build (push) Has been cancelled
build-staging / staging (push) Has been cancelled
Release Drafter / update_release_draft (push) Has been cancelled
* chore(user): add partial index for email,org_id

* chore(user): fix integration test fmt

* chore(user): fix integration test lint
2026-04-03 20:27:04 +00:00
Vishal Sharma
769e36ec84 feat(frontend): add new onboarding datasource entries (#10829) 2026-04-03 20:13:08 +00:00
91 changed files with 4395 additions and 1189 deletions

View File

@@ -56,7 +56,6 @@ jobs:
- postgres
- sqlite
sqlite-mode:
- delete
- wal
clickhouse-version:
- 25.5.6
@@ -65,9 +64,6 @@ jobs:
- v0.142.0
postgres-version:
- 15
exclude:
- sqlstore-provider: postgres
sqlite-mode: wal
if: |
((github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork && github.event.pull_request.user.login != 'dependabot[bot]' && ! contains(github.event.pull_request.labels.*.name, 'safe-to-test')) ||
(github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'safe-to-test'))) && contains(github.event.pull_request.labels.*.name, 'safe-to-integrate')

View File

@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"log/slog"
"strings"
"sync"
"time"
@@ -64,12 +63,12 @@ func NewAnomalyRule(
BaseRule: baseRule,
}
switch strings.ToLower(p.RuleCondition.Seasonality) {
case "hourly":
switch p.RuleCondition.Seasonality {
case ruletypes.SeasonalityHourly:
t.seasonality = anomaly.SeasonalityHourly
case "daily":
case ruletypes.SeasonalityDaily:
t.seasonality = anomaly.SeasonalityDaily
case "weekly":
case ruletypes.SeasonalityWeekly:
t.seasonality = anomaly.SeasonalityWeekly
default:
t.seasonality = anomaly.SeasonalityDaily

View File

@@ -67,7 +67,7 @@ func TestAnomalyRule_NoData_AlertOnAbsent(t *testing.T) {
}},
},
SelectedQuery: "A",
Seasonality: "daily",
Seasonality: ruletypes.SeasonalityDaily,
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{
@@ -170,7 +170,7 @@ func TestAnomalyRule_NoData_AbsentFor(t *testing.T) {
}},
},
SelectedQuery: "A",
Seasonality: "daily",
Seasonality: ruletypes.SeasonalityDaily,
Thresholds: &ruletypes.RuleThresholdData{
Kind: ruletypes.BasicThresholdKind,
Spec: ruletypes.BasicRuleThresholds{{

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#1eb4d4" viewBox="0 0 24 24"><title>Hasura</title><path d="M23.558 8.172c.707-2.152.282-6.447-1.09-8.032a.42.42 0 0 0-.664.051l-1.69 2.59a1.32 1.32 0 0 1-1.737.276C16.544 1.885 14.354 1.204 12 1.204s-4.544.68-6.378 1.853a1.326 1.326 0 0 1-1.736-.276L2.196.191A.42.42 0 0 0 1.532.14C.16 1.728-.265 6.023.442 8.172c.236.716.3 1.472.16 2.207-.137.73-.276 1.61-.276 2.223C.326 18.898 5.553 24 11.997 24c6.447 0 11.671-5.105 11.671-11.398 0-.613-.138-1.494-.276-2.223a4.47 4.47 0 0 1 .166-2.207m-11.56 13.284c-4.984 0-9.036-3.96-9.036-8.827q0-.239.014-.473c.18-3.316 2.243-6.15 5.16-7.5 1.17-.546 2.481-.848 3.864-.848s2.69.302 3.864.85c2.917 1.351 4.98 4.187 5.16 7.501q.013.236.014.473c-.003 4.864-4.057 8.824-9.04 8.824m3.915-5.43-2.31-3.91-1.98-3.26a.26.26 0 0 0-.223-.125H9.508a.26.26 0 0 0-.227.13.25.25 0 0 0 .003.254l1.895 3.109-2.542 3.787a.25.25 0 0 0-.011.259.26.26 0 0 0 .23.132h1.905a.26.26 0 0 0 .218-.116l1.375-2.096 1.233 2.088a.26.26 0 0 0 .224.127h1.878c.094 0 .18-.049.224-.127a.24.24 0 0 0 0-.251z"/></svg>

After

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="#ea4b71" viewBox="0 0 24 24"><title>n8n</title><path d="M21.474 5.684a2.53 2.53 0 0 0-2.447 1.895H16.13a2.526 2.526 0 0 0-2.492 2.11l-.103.624a1.26 1.26 0 0 1-1.246 1.055h-1.001a2.527 2.527 0 0 0-4.893 0H4.973a2.527 2.527 0 1 0 0 1.264h1.422a2.527 2.527 0 0 0 4.894 0h1a1.26 1.26 0 0 1 1.247 1.055l.103.623a2.526 2.526 0 0 0 2.492 2.111h.37a2.527 2.527 0 1 0 0-1.263h-.37a1.26 1.26 0 0 1-1.246-1.056l-.103-.623A2.52 2.52 0 0 0 13.96 12a2.52 2.52 0 0 0 .82-1.48l.104-.622a1.26 1.26 0 0 1 1.246-1.056h2.896a2.527 2.527 0 1 0 2.447-3.158m0 1.263a1.263 1.263 0 0 1 1.263 1.263 1.263 1.263 0 0 1-1.263 1.264A1.263 1.263 0 0 1 20.21 8.21a1.263 1.263 0 0 1 1.264-1.263m-18.948 3.79A1.263 1.263 0 0 1 3.79 12a1.263 1.263 0 0 1-1.264 1.263A1.263 1.263 0 0 1 1.263 12a1.263 1.263 0 0 1 1.263-1.263m6.316 0A1.263 1.263 0 0 1 10.105 12a1.263 1.263 0 0 1-1.263 1.263A1.263 1.263 0 0 1 7.58 12a1.263 1.263 0 0 1 1.263-1.263m10.106 3.79a1.263 1.263 0 0 1 1.263 1.263 1.263 1.263 0 0 1-1.263 1.263 1.263 1.263 0 0 1-1.264-1.263 1.263 1.263 0 0 1 1.263-1.264"/></svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 5.8 KiB

View File

@@ -18,7 +18,7 @@ import AppLayout from 'container/AppLayout';
import Hex from 'crypto-js/enc-hex';
import HmacSHA256 from 'crypto-js/hmac-sha256';
import { KeyboardHotkeysProvider } from 'hooks/hotkeys/useKeyboardHotkeys';
import { useThemeConfig } from 'hooks/useDarkMode';
import { useIsDarkMode, useThemeConfig } from 'hooks/useDarkMode';
import { useGetTenantLicense } from 'hooks/useGetTenantLicense';
import { NotificationProvider } from 'hooks/useNotifications';
import { ResourceProvider } from 'hooks/useResourceAttribute';
@@ -212,6 +212,12 @@ function App(): JSX.Element {
activeLicenseFetchError,
]);
const isDarkMode = useIsDarkMode();
useEffect(() => {
window.Pylon?.('setTheme', isDarkMode ? 'dark' : 'light');
}, [isDarkMode]);
useEffect(() => {
if (
pathname === ROUTES.ONBOARDING ||

View File

@@ -83,37 +83,6 @@
opacity: 0.6;
}
&__role-select {
width: 100%;
.ant-select-selector {
background-color: var(--l2-background) !important;
border-color: var(--border) !important;
border-radius: 2px;
padding: var(--padding-1) var(--padding-2) !important;
display: flex;
align-items: center;
flex-wrap: wrap;
min-height: 32px;
height: auto !important;
}
.ant-select-selection-item {
font-size: var(--font-size-sm);
color: var(--l1-foreground);
line-height: 22px;
letter-spacing: -0.07px;
}
.ant-select-arrow {
color: var(--foreground);
}
&:not(.ant-select-disabled):hover .ant-select-selector {
border-color: var(--foreground);
}
}
&__meta {
display: flex;
flex-direction: column;

View File

@@ -61,10 +61,6 @@ function toSaveApiError(err: unknown): APIError {
);
}
function areSortedArraysEqual(a: string[], b: string[]): boolean {
return JSON.stringify([...a].sort()) === JSON.stringify([...b].sort());
}
export interface EditMemberDrawerProps {
member: MemberRow | null;
open: boolean;
@@ -83,7 +79,7 @@ function EditMemberDrawer({
const { user: currentUser } = useAppContext();
const [localDisplayName, setLocalDisplayName] = useState('');
const [localRoles, setLocalRoles] = useState<string[]>([]);
const [localRole, setLocalRole] = useState('');
const [isSaving, setIsSaving] = useState(false);
const [saveErrors, setSaveErrors] = useState<SaveError[]>([]);
const [isGeneratingLink, setIsGeneratingLink] = useState(false);
@@ -133,14 +129,14 @@ function EditMemberDrawer({
}, [fetchedUserId, fetchedUserDisplayName, member?.name]);
useEffect(() => {
setLocalRoles(fetchedRoleIds);
setLocalRole(fetchedRoleIds[0] ?? '');
}, [fetchedRoleIds]);
const isDirty =
member !== null &&
fetchedUser != null &&
(localDisplayName !== fetchedDisplayName ||
!areSortedArraysEqual(localRoles, fetchedRoleIds));
localRole !== (fetchedRoleIds[0] ?? ''));
const { mutateAsync: updateMyUser } = useUpdateMyUserV2();
const { mutateAsync: updateUser } = useUpdateUser();
@@ -224,7 +220,7 @@ function EditMemberDrawer({
setIsSaving(true);
try {
const nameChanged = localDisplayName !== fetchedDisplayName;
const rolesChanged = !areSortedArraysEqual(localRoles, fetchedRoleIds);
const rolesChanged = localRole !== (fetchedRoleIds[0] ?? '');
const namePromise = nameChanged
? isSelf
@@ -237,7 +233,9 @@ function EditMemberDrawer({
const [nameResult, rolesResult] = await Promise.allSettled([
namePromise,
rolesChanged ? applyDiff(localRoles, availableRoles) : Promise.resolve([]),
rolesChanged
? applyDiff([localRole].filter(Boolean), availableRoles)
: Promise.resolve([]),
]);
const errors: SaveError[] = [];
@@ -255,7 +253,10 @@ function EditMemberDrawer({
context: 'Roles update',
apiError: toSaveApiError(rolesResult.reason),
onRetry: async (): Promise<void> => {
const failures = await applyDiff(localRoles, availableRoles);
const failures = await applyDiff(
[localRole].filter(Boolean),
availableRoles,
);
setSaveErrors((prev) => {
const rest = prev.filter((e) => e.context !== 'Roles update');
return [
@@ -303,7 +304,7 @@ function EditMemberDrawer({
isDirty,
isSelf,
localDisplayName,
localRoles,
localRole,
fetchedDisplayName,
fetchedRoleIds,
updateMyUser,
@@ -445,15 +446,10 @@ function EditMemberDrawer({
>
<div className="edit-member-drawer__input-wrapper edit-member-drawer__input-wrapper--disabled">
<div className="edit-member-drawer__disabled-roles">
{localRoles.length > 0 ? (
localRoles.map((roleId) => {
const role = availableRoles.find((r) => r.id === roleId);
return (
<Badge key={roleId} color="vanilla">
{role?.name ?? roleId}
</Badge>
);
})
{localRole ? (
<Badge color="vanilla">
{availableRoles.find((r) => r.id === localRole)?.name ?? localRole}
</Badge>
) : (
<span className="edit-member-drawer__email-text"></span>
)}
@@ -464,15 +460,14 @@ function EditMemberDrawer({
) : (
<RolesSelect
id="member-role"
mode="multiple"
roles={availableRoles}
loading={rolesLoading}
isError={rolesError}
error={rolesErrorObj}
onRefetch={refetchRoles}
value={localRoles}
onChange={(roles): void => {
setLocalRoles(roles);
value={localRole}
onChange={(role): void => {
setLocalRole(role);
setSaveErrors((prev) =>
prev.filter(
(err) =>
@@ -480,8 +475,7 @@ function EditMemberDrawer({
),
);
}}
className="edit-member-drawer__role-select"
placeholder="Select roles"
placeholder="Select role"
/>
)}
</div>

View File

@@ -5,7 +5,6 @@ import {
getResetPasswordToken,
useDeleteUser,
useGetUser,
useRemoveUserRoleByUserIDAndRoleID,
useSetRoleByUserID,
useUpdateMyUserV2,
useUpdateUser,
@@ -56,7 +55,6 @@ jest.mock('api/generated/services/users', () => ({
useUpdateUser: jest.fn(),
useUpdateMyUserV2: jest.fn(),
useSetRoleByUserID: jest.fn(),
useRemoveUserRoleByUserIDAndRoleID: jest.fn(),
getResetPasswordToken: jest.fn(),
}));
@@ -171,10 +169,6 @@ describe('EditMemberDrawer', () => {
mutateAsync: jest.fn().mockResolvedValue({}),
isLoading: false,
});
(useRemoveUserRoleByUserIDAndRoleID as jest.Mock).mockReturnValue({
mutateAsync: jest.fn().mockResolvedValue({}),
isLoading: false,
});
(useDeleteUser as jest.Mock).mockReturnValue({
mutate: mockDeleteMutate,
isLoading: false,
@@ -248,7 +242,7 @@ describe('EditMemberDrawer', () => {
expect(onClose).not.toHaveBeenCalled();
});
it('calls setRole when a new role is added', async () => {
it('selecting a different role calls setRole with the new role name', async () => {
const onComplete = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
const mockSet = jest.fn().mockResolvedValue({});
@@ -277,32 +271,30 @@ describe('EditMemberDrawer', () => {
});
});
it('calls removeRole when an existing role is removed', async () => {
it('does not call removeRole when the role is changed', async () => {
const onComplete = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
const mockRemove = jest.fn().mockResolvedValue({});
const mockSet = jest.fn().mockResolvedValue({});
(useRemoveUserRoleByUserIDAndRoleID as jest.Mock).mockReturnValue({
mutateAsync: mockRemove,
(useSetRoleByUserID as jest.Mock).mockReturnValue({
mutateAsync: mockSet,
isLoading: false,
});
renderDrawer({ onComplete });
// Wait for the signoz-admin tag to appear, then click its remove button
const adminTag = await screen.findByTitle('signoz-admin');
const removeBtn = adminTag.querySelector(
'.ant-select-selection-item-remove',
) as Element;
await user.click(removeBtn);
// Switch from signoz-admin to signoz-viewer using single-select
await user.click(screen.getByLabelText('Roles'));
await user.click(await screen.findByTitle('signoz-viewer'));
const saveBtn = screen.getByRole('button', { name: /save member details/i });
await waitFor(() => expect(saveBtn).not.toBeDisabled());
await user.click(saveBtn);
await waitFor(() => {
expect(mockRemove).toHaveBeenCalledWith({
pathParams: { id: 'user-1', roleId: managedRoles[0].id },
expect(mockSet).toHaveBeenCalledWith({
pathParams: { id: 'user-1' },
data: { name: 'signoz-viewer' },
});
expect(onComplete).toHaveBeenCalled();
});

View File

@@ -88,3 +88,13 @@
color: var(--destructive);
}
}
.roles-single-select {
.ant-select-selector {
min-height: 32px;
background-color: var(--l2-background) !important;
border: 1px solid var(--border) !important;
border-radius: 2px;
padding: 2px var(--padding-2) !important;
}
}

View File

@@ -158,10 +158,10 @@ function RolesSelect(props: RolesSelectProps): JSX.Element {
return (
<Select
id={id}
value={value}
value={value || undefined}
onChange={onChange}
placeholder={placeholder}
className={cx('roles-select', className)}
className={cx('roles-single-select', className)}
loading={loading}
notFoundContent={notFoundContent}
options={options}

View File

@@ -16,8 +16,8 @@ interface OverviewTabProps {
account: ServiceAccountRow;
localName: string;
onNameChange: (v: string) => void;
localRoles: string[];
onRolesChange: (v: string[]) => void;
localRole: string;
onRoleChange: (v: string) => void;
isDisabled: boolean;
availableRoles: AuthtypesRoleDTO[];
rolesLoading?: boolean;
@@ -31,8 +31,8 @@ function OverviewTab({
account,
localName,
onNameChange,
localRoles,
onRolesChange,
localRole,
onRoleChange,
isDisabled,
availableRoles,
rolesLoading,
@@ -96,15 +96,10 @@ function OverviewTab({
{isDisabled ? (
<div className="sa-drawer__input-wrapper sa-drawer__input-wrapper--disabled">
<div className="sa-drawer__disabled-roles">
{localRoles.length > 0 ? (
localRoles.map((roleId) => {
const role = availableRoles.find((r) => r.id === roleId);
return (
<Badge key={roleId} color="vanilla">
{role?.name ?? roleId}
</Badge>
);
})
{localRole ? (
<Badge color="vanilla">
{availableRoles.find((r) => r.id === localRole)?.name ?? localRole}
</Badge>
) : (
<span className="sa-drawer__input-text"></span>
)}
@@ -114,15 +109,14 @@ function OverviewTab({
) : (
<RolesSelect
id="sa-roles"
mode="multiple"
roles={availableRoles}
loading={rolesLoading}
isError={rolesError}
error={rolesErrorObj}
onRefetch={onRefetchRoles}
value={localRoles}
onChange={onRolesChange}
placeholder="Select roles"
value={localRole}
onChange={onRoleChange}
placeholder="Select role"
/>
)}
</div>

View File

@@ -80,7 +80,7 @@ function ServiceAccountDrawer({
parseAsBoolean.withDefault(false),
);
const [localName, setLocalName] = useState('');
const [localRoles, setLocalRoles] = useState<string[]>([]);
const [localRole, setLocalRole] = useState('');
const [isSaving, setIsSaving] = useState(false);
const [saveErrors, setSaveErrors] = useState<SaveError[]>([]);
@@ -116,7 +116,7 @@ function ServiceAccountDrawer({
}, [account?.id, account?.name, setKeysPage]);
useEffect(() => {
setLocalRoles(currentRoles.map((r) => r.id).filter(Boolean) as string[]);
setLocalRole(currentRoles[0]?.id ?? '');
}, [currentRoles]);
const isDeleted =
@@ -125,8 +125,7 @@ function ServiceAccountDrawer({
const isDirty =
account !== null &&
(localName !== (account.name ?? '') ||
JSON.stringify([...localRoles].sort()) !==
JSON.stringify([...currentRoles.map((r) => r.id).filter(Boolean)].sort()));
localRole !== (currentRoles[0]?.id ?? ''));
const {
roles: availableRoles,
@@ -216,7 +215,10 @@ function ServiceAccountDrawer({
const retryRolesUpdate = useCallback(async (): Promise<void> => {
try {
const failures = await applyDiff(localRoles, availableRoles);
const failures = await applyDiff(
[localRole].filter(Boolean),
availableRoles,
);
if (failures.length === 0) {
setSaveErrors((prev) => prev.filter((e) => e.context !== 'Roles update'));
} else {
@@ -240,7 +242,7 @@ function ServiceAccountDrawer({
),
);
}
}, [localRoles, availableRoles, applyDiff, toSaveApiError, makeRoleRetry]);
}, [localRole, availableRoles, applyDiff, toSaveApiError, makeRoleRetry]);
const handleSave = useCallback(async (): Promise<void> => {
if (!account || !isDirty) {
@@ -259,7 +261,7 @@ function ServiceAccountDrawer({
const [nameResult, rolesResult] = await Promise.allSettled([
namePromise,
applyDiff(localRoles, availableRoles),
applyDiff([localRole].filter(Boolean), availableRoles),
]);
const errors: SaveError[] = [];
@@ -308,7 +310,7 @@ function ServiceAccountDrawer({
account,
isDirty,
localName,
localRoles,
localRole,
availableRoles,
updateMutateAsync,
applyDiff,
@@ -410,8 +412,8 @@ function ServiceAccountDrawer({
account={account}
localName={localName}
onNameChange={handleNameChange}
localRoles={localRoles}
onRolesChange={setLocalRoles}
localRole={localRole}
onRoleChange={setLocalRole}
isDisabled={isDeleted}
availableRoles={availableRoles}
rolesLoading={rolesLoading}

View File

@@ -139,20 +139,20 @@ describe('ServiceAccountDrawer', () => {
});
});
it('changing roles enables Save; clicking Save sends updated roles in payload', async () => {
const updateSpy = jest.fn();
it('changing roles enables Save; clicking Save sends role add request without delete', async () => {
const roleSpy = jest.fn();
const deleteSpy = jest.fn();
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(
rest.put(SA_ENDPOINT, async (req, res, ctx) => {
updateSpy(await req.json());
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
rest.post(SA_ROLES_ENDPOINT, async (req, res, ctx) => {
roleSpy(await req.json());
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
rest.delete(SA_ROLE_DELETE_ENDPOINT, (_, res, ctx) => {
deleteSpy();
return res(ctx.status(200), ctx.json({ status: 'success', data: {} }));
}),
);
renderDrawer();
@@ -167,12 +167,12 @@ describe('ServiceAccountDrawer', () => {
await user.click(saveBtn);
await waitFor(() => {
expect(updateSpy).not.toHaveBeenCalled();
expect(roleSpy).toHaveBeenCalledWith(
expect.objectContaining({
id: '019c24aa-2248-7585-a129-4188b3473c27',
}),
);
expect(deleteSpy).not.toHaveBeenCalled();
});
});
@@ -350,7 +350,7 @@ describe('ServiceAccountDrawer save-error UX', () => {
).toBeInTheDocument();
});
it('role update failure shows SaveErrorItem with the role name context', async () => {
it('role add failure shows SaveErrorItem with the role name context', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
server.use(

View File

@@ -5,7 +5,7 @@
"tags": [
"quickstart"
],
"module": "apm",
"module": "home",
"relatedSearchKeywords": [
"apm",
"application performance monitoring",
@@ -22,6 +22,28 @@
"imgUrl": "/Logos/quickstart.svg",
"link": "/docs/cloud/quickstart/"
},
{
"dataSource": "signoz-mcp-server",
"label": "SigNoz MCP Server",
"tags": [
"quickstart"
],
"module": "home",
"relatedSearchKeywords": [
"agent",
"ai",
"mcp",
"mcp server",
"model context protocol",
"quickstart",
"signoz",
"signoz mcp",
"signoz mcp server",
"setup"
],
"imgUrl": "/Logos/signoz-brand-logo.svg",
"link": "/docs/ai/signoz-mcp-server/"
},
{
"dataSource": "migrate-from-datadog",
"label": "From Datadog",
@@ -1524,18 +1546,24 @@
"link": "/docs/userguide/collect_docker_logs/"
},
{
"dataSource": "vercel-logs",
"label": "Vercel logs",
"dataSource": "vercel",
"label": "Vercel",
"imgUrl": "/Logos/vercel.svg",
"tags": [
"apm/traces",
"logs"
],
"module": "logs",
"module": "home",
"relatedSearchKeywords": [
"collect vercel logs",
"logging",
"logs",
"opentelemetry drains",
"trace drain",
"traces",
"tracing",
"vercel",
"vercel drains",
"vercel functions logs",
"vercel log forwarding",
"vercel log monitoring",
@@ -1545,10 +1573,12 @@
"vercel observability",
"vercel opentelemetry integration",
"vercel to otel",
"vercel trace drain",
"vercel traces",
"vercel-logs"
],
"id": "vercel-logs",
"link": "/docs/userguide/vercel_logs_to_signoz/"
"link": "/docs/userguide/vercel-to-signoz/"
},
{
"dataSource": "heroku-logs",
@@ -4029,6 +4059,57 @@
],
"link": "/docs/pydantic-ai-observability/"
},
{
"dataSource": "qwen-observability",
"label": "Qwen",
"imgUrl": "/Logos/qwen.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"alibaba cloud",
"dashscope",
"llm",
"llm monitoring",
"monitoring",
"observability",
"otel qwen",
"qwen",
"qwen logs",
"qwen metrics",
"qwen monitoring",
"qwen observability",
"qwen response time",
"qwen traces"
],
"id": "qwen-observability",
"link": "/docs/qwen-observability/"
},
{
"dataSource": "n8n-cloud",
"label": "n8n Cloud",
"imgUrl": "/Logos/n8n.svg",
"tags": [
"LLM Monitoring"
],
"module": "apm",
"relatedSearchKeywords": [
"llm monitoring",
"monitoring",
"n8n",
"n8n cloud",
"n8n monitoring",
"n8n observability",
"n8n traces",
"observability",
"otel n8n",
"workflow monitoring",
"workflow traces"
],
"id": "n8n-cloud",
"link": "/docs/n8n-monitoring/"
},
{
"dataSource": "mastra-monitoring",
"label": "Mastra",
@@ -5158,6 +5239,31 @@
"id": "microsoft-sql-server",
"link": "/docs/integrations/sql-server/"
},
{
"dataSource": "hasura",
"label": "Hasura",
"imgUrl": "/Logos/hasura.svg",
"tags": [
"database"
],
"module": "apm",
"relatedSearchKeywords": [
"database",
"graphql",
"graphql engine",
"hasura",
"hasura graphql",
"hasura logs",
"hasura metrics",
"hasura monitoring",
"hasura observability",
"hasura traces",
"opentelemetry hasura",
"telemetry"
],
"id": "hasura",
"link": "/docs/integrations/opentelemetry-hasura/"
},
{
"dataSource": "supabase",
"label": "Supabase",

View File

@@ -1,10 +1,6 @@
import { useCallback, useMemo } from 'react';
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
import {
useGetUser,
useRemoveUserRoleByUserIDAndRoleID,
useSetRoleByUserID,
} from 'api/generated/services/users';
import { useGetUser, useSetRoleByUserID } from 'api/generated/services/users';
export interface MemberRoleUpdateFailure {
roleName: string;
@@ -43,7 +39,6 @@ export function useMemberRoleManager(
);
const { mutateAsync: setRole } = useSetRoleByUserID();
const { mutateAsync: removeRole } = useRemoveUserRoleByUserIDAndRoleID();
const applyDiff = useCallback(
async (
@@ -53,25 +48,12 @@ export function useMemberRoleManager(
const currentRoleIdSet = new Set(fetchedRoleIds);
const desiredRoleIdSet = new Set(localRoleIds.filter(Boolean));
const toRemove = currentUserRoles.filter((ur) => {
const id = ur.role?.id ?? ur.roleId;
return id && !desiredRoleIdSet.has(id);
});
const toAdd = availableRoles.filter(
(r) => r.id && desiredRoleIdSet.has(r.id) && !currentRoleIdSet.has(r.id),
);
/// TODO: re-enable deletes once BE for this is streamlined
const allOps = [
...toRemove.map((ur) => ({
roleName: ur.role?.name ?? 'unknown',
run: (): ReturnType<typeof removeRole> =>
removeRole({
pathParams: {
id: userId,
roleId: ur.role?.id ?? ur.roleId ?? '',
},
}),
})),
...toAdd.map((role) => ({
roleName: role.name ?? 'unknown',
run: (): ReturnType<typeof setRole> =>
@@ -94,7 +76,7 @@ export function useMemberRoleManager(
return failures;
},
[userId, fetchedRoleIds, currentUserRoles, setRole, removeRole],
[userId, fetchedRoleIds, setRole],
);
return { fetchedRoleIds, isLoading, applyDiff };

View File

@@ -3,7 +3,6 @@ import { useQueryClient } from 'react-query';
import {
getGetServiceAccountRolesQueryKey,
useCreateServiceAccountRole,
useDeleteServiceAccountRole,
useGetServiceAccountRoles,
} from 'api/generated/services/serviceaccount';
import type { AuthtypesRoleDTO } from 'api/generated/services/sigNoz.schemas';
@@ -36,7 +35,6 @@ export function useServiceAccountRoleManager(
// the retry for these mutations is safe due to being idempotent on backend
const { mutateAsync: createRole } = useCreateServiceAccountRole();
const { mutateAsync: deleteRole } = useDeleteServiceAccountRole();
const invalidateRoles = useCallback(
() =>
@@ -62,21 +60,13 @@ export function useServiceAccountRoleManager(
(r) => r.id && desiredRoleIds.has(r.id) && !currentRoleIds.has(r.id),
);
const removedRoles = currentRoles.filter(
(r) => r.id && !desiredRoleIds.has(r.id),
);
// TODO: re-enable deletes once BE for this is streamlined
const allOperations = [
...addedRoles.map((role) => ({
role,
run: (): ReturnType<typeof createRole> =>
createRole({ pathParams: { id: accountId }, data: { id: role.id } }),
})),
...removedRoles.map((role) => ({
role,
run: (): ReturnType<typeof deleteRole> =>
deleteRole({ pathParams: { id: accountId, rid: role.id } }),
})),
];
const results = await Promise.allSettled(
@@ -102,7 +92,7 @@ export function useServiceAccountRoleManager(
return failures;
},
[accountId, currentRoles, createRole, deleteRole, invalidateRoles],
[accountId, currentRoles, createRole, invalidateRoles],
);
return {

View File

@@ -240,6 +240,26 @@ func (m *MockNotificationManager) DeleteAllRoutePoliciesByName(ctx context.Conte
return nil
}
func (m *MockNotificationManager) GetRoutePoliciesByChannel(ctx context.Context, orgID string, channelName string) ([]*alertmanagertypes.RoutePolicy, error) {
if orgID == "" {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "orgID cannot be empty")
}
var matched []*alertmanagertypes.RoutePolicy
for _, route := range m.routes {
if route.OrgID != orgID {
continue
}
for _, ch := range route.Channels {
if ch == channelName {
matched = append(matched, route)
break
}
}
}
return matched, nil
}
func (m *MockNotificationManager) Match(ctx context.Context, orgID string, ruleID string, set model.LabelSet) ([]string, error) {
key := getKey(orgID, ruleID)
if err := m.errors[key]; err != nil {

View File

@@ -59,6 +59,10 @@ func (m *MockSQLRouteStore) DeleteRouteByName(ctx context.Context, orgID string,
return m.routeStore.DeleteRouteByName(ctx, orgID, name)
}
func (m *MockSQLRouteStore) GetAll(ctx context.Context, orgID string) ([]*alertmanagertypes.RoutePolicy, error) {
return m.routeStore.GetAll(ctx, orgID)
}
func (m *MockSQLRouteStore) ExpectGetByID(orgID, id string, route *alertmanagertypes.RoutePolicy) {
rows := sqlmock.NewRows([]string{"id", "org_id", "name", "expression", "kind", "description", "enabled", "tags", "channels", "created_at", "updated_at", "created_by", "updated_by"})

View File

@@ -83,6 +83,18 @@ func (store *store) GetAllByName(ctx context.Context, orgID string, name string)
return routes, nil
}
func (store *store) GetAll(ctx context.Context, orgID string) ([]*routeTypes.RoutePolicy, error) {
var routes []*routeTypes.RoutePolicy
err := store.sqlstore.BunDBCtx(ctx).NewSelect().Model(&routes).Where("org_id = ?", orgID).Scan(ctx)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, nil
}
return nil, errors.Wrapf(err, errors.TypeInternal, errors.CodeInternal, "unable to fetch routing policies for orgID: %s", orgID)
}
return routes, nil
}
func (store *store) DeleteRouteByName(ctx context.Context, orgID string, name string) error {
_, err := store.sqlstore.BunDBCtx(ctx).NewDelete().Model((*routeTypes.RoutePolicy)(nil)).Where("org_id = ?", orgID).Where("name = ?", name).Exec(ctx)
if err != nil {

View File

@@ -23,6 +23,10 @@ type NotificationManager interface {
DeleteRoutePolicy(ctx context.Context, orgID string, routeID string) error
DeleteAllRoutePoliciesByName(ctx context.Context, orgID string, name string) error
// GetRoutePoliciesByChannel returns all route policies (both rule-based and policy-based)
// that reference the given channel name.
GetRoutePoliciesByChannel(ctx context.Context, orgID string, channelName string) ([]*alertmanagertypes.RoutePolicy, error)
// Route matching
Match(ctx context.Context, orgID string, ruleID string, set model.LabelSet) ([]string, error)
}

View File

@@ -155,6 +155,28 @@ func (r *provider) GetAllRoutePolicies(ctx context.Context, orgID string) ([]*al
return r.routeStore.GetAllByKind(ctx, orgID, alertmanagertypes.PolicyBasedExpression)
}
// GetRoutePoliciesByChannel returns every route policy of the org that
// lists channelName among its channels.
func (r *provider) GetRoutePoliciesByChannel(ctx context.Context, orgID string, channelName string) ([]*alertmanagertypes.RoutePolicy, error) {
	if orgID == "" {
		return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "orgID cannot be empty")
	}

	routes, err := r.routeStore.GetAll(ctx, orgID)
	if err != nil {
		return nil, err
	}

	// references reports whether the policy routes to channelName.
	references := func(policy *alertmanagertypes.RoutePolicy) bool {
		for _, name := range policy.Channels {
			if name == channelName {
				return true
			}
		}
		return false
	}

	var referencing []*alertmanagertypes.RoutePolicy
	for _, policy := range routes {
		if references(policy) {
			referencing = append(referencing, policy)
		}
	}
	return referencing, nil
}
func (r *provider) DeleteRoutePolicy(ctx context.Context, orgID string, routeID string) error {
if routeID == "" {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "routeID cannot be empty")

View File

@@ -169,6 +169,21 @@ func (provider *provider) DeleteChannelByID(ctx context.Context, orgID string, c
return err
}
// Check if channel is referenced by any route policy (rule-based or policy-based)
policies, err := provider.notificationManager.GetRoutePoliciesByChannel(ctx, orgID, channel.Name)
if err != nil {
return err
}
if len(policies) > 0 {
names := make([]string, 0, len(policies))
for _, p := range policies {
names = append(names, p.Name)
}
return errors.NewInvalidInputf(errors.CodeInvalidInput,
"channel %q cannot be deleted because it is used by the following routing policies: %v",
channel.Name, names)
}
config, err := provider.configStore.Get(ctx, orgID)
if err != nil {
return err

View File

@@ -376,7 +376,7 @@ func (module *module) getOrGetSetIdentity(ctx context.Context, serviceAccountID
}
func (module *module) setRole(ctx context.Context, orgID valuer.UUID, id valuer.UUID, role *authtypes.Role) error {
serviceAccount, err := module.Get(ctx, orgID, id)
serviceAccount, err := module.GetWithRoles(ctx, orgID, id)
if err != nil {
return err
}
@@ -386,12 +386,24 @@ func (module *module) setRole(ctx context.Context, orgID valuer.UUID, id valuer.
return err
}
err = module.authz.Grant(ctx, orgID, []string{role.Name}, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, id.String(), orgID, nil))
err = module.authz.ModifyGrant(ctx, orgID, serviceAccount.RoleNames(), []string{role.Name}, authtypes.MustNewSubject(authtypes.TypeableServiceAccount, id.String(), orgID, nil))
if err != nil {
return err
}
err = module.store.CreateServiceAccountRole(ctx, serviceAccountRole)
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.store.DeleteServiceAccountRoles(ctx, serviceAccount.ID)
if err != nil {
return err
}
err = module.store.CreateServiceAccountRole(ctx, serviceAccountRole)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}

View File

@@ -170,6 +170,21 @@ func (store *store) CreateServiceAccountRole(ctx context.Context, serviceAccount
return nil
}
// DeleteServiceAccountRoles removes every role binding attached to the
// given service account. Deleting zero rows is not an error.
func (store *store) DeleteServiceAccountRoles(ctx context.Context, serviceAccountID valuer.UUID) error {
	_, err := store.sqlstore.
		BunDBCtx(ctx).
		NewDelete().
		Model(new(serviceaccounttypes.ServiceAccountRole)).
		Where("service_account_id = ?", serviceAccountID).
		Exec(ctx)
	return err
}
func (store *store) DeleteServiceAccountRole(ctx context.Context, serviceAccountID valuer.UUID, roleID valuer.UUID) error {
_, err := store.
sqlstore.

View File

@@ -5,6 +5,7 @@ import (
"net/http"
"time"
"github.com/SigNoz/signoz/pkg/statsreporter"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/serviceaccounttypes"
"github.com/SigNoz/signoz/pkg/valuer"
@@ -66,6 +67,8 @@ type Module interface {
GetIdentity(context.Context, string) (*authtypes.Identity, error)
Config() Config
statsreporter.StatsCollector
}
type Handler interface {

View File

@@ -383,6 +383,11 @@ func (module *setter) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "cannot self delete")
}
err = user.UpdateStatus(types.UserStatusDeleted)
if err != nil {
return err
}
userRoles, err := module.getter.GetRolesByUserID(ctx, user.ID)
if err != nil {
return err
@@ -406,6 +411,8 @@ func (module *setter) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
traitsOrProperties := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Deleted", map[string]any{
"deleted_by": deletedBy,
})
@@ -568,8 +575,13 @@ func (module *setter) UpdatePasswordByResetPasswordToken(ctx context.Context, to
roleNames := roleNamesFromUserRoles(userRoles)
isPendingInviteUser := user.Status == types.UserStatusPendingInvite
// since grant is idempotent, multiple calls won't cause issues in case of retries
if user.Status == types.UserStatusPendingInvite {
if isPendingInviteUser {
if err := user.UpdateStatus(types.UserStatusActive); err != nil {
return err
}
if err = module.authz.Grant(
ctx,
user.OrgID,
@@ -580,15 +592,14 @@ func (module *setter) UpdatePasswordByResetPasswordToken(ctx context.Context, to
}
traitsOrProperties := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Activated", traitsOrProperties)
}
return module.store.RunInTx(ctx, func(ctx context.Context) error {
if user.Status == types.UserStatusPendingInvite {
if err := user.UpdateStatus(types.UserStatusActive); err != nil {
return err
}
if err := module.store.UpdateUser(ctx, user.OrgID, user); err != nil {
if isPendingInviteUser {
err := module.store.UpdateUser(ctx, user.OrgID, user)
if err != nil {
return err
}
}
@@ -817,6 +828,7 @@ func (module *setter) activatePendingUser(ctx context.Context, user *types.User,
}
traitsOrProperties := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traitsOrProperties)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Activated", traitsOrProperties)
return nil
@@ -866,16 +878,17 @@ func (module *setter) AddUserRole(ctx context.Context, orgID, userID valuer.UUID
if err != nil {
return err
}
for _, userRole := range existingUserRoles {
if userRole.Role != nil && userRole.Role.Name == roleName {
return nil // role already assigned no-op
}
existingRoles := make([]string, len(existingUserRoles))
for idx, role := range existingUserRoles {
existingRoles[idx] = role.Role.Name
}
// grant via authz (idempotent)
if err := module.authz.Grant(
if err := module.authz.ModifyGrant(
ctx,
orgID,
existingRoles,
[]string{roleName},
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), existingUser.OrgID, nil),
); err != nil {
@@ -884,7 +897,20 @@ func (module *setter) AddUserRole(ctx context.Context, orgID, userID valuer.UUID
// create user_role entry
userRoles := authtypes.NewUserRoles(userID, foundRoles)
if err := module.userRoleStore.CreateUserRoles(ctx, userRoles); err != nil {
err = module.store.RunInTx(ctx, func(ctx context.Context) error {
err = module.userRoleStore.DeleteUserRoles(ctx, existingUser.ID)
if err != nil {
return err
}
err := module.userRoleStore.CreateUserRoles(ctx, userRoles)
if err != nil {
return err
}
return nil
})
if err != nil {
return err
}

View File

@@ -279,7 +279,6 @@ func (store *store) SoftDeleteUser(ctx context.Context, orgID string, id string)
_, err = tx.NewUpdate().
Model(new(types.User)).
Set("status = ?", types.UserStatusDeleted).
Set("deleted_at = ?", now).
Set("updated_at = ?", now).
Where("org_id = ?", orgID).
Where("id = ?", id).

View File

@@ -124,8 +124,12 @@ func (q *querier) postProcessResults(ctx context.Context, results map[string]any
continue
}
stepInterval, err := req.StepIntervalForQuery(name)
if err != nil {
return nil, err
}
funcs := []qbtypes.Function{{Name: qbtypes.FunctionNameFillZero}}
funcs = q.prepareFillZeroArgsWithStep(funcs, req, req.StepIntervalForQuery(name))
funcs = q.prepareFillZeroArgsWithStep(funcs, req, stepInterval)
// empty time series if it doesn't exist
tsData, ok := typedResults[name].Value.(*qbtypes.TimeSeriesData)
if !ok {

View File

@@ -9,7 +9,6 @@ import (
"github.com/SigNoz/signoz/pkg/prometheus"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetadata"
"github.com/SigNoz/signoz/pkg/telemetrymeter"
@@ -73,22 +72,12 @@ func newProvider(
traceFieldMapper := telemetrytraces.NewFieldMapper()
traceConditionBuilder := telemetrytraces.NewConditionBuilder(traceFieldMapper)
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(settings, nil, traceFieldMapper, traceConditionBuilder, nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
settings,
telemetryMetadataStore,
traceFieldMapper,
traceConditionBuilder,
resourceFilterStmtBuilder,
traceAggExprRewriter,
telemetryStore,
)
@@ -99,22 +88,13 @@ func newProvider(
telemetryMetadataStore,
traceFieldMapper,
traceConditionBuilder,
traceStmtBuilder, // Pass the regular trace statement builder
resourceFilterStmtBuilder, // Pass the resource filter statement builder
traceStmtBuilder, // Pass the regular trace statement builder
traceAggExprRewriter,
)
// Create log statement builder
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
settings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
telemetryMetadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
settings,
telemetrylogs.DefaultFullTextColumn,
@@ -127,7 +107,6 @@ func newProvider(
telemetryMetadataStore,
logFieldMapper,
logConditionBuilder,
logResourceFilterStmtBuilder,
logAggExprRewriter,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.GetBodyJSONKey,

View File

@@ -14,10 +14,11 @@ func ApplyHavingClause(result []*v3.Result, queryRangeParams *v3.QueryRangeParam
builderQueries := queryRangeParams.CompositeQuery.BuilderQueries
// apply having clause for metrics and formula
if builderQueries != nil &&
(builderQueries[result.QueryName].DataSource == v3.DataSourceMetrics ||
builderQueries[result.QueryName].QueryName != builderQueries[result.QueryName].Expression) {
havingClause := builderQueries[result.QueryName].Having
builderQuery := builderQueries[result.QueryName]
if builderQuery != nil &&
(builderQuery.DataSource == v3.DataSourceMetrics ||
builderQuery.QueryName != builderQuery.Expression) {
havingClause := builderQuery.Having
for i := 0; i < len(result.Series); i++ {
for j := 0; j < len(result.Series[i].Points); j++ {

View File

@@ -312,6 +312,72 @@ func TestApplyHavingCaluse(t *testing.T) {
},
},
},
{
name: "query not in builder queries should not panic",
results: []*v3.Result{
{
QueryName: "A",
Series: []*v3.Series{
{
Points: []v3.Point{
{Value: 1.0},
{Value: 2.0},
},
},
},
},
},
params: &v3.QueryRangeParamsV3{
CompositeQuery: &v3.CompositeQuery{
BuilderQueries: map[string]*v3.BuilderQuery{},
},
},
want: []*v3.Result{
{
QueryName: "A",
Series: []*v3.Series{
{
Points: []v3.Point{
{Value: 1.0},
{Value: 2.0},
},
},
},
},
},
},
{
name: "nil builder queries should not panic",
results: []*v3.Result{
{
QueryName: "A",
Series: []*v3.Series{
{
Points: []v3.Point{
{Value: 1.0},
},
},
},
},
},
params: &v3.QueryRangeParamsV3{
CompositeQuery: &v3.CompositeQuery{
BuilderQueries: nil,
},
},
want: []*v3.Result{
{
QueryName: "A",
Series: []*v3.Series{
{
Points: []v3.Point{
{Value: 1.0},
},
},
},
},
},
},
}
for _, tc := range testCases {

View File

@@ -139,9 +139,6 @@ func WithRuleStateHistoryModule(module rulestatehistory.Module) RuleOption {
}
func NewBaseRule(id string, orgID valuer.UUID, p *ruletypes.PostableRule, opts ...RuleOption) (*BaseRule, error) {
if p.RuleCondition == nil || !p.RuleCondition.IsValid() {
return nil, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid rule condition")
}
threshold, err := p.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
return nil, err

View File

@@ -320,6 +320,38 @@ func (m *Manager) Stop(_ context.Context) {
m.logger.Info("rule manager stopped")
}
// validateChannels checks that every channel referenced by the rule
// exists as a notification channel for the given org. It returns an
// invalid-input error naming the missing channels, or nil when all
// referenced channels exist (or the rule references none).
func (m *Manager) validateChannels(ctx context.Context, orgID string, rule *ruletypes.PostableRule) error {
	referenced := rule.Channels()
	if len(referenced) == 0 {
		return nil
	}

	orgChannels, err := m.alertmanager.ListChannels(ctx, orgID)
	if err != nil {
		return err
	}

	// Build a set of channel names that actually exist for this org.
	existing := make(map[string]struct{}, len(orgChannels))
	for _, channel := range orgChannels {
		existing[channel.Name] = struct{}{}
	}

	var missing []string
	for _, name := range referenced {
		if _, ok := existing[name]; !ok {
			missing = append(missing, name)
		}
	}
	if len(missing) == 0 {
		return nil
	}
	return errors.NewInvalidInputf(errors.CodeInvalidInput,
		"channels: the following channels do not exist: %v", missing)
}
// EditRule writes the rule definition to the
// datastore and also updates the rule executor
func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID) error {
@@ -336,7 +368,12 @@ func (m *Manager) EditRule(ctx context.Context, ruleStr string, id valuer.UUID)
if err != nil {
return err
}
if err := parsedRule.Validate(); err != nil {
return err
}
if err := m.validateChannels(ctx, claims.OrgID, &parsedRule); err != nil {
return err
}
existingRule, err := m.ruleStore.GetStoredRule(ctx, id)
if err != nil {
return err
@@ -533,7 +570,12 @@ func (m *Manager) CreateRule(ctx context.Context, ruleStr string) (*ruletypes.Ge
if err != nil {
return nil, err
}
if err := parsedRule.Validate(); err != nil {
return nil, err
}
if err := m.validateChannels(ctx, claims.OrgID, &parsedRule); err != nil {
return nil, err
}
now := time.Now()
storedRule := &ruletypes.Rule{
Identifiable: types.Identifiable{
@@ -920,7 +962,12 @@ func (m *Manager) PatchRule(ctx context.Context, ruleStr string, id valuer.UUID)
m.logger.ErrorContext(ctx, "failed to unmarshal patched rule with given id", slog.String("rule.id", id.StringValue()), errors.Attr(err))
return nil, err
}
if err := storedRule.Validate(); err != nil {
return nil, err
}
if err := m.validateChannels(ctx, claims.OrgID, &storedRule); err != nil {
return nil, err
}
// deploy or un-deploy task according to patched (new) rule state
if err := m.syncRuleStateWithTask(ctx, orgID, taskName, &storedRule); err != nil {
m.logger.ErrorContext(ctx, "failed to sync stored rule state with the task", slog.String("task.name", taskName), errors.Attr(err))
@@ -971,6 +1018,12 @@ func (m *Manager) TestNotification(ctx context.Context, orgID valuer.UUID, ruleS
if err != nil {
return 0, errors.WrapInvalidInputf(err, errors.CodeInvalidInput, "failed to unmarshal rule")
}
if err := parsedRule.Validate(); err != nil {
return 0, err
}
if err := m.validateChannels(ctx, orgID.StringValue(), &parsedRule); err != nil {
return 0, err
}
if !parsedRule.NotificationSettings.UsePolicy {
parsedRule.NotificationSettings.GroupBy = append(parsedRule.NotificationSettings.GroupBy, ruletypes.LabelThresholdName)
}

View File

@@ -8,7 +8,6 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querier"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
"github.com/SigNoz/signoz/pkg/telemetrylogs"
"github.com/SigNoz/signoz/pkg/telemetrymetrics"
"github.com/SigNoz/signoz/pkg/telemetrystore"
@@ -66,19 +65,8 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
}
metadataStore.KeysMap = keysMap
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
logFieldMapper := telemetrylogs.NewFieldMapper()
logConditionBuilder := telemetrylogs.NewConditionBuilder(logFieldMapper)
logResourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
providerSettings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
metadataStore,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.GetBodyJSONKey,
)
logAggExprRewriter := querybuilder.NewAggExprRewriter(
providerSettings,
telemetrylogs.DefaultFullTextColumn,
@@ -91,7 +79,6 @@ func prepareQuerierForLogs(telemetryStore telemetrystore.TelemetryStore, keysMap
metadataStore,
logFieldMapper,
logConditionBuilder,
logResourceFilterStmtBuilder,
logAggExprRewriter,
telemetrylogs.DefaultFullTextColumn,
telemetrylogs.GetBodyJSONKey,
@@ -127,22 +114,12 @@ func prepareQuerierForTraces(telemetryStore telemetrystore.TelemetryStore, keysM
traceFieldMapper := telemetrytraces.NewFieldMapper()
traceConditionBuilder := telemetrytraces.NewConditionBuilder(traceFieldMapper)
resourceFilterFieldMapper := resourcefilter.NewFieldMapper()
resourceFilterConditionBuilder := resourcefilter.NewConditionBuilder(resourceFilterFieldMapper)
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
providerSettings,
resourceFilterFieldMapper,
resourceFilterConditionBuilder,
metadataStore,
)
traceAggExprRewriter := querybuilder.NewAggExprRewriter(providerSettings, nil, traceFieldMapper, traceConditionBuilder, nil)
traceStmtBuilder := telemetrytraces.NewTraceQueryStatementBuilder(
providerSettings,
metadataStore,
traceFieldMapper,
traceConditionBuilder,
resourceFilterStmtBuilder,
traceAggExprRewriter,
telemetryStore,
)

View File

@@ -1,8 +0,0 @@
package resourcefilter
const (
TracesDBName = "signoz_traces"
TraceResourceV3TableName = "distributed_traces_v3_resource"
LogsDBName = "signoz_logs"
LogsResourceV2TableName = "distributed_logs_v2_resource"
)

View File

@@ -104,8 +104,15 @@ func extractCHOriginFieldFromQuery(query string) (string, error) {
return "", errors.NewInternalf(errors.CodeInternal, "failed to parse origin field from query: %s", err.Error())
}
if len(stmts) == 0 {
return "", errors.NewInternalf(errors.CodeInternal, "no statements found in query")
}
// Get the first statement which should be a SELECT
selectStmt := stmts[0].(*parser.SelectQuery)
selectStmt, ok := stmts[0].(*parser.SelectQuery)
if !ok {
return "", errors.NewInternalf(errors.CodeInternal, "expected SELECT query, got %T", stmts[0])
}
// If query has multiple select items, return blank string as we don't expect multiple select items
if len(selectStmt.SelectItems) > 1 {

View File

@@ -2,6 +2,7 @@ package queryparser
import (
"context"
"fmt"
"strings"
@@ -23,7 +24,15 @@ func New(settings factory.ProviderSettings) QueryParser {
}
}
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType qbtypes.QueryType, query string) (*queryfilterextractor.FilterResult, error) {
func (p *queryParserImpl) AnalyzeQueryFilter(ctx context.Context, queryType qbtypes.QueryType, query string) (result *queryfilterextractor.FilterResult, err error) {
// the third-party clickhouse sql parser can panic on certain inputs, recover gracefully
defer func() {
if r := recover(); r != nil {
result = nil
err = errors.NewInternalf(errors.CodeInternal, "failed to analyze query filter: %s", fmt.Sprint(r))
}
}()
var extractorType queryfilterextractor.ExtractorType
switch queryType {
case qbtypes.QueryTypePromQL:

View File

@@ -194,6 +194,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewAddServiceAccountFactory(sqlstore, sqlschema),
sqlmigration.NewDeprecateAPIKeyFactory(sqlstore, sqlschema),
sqlmigration.NewServiceAccountAuthzactory(sqlstore),
sqlmigration.NewDropUserDeletedAtFactory(sqlstore, sqlschema),
)
}

View File

@@ -437,6 +437,7 @@ func New(
tokenizer,
config,
modules.AuthDomain,
modules.ServiceAccount,
}
// Initialize stats reporter from the available stats reporter provider factories

View File

@@ -0,0 +1,84 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
// dropUserDeletedAt is the SQL migration that removes the deleted_at
// column from the users table; soft deletion appears to be tracked via
// the status column instead (see the partial index created in Up).
type dropUserDeletedAt struct {
	sqlstore  sqlstore.SQLStore
	sqlschema sqlschema.SQLSchema
}
// NewDropUserDeletedAtFactory returns the provider factory for the
// "drop_user_deleted_at" migration, capturing the given store and schema.
func NewDropUserDeletedAtFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
	newMigration := func(_ context.Context, _ factory.ProviderSettings, _ Config) (SQLMigration, error) {
		return &dropUserDeletedAt{sqlstore: sqlstore, sqlschema: sqlschema}, nil
	}
	return factory.NewProviderFactory(factory.MustNewName("drop_user_deleted_at"), newMigration)
}
// Register adds this migration's Up/Down pair to the migration set.
func (migration *dropUserDeletedAt) Register(migrations *migrate.Migrations) error {
	return migrations.Register(migration.Up, migration.Down)
}
// Up drops the users.deleted_at column inside a single transaction:
//  1. drop the old unique index on (org_id, email, deleted_at),
//  2. drop the deleted_at column itself,
//  3. create a partial unique index on (email, org_id) that excludes
//     rows whose status is 'deleted'.
// Any failure aborts and the deferred Rollback undoes the transaction.
func (migration *dropUserDeletedAt) Up(ctx context.Context, db *bun.DB) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	// Rollback after a successful Commit is a no-op, so this is safe on
	// both the success and failure paths.
	defer func() {
		_ = tx.Rollback()
	}()
	table, _, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
	if err != nil {
		return err
	}
	// Describes the column being dropped so the operator can emit the
	// dialect-specific DDL. NOTE(review): Nullable is false here —
	// presumably it only needs to identify the column; confirm the
	// operator ignores nullability on drop.
	deletedAtColumn := &sqlschema.Column{
		Name:     sqlschema.ColumnName("deleted_at"),
		DataType: sqlschema.DataTypeTimestamp,
		Nullable: false,
	}
	// Collect all DDL statements first, then execute them in order.
	sqls := [][]byte{}
	dropIndexSQLs := migration.sqlschema.Operator().DropIndex(&sqlschema.UniqueIndex{TableName: "users", ColumnNames: []sqlschema.ColumnName{"org_id", "email", "deleted_at"}})
	sqls = append(sqls, dropIndexSQLs...)
	dropSQLs := migration.sqlschema.Operator().DropColumn(table, deletedAtColumn)
	sqls = append(sqls, dropSQLs...)
	// Replacement uniqueness guarantee: one live (non-deleted) user per
	// email within an org.
	indexSQLs := migration.sqlschema.Operator().CreateIndex(
		&sqlschema.PartialUniqueIndex{
			TableName:   "users",
			ColumnNames: []sqlschema.ColumnName{"email", "org_id"},
			Where:       "status != 'deleted'",
		})
	sqls = append(sqls, indexSQLs...)
	for _, sql := range sqls {
		if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
			return err
		}
	}
	if err := tx.Commit(); err != nil {
		return err
	}
	return nil
}
// Down is intentionally a no-op: the dropped column and its data cannot
// be restored, so this migration is not reversible.
func (migration *dropUserDeletedAt) Down(context.Context, *bun.DB) error {
	return nil
}

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
@@ -833,25 +832,13 @@ func buildJSONTestStatementBuilder(t *testing.T) *logQueryStatementBuilder {
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
resourceFilterFM := resourcefilter.NewFieldMapper()
resourceFilterCB := resourcefilter.NewConditionBuilder(resourceFilterFM)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourcefilter.NewLogResourceFilterStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
resourceFilterFM,
resourceFilterCB,
mockMetadataStore,
DefaultFullTextColumn,
GetBodyJSONKey,
)
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,

View File

@@ -9,6 +9,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/huandu/go-sqlbuilder"
@@ -33,13 +34,22 @@ func NewLogQueryStatementBuilder(
metadataStore telemetrytypes.MetadataStore,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.LogAggregation],
aggExprRewriter qbtypes.AggExprRewriter,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *logQueryStatementBuilder {
logsSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrylogs")
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.LogAggregation](
settings,
DBName,
LogsResourceV2TableName,
telemetrytypes.SignalLogs,
metadataStore,
fullTextColumn,
jsonKeyToKey,
)
return &logQueryStatementBuilder{
logger: logsSettings.Logger(),
metadataStore: metadataStore,

View File

@@ -8,35 +8,12 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/stretchr/testify/require"
)
// resourceFilterStmtBuilder constructs a log resource-filter statement
// builder backed by a mock metadata store, for use in these tests.
func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.LogAggregation] {
	fm := resourcefilter.NewFieldMapper()
	cb := resourcefilter.NewConditionBuilder(fm)
	mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
	keysMap := buildCompleteFieldKeyMap(time.Now())
	// Retag every field key with the logs signal. NOTE(review): this
	// mutation only sticks if the map values hold pointers to the keys —
	// confirm against buildCompleteFieldKeyMap.
	for _, keys := range keysMap {
		for _, key := range keys {
			key.Signal = telemetrytypes.SignalLogs
		}
	}
	mockMetadataStore.KeysMap = keysMap
	return resourcefilter.NewLogResourceFilterStatementBuilder(
		instrumentationtest.New().ToProviderSettings(),
		fm,
		cb,
		mockMetadataStore,
		DefaultFullTextColumn,
		GetBodyJSONKey,
	)
}
func TestStatementBuilderTimeSeries(t *testing.T) {
// Create a test release time
@@ -225,14 +202,11 @@ func TestStatementBuilderTimeSeries(t *testing.T) {
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -349,14 +323,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -492,14 +463,11 @@ func TestStatementBuilderListQueryResourceTests(t *testing.T) {
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -569,14 +537,11 @@ func TestStatementBuilderTimeSeriesBodyGroupBy(t *testing.T) {
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -665,14 +630,11 @@ func TestStatementBuilderListQueryServiceCollision(t *testing.T) {
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -890,14 +852,11 @@ func TestAdjustKey(t *testing.T) {
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -1045,13 +1004,11 @@ func TestStmtBuilderBodyField(t *testing.T) {
mockMetadataStore.KeysMap[field.Name] = append(mockMetadataStore.KeysMap[field.Name], &f)
}
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,
@@ -1135,13 +1092,11 @@ func TestStmtBuilderBodyFullTextSearch(t *testing.T) {
mockMetadataStore.KeysMap[field.Name] = append(mockMetadataStore.KeysMap[field.Name], &f)
}
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewLogQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
DefaultFullTextColumn,
GetBodyJSONKey,

View File

@@ -8,6 +8,7 @@ const (
TagAttributesV2LocalTableName = "tag_attributes_v2"
LogAttributeKeysTblName = "distributed_logs_attribute_keys"
LogResourceKeysTblName = "distributed_logs_resource_keys"
LogsResourceV2TableName = "distributed_logs_v2_resource"
PathTypesTableName = "distributed_json_path_types"
PromotedPathsTableName = "distributed_json_promoted_paths"
SkipIndexTableName = "system.data_skipping_indices"

View File

@@ -1,4 +1,4 @@
package resourcefilter
package telemetryresourcefilter
import (
"context"

View File

@@ -1,4 +1,4 @@
package resourcefilter
package telemetryresourcefilter
import (
"context"

View File

@@ -1,4 +1,4 @@
package resourcefilter
package telemetryresourcefilter
import (
"context"

View File

@@ -1,11 +1,10 @@
package resourcefilter
package telemetryresourcefilter
import (
"context"
"fmt"
"log/slog"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
@@ -13,30 +12,11 @@ import (
"github.com/huandu/go-sqlbuilder"
)
var (
ErrUnsupportedSignal = errors.NewInvalidInputf(errors.CodeInvalidInput, "unsupported signal type")
)
// Configuration for different signal types.
type signalConfig struct {
dbName string
tableName string
}
var signalConfigs = map[telemetrytypes.Signal]signalConfig{
telemetrytypes.SignalTraces: {
dbName: TracesDBName,
tableName: TraceResourceV3TableName,
},
telemetrytypes.SignalLogs: {
dbName: LogsDBName,
tableName: LogsResourceV2TableName,
},
}
// Generic resource filter statement builder.
// resourceFilterStatementBuilder builds resource fingerprint filter CTEs.
type resourceFilterStatementBuilder[T any] struct {
logger *slog.Logger
dbName string
tableName string
fieldMapper qbtypes.FieldMapper
conditionBuilder qbtypes.ConditionBuilder
metadataStore telemetrytypes.MetadataStore
@@ -52,38 +32,26 @@ var (
_ qbtypes.StatementBuilder[qbtypes.LogAggregation] = (*resourceFilterStatementBuilder[qbtypes.LogAggregation])(nil)
)
// NewTraceResourceFilterStatementBuilder creates a new trace resource filter statement builder.
func NewTraceResourceFilterStatementBuilder(
func New[T any](
settings factory.ProviderSettings,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
metadataStore telemetrytypes.MetadataStore,
) *resourceFilterStatementBuilder[qbtypes.TraceAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
return &resourceFilterStatementBuilder[qbtypes.TraceAggregation]{
logger: set.Logger(),
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
metadataStore: metadataStore,
signal: telemetrytypes.SignalTraces,
}
}
func NewLogResourceFilterStatementBuilder(
settings factory.ProviderSettings,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
dbName string,
tableName string,
signal telemetrytypes.Signal,
metadataStore telemetrytypes.MetadataStore,
fullTextColumn *telemetrytypes.TelemetryFieldKey,
jsonKeyToKey qbtypes.JsonKeyToFieldFunc,
) *resourceFilterStatementBuilder[qbtypes.LogAggregation] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter")
return &resourceFilterStatementBuilder[qbtypes.LogAggregation]{
) *resourceFilterStatementBuilder[T] {
set := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetryresourcefilter")
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
return &resourceFilterStatementBuilder[T]{
logger: set.Logger(),
fieldMapper: fieldMapper,
conditionBuilder: conditionBuilder,
dbName: dbName,
tableName: tableName,
fieldMapper: fm,
conditionBuilder: cb,
metadataStore: metadataStore,
signal: telemetrytypes.SignalLogs,
signal: signal,
fullTextColumn: fullTextColumn,
jsonKeyToKey: jsonKeyToKey,
}
@@ -120,14 +88,9 @@ func (b *resourceFilterStatementBuilder[T]) Build(
query qbtypes.QueryBuilderQuery[T],
variables map[string]qbtypes.VariableItem,
) (*qbtypes.Statement, error) {
config, exists := signalConfigs[b.signal]
if !exists {
return nil, errors.WrapInvalidInputf(ErrUnsupportedSignal, errors.CodeInvalidInput, "unsupported signal: %s", b.signal)
}
q := sqlbuilder.NewSelectBuilder()
q.Select("fingerprint")
q.From(fmt.Sprintf("%s.%s", config.dbName, config.tableName))
q.From(fmt.Sprintf("%s.%s", b.dbName, b.tableName))
keySelectors := b.getKeySelectors(query)
keys, _, err := b.metadataStore.GetKeysMulti(ctx, keySelectors)

View File

@@ -1,4 +1,4 @@
package resourcefilter
package telemetryresourcefilter
import (
"context"
@@ -367,16 +367,17 @@ func TestResourceFilterStatementBuilder_Traces(t *testing.T) {
},
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildTestFieldKeyMap(telemetrytypes.SignalTraces)
builder := NewTraceResourceFilterStatementBuilder(
builder := New[qbtypes.TraceAggregation](
instrumentationtest.New().ToProviderSettings(),
fm,
cb,
"signoz_traces",
"distributed_traces_v3_resource",
telemetrytypes.SignalTraces,
mockMetadataStore,
nil,
nil,
)
for _, c := range cases {
@@ -583,15 +584,14 @@ func TestResourceFilterStatementBuilder_Logs(t *testing.T) {
},
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildTestFieldKeyMap(telemetrytypes.SignalLogs)
builder := NewLogResourceFilterStatementBuilder(
builder := New[qbtypes.LogAggregation](
instrumentationtest.New().ToProviderSettings(),
fm,
cb,
"signoz_logs",
"distributed_logs_v2_resource",
telemetrytypes.SignalLogs,
mockMetadataStore,
nil,
nil,
@@ -645,16 +645,17 @@ func TestResourceFilterStatementBuilder_Variables(t *testing.T) {
},
}
fm := NewFieldMapper()
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildTestFieldKeyMap(telemetrytypes.SignalTraces)
builder := NewTraceResourceFilterStatementBuilder(
builder := New[qbtypes.TraceAggregation](
instrumentationtest.New().ToProviderSettings(),
fm,
cb,
"signoz_traces",
"distributed_traces_v3_resource",
telemetrytypes.SignalTraces,
mockMetadataStore,
nil,
nil,
)
for _, c := range cases {

View File

@@ -10,6 +10,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
"github.com/SigNoz/signoz/pkg/telemetrystore"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
@@ -38,11 +39,21 @@ func NewTraceQueryStatementBuilder(
metadataStore telemetrytypes.MetadataStore,
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
aggExprRewriter qbtypes.AggExprRewriter,
telemetryStore telemetrystore.TelemetryStore,
) *traceQueryStatementBuilder {
tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.TraceAggregation](
settings,
DBName,
TracesResourceV3TableName,
telemetrytypes.SignalTraces,
metadataStore,
nil,
nil,
)
return &traceQueryStatementBuilder{
logger: tracesSettings.Logger(),
metadataStore: metadataStore,

View File

@@ -8,27 +8,12 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
"github.com/stretchr/testify/require"
)
func resourceFilterStmtBuilder() qbtypes.StatementBuilder[qbtypes.TraceAggregation] {
fm := resourcefilter.NewFieldMapper()
cb := resourcefilter.NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
return resourcefilter.NewTraceResourceFilterStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
fm,
cb,
mockMetadataStore,
)
}
func TestStatementBuilder(t *testing.T) {
cases := []struct {
name string
@@ -372,14 +357,11 @@ func TestStatementBuilder(t *testing.T) {
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -668,14 +650,11 @@ func TestStatementBuilderListQuery(t *testing.T) {
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -778,14 +757,11 @@ func TestStatementBuilderListQueryWithCorruptData(t *testing.T) {
}
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -931,14 +907,11 @@ func TestStatementBuilderTraceQuery(t *testing.T) {
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -1147,14 +1120,11 @@ func TestAdjustKey(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -1422,14 +1392,11 @@ func TestAdjustKeys(t *testing.T) {
cb := NewConditionBuilder(fm)
mockMetadataStore := telemetrytypestest.NewMockMetadataStore()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
statementBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)

View File

@@ -9,4 +9,5 @@ const (
TopLevelOperationsTableName = "distributed_top_level_operations"
TraceSummaryTableName = "distributed_trace_summary"
SpanAttributesKeysTblName = "distributed_span_attributes_keys"
TracesResourceV3TableName = "distributed_traces_v3_resource"
)

View File

@@ -392,13 +392,11 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -409,7 +407,6 @@ func TestTraceOperatorStatementBuilder(t *testing.T) {
fm,
cb,
traceStmtBuilder,
resourceFilterStmtBuilder,
aggExprRewriter,
)
@@ -508,13 +505,11 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
mockMetadataStore.KeysMap = buildCompleteFieldKeyMap()
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
resourceFilterStmtBuilder := resourceFilterStmtBuilder()
traceStmtBuilder := NewTraceQueryStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil,
)
@@ -525,7 +520,6 @@ func TestTraceOperatorStatementBuilderErrors(t *testing.T) {
fm,
cb,
traceStmtBuilder,
resourceFilterStmtBuilder,
aggExprRewriter,
)

View File

@@ -7,6 +7,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/telemetryresourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
)
@@ -29,10 +30,20 @@ func NewTraceOperatorStatementBuilder(
fieldMapper qbtypes.FieldMapper,
conditionBuilder qbtypes.ConditionBuilder,
traceStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
resourceFilterStmtBuilder qbtypes.StatementBuilder[qbtypes.TraceAggregation],
aggExprRewriter qbtypes.AggExprRewriter,
) *traceOperatorStatementBuilder {
tracesSettings := factory.NewScopedProviderSettings(settings, "github.com/SigNoz/signoz/pkg/telemetrytraces")
resourceFilterStmtBuilder := telemetryresourcefilter.New[qbtypes.TraceAggregation](
settings,
DBName,
TracesResourceV3TableName,
telemetrytypes.SignalTraces,
metadataStore,
nil,
nil,
)
return &traceOperatorStatementBuilder{
logger: tracesSettings.Logger(),
metadataStore: metadataStore,

View File

@@ -7,7 +7,6 @@ import (
"github.com/SigNoz/signoz/pkg/instrumentation/instrumentationtest"
"github.com/SigNoz/signoz/pkg/querybuilder"
"github.com/SigNoz/signoz/pkg/querybuilder/resourcefilter"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes"
"github.com/SigNoz/signoz/pkg/types/telemetrytypes/telemetrytypestest"
@@ -35,15 +34,6 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
Signal: telemetrytypes.SignalTraces,
}}
resourceFilterFM := resourcefilter.NewFieldMapper()
resourceFilterCB := resourcefilter.NewConditionBuilder(resourceFilterFM)
resourceFilterStmtBuilder := resourcefilter.NewTraceResourceFilterStatementBuilder(
instrumentationtest.New().ToProviderSettings(),
resourceFilterFM,
resourceFilterCB,
mockMetadataStore,
)
aggExprRewriter := querybuilder.NewAggExprRewriter(instrumentationtest.New().ToProviderSettings(), nil, fm, cb, nil)
statementBuilder := NewTraceQueryStatementBuilder(
@@ -51,7 +41,6 @@ func TestTraceTimeRangeOptimization(t *testing.T) {
mockMetadataStore,
fm,
cb,
resourceFilterStmtBuilder,
aggExprRewriter,
nil, // telemetryStore is nil - optimization won't happen but code path is tested
)

View File

@@ -136,4 +136,5 @@ type RouteStore interface {
GetAllByKind(ctx context.Context, orgID string, kind ExpressionKind) ([]*RoutePolicy, error)
GetAllByName(ctx context.Context, orgID string, name string) ([]*RoutePolicy, error)
DeleteRouteByName(ctx context.Context, orgID string, name string) error
GetAll(ctx context.Context, orgID string) ([]*RoutePolicy, error)
}

View File

@@ -91,6 +91,16 @@ func (f QueryBuilderFormula) Validate() error {
)
}
// Validate expression is parseable
if _, err := govaluate.NewEvaluableExpressionWithFunctions(f.Expression, EvalFuncs()); err != nil {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"failed to parse expression for formula query %q: %s",
f.Name,
err.Error(),
)
}
// Validate functions if present
for i, fn := range f.Functions {
if err := fn.Validate(); err != nil {

View File

@@ -305,7 +305,7 @@ func (q *QueryRangeRequest) PrepareJSONSchema(schema *jsonschema.Schema) error {
return nil
}
func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
func (r *QueryRangeRequest) StepIntervalForQuery(name string) (int64, error) {
stepsMap := make(map[string]int64)
for _, query := range r.CompositeQuery.Queries {
switch spec := query.Spec.(type) {
@@ -317,11 +317,13 @@ func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
stepsMap[spec.Name] = spec.StepInterval.Milliseconds()
case PromQuery:
stepsMap[spec.Name] = spec.Step.Milliseconds()
case QueryBuilderTraceOperator:
stepsMap[spec.Name] = spec.StepInterval.Milliseconds()
}
}
if step, ok := stepsMap[name]; ok {
return step
return step, nil
}
exprStr := ""
@@ -335,12 +337,15 @@ func (r *QueryRangeRequest) StepIntervalForQuery(name string) int64 {
}
}
expression, _ := govaluate.NewEvaluableExpressionWithFunctions(exprStr, EvalFuncs())
expression, err := govaluate.NewEvaluableExpressionWithFunctions(exprStr, EvalFuncs())
if err != nil {
return 0, errors.NewInvalidInputf(errors.CodeInvalidInput, "failed to parse expression for formula query %q: %s", name, err.Error())
}
steps := []int64{}
for _, v := range expression.Vars() {
steps = append(steps, stepsMap[v])
}
return LCMList(steps)
return LCMList(steps), nil
}
func (r *QueryRangeRequest) NumAggregationForQuery(name string) int64 {

View File

@@ -1798,3 +1798,108 @@ func TestQueryRangeRequest_GetQueriesSupportingZeroDefault(t *testing.T) {
})
}
}
func TestQueryRangeRequest_StepIntervalForQuery(t *testing.T) {
tests := []struct {
name string
request QueryRangeRequest
queryName string
wantStep int64
wantErr bool
}{
{
name: "trace operator returns its step interval",
request: QueryRangeRequest{
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
StepInterval: Step{Duration: 60 * time.Second},
Aggregations: []TraceAggregation{{Expression: "count()"}},
},
},
{
Type: QueryTypeTraceOperator,
Spec: QueryBuilderTraceOperator{
Name: "Trace Operator",
StepInterval: Step{Duration: 120 * time.Second},
Expression: "A",
},
},
},
},
},
queryName: "Trace Operator",
wantStep: 120000,
},
{
name: "formula computes LCM of referenced query steps",
request: QueryRangeRequest{
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "A",
Signal: telemetrytypes.SignalTraces,
StepInterval: Step{Duration: 60 * time.Second},
Aggregations: []TraceAggregation{{Expression: "count()"}},
},
},
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[TraceAggregation]{
Name: "B",
Signal: telemetrytypes.SignalTraces,
StepInterval: Step{Duration: 90 * time.Second},
Aggregations: []TraceAggregation{{Expression: "count()"}},
},
},
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "F1",
Expression: "A + B",
},
},
},
},
},
queryName: "F1",
wantStep: 180000, // LCM of 60s and 90s = 180s
},
{
name: "invalid formula expression returns error",
request: QueryRangeRequest{
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "F1",
Expression: "A +",
},
},
},
},
},
queryName: "F1",
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := tt.request.StepIntervalForQuery(tt.queryName)
if tt.wantErr {
require.Error(t, err)
return
}
require.NoError(t, err)
assert.Equal(t, tt.wantStep, got)
})
}
}

View File

@@ -496,6 +496,19 @@ func (r *QueryRangeRequest) Validate(opts ...ValidationOption) error {
)
}
// raw/trace request types don't support metric queries;
// metrics are always aggregated and there is no raw form.
if r.RequestType == RequestTypeRaw || r.RequestType == RequestTypeRawStream || r.RequestType == RequestTypeTrace {
for _, envelope := range r.CompositeQuery.Queries {
if envelope.GetSignal() == telemetrytypes.SignalMetrics {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"raw request type is not supported for metric queries",
)
}
}
}
// Validate composite query
if err := r.CompositeQuery.Validate(opts...); err != nil {
return err
@@ -584,13 +597,7 @@ func validateQueryEnvelope(envelope QueryEnvelope, opts ...ValidationOption) err
"invalid formula spec",
)
}
if spec.Expression == "" {
return errors.NewInvalidInputf(
errors.CodeInvalidInput,
"formula expression is required",
)
}
return nil
return spec.Validate()
case QueryTypeJoin:
_, ok := envelope.Spec.(QueryBuilderJoin)
if !ok {

View File

@@ -518,7 +518,7 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
},
},
wantErr: true,
errMsg: "expression is required",
errMsg: "expression cannot be blank",
},
{
name: "promql with empty query should return error",
@@ -665,6 +665,57 @@ func TestQueryRangeRequest_ValidateCompositeQuery(t *testing.T) {
},
wantErr: false,
},
{
name: "raw request with metric query should return error",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeRaw,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[MetricAggregation]{
Name: "A",
Disabled: true,
Signal: telemetrytypes.SignalMetrics,
Aggregations: []MetricAggregation{},
},
},
{
Type: QueryTypeFormula,
Spec: QueryBuilderFormula{
Name: "F1",
Expression: "A",
},
},
},
},
},
wantErr: true,
errMsg: "raw request type is not supported for metric queries",
},
{
name: "raw request with log query without aggregations should pass",
request: QueryRangeRequest{
Start: 1640995200000,
End: 1640998800000,
RequestType: RequestTypeRaw,
CompositeQuery: CompositeQuery{
Queries: []QueryEnvelope{
{
Type: QueryTypeBuilder,
Spec: QueryBuilderQuery[LogAggregation]{
Name: "A",
Signal: telemetrytypes.SignalLogs,
Aggregations: []LogAggregation{},
},
},
},
},
},
wantErr: false,
},
}
for _, tt := range tests {
@@ -733,7 +784,7 @@ func TestValidateQueryEnvelope(t *testing.T) {
},
requestType: RequestTypeTimeSeries,
wantErr: true,
errMsg: "expression is required",
errMsg: "expression cannot be blank",
},
{
name: "valid join spec",

View File

@@ -103,7 +103,7 @@ type RuleCondition struct {
MatchType MatchType `json:"matchType"`
TargetUnit string `json:"targetUnit,omitempty"`
Algorithm string `json:"algorithm,omitempty"`
Seasonality string `json:"seasonality,omitempty"`
Seasonality Seasonality `json:"seasonality,omitzero"`
SelectedQuery string `json:"selectedQueryName,omitempty"`
RequireMinPoints bool `json:"requireMinPoints,omitempty"`
RequiredNumPoints int `json:"requiredNumPoints,omitempty"`
@@ -158,10 +158,6 @@ func (rc *RuleCondition) SelectedQueryName() string {
return keys[len(keys)-1]
}
func (rc *RuleCondition) IsValid() bool {
return true
}
// ShouldEval checks if the further series should be evaluated at all for alerts.
func (rc *RuleCondition) ShouldEval(series *qbtypes.TimeSeries) bool {
return !rc.RequireMinPoints || len(series.Values) >= rc.RequiredNumPoints

View File

@@ -12,6 +12,7 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/types/alertmanagertypes"
qbtypes "github.com/SigNoz/signoz/pkg/types/querybuildertypes/querybuildertypesv5"
"github.com/SigNoz/signoz/pkg/valuer"
)
@@ -25,7 +26,8 @@ const (
)
const (
DefaultSchemaVersion = "v1"
DefaultSchemaVersion = "v1"
SchemaVersionV2Alpha1 = "v2alpha1"
)
type RuleDataKind string
@@ -39,9 +41,9 @@ type PostableRule struct {
AlertName string `json:"alert"`
AlertType AlertType `json:"alertType,omitempty"`
Description string `json:"description,omitempty"`
RuleType RuleType `json:"ruleType,omitempty"`
EvalWindow valuer.TextDuration `json:"evalWindow,omitempty"`
Frequency valuer.TextDuration `json:"frequency,omitempty"`
RuleType RuleType `json:"ruleType,omitzero"`
EvalWindow valuer.TextDuration `json:"evalWindow,omitzero"`
Frequency valuer.TextDuration `json:"frequency,omitzero"`
RuleCondition *RuleCondition `json:"condition,omitempty"`
Labels map[string]string `json:"labels,omitempty"`
@@ -64,7 +66,7 @@ type PostableRule struct {
type NotificationSettings struct {
GroupBy []string `json:"groupBy,omitempty"`
Renotify Renotify `json:"renotify,omitempty"`
Renotify Renotify `json:"renotify,omitzero"`
UsePolicy bool `json:"usePolicy,omitempty"`
// NewGroupEvalDelay is the grace period for new series to be excluded from alerts evaluation
NewGroupEvalDelay valuer.TextDuration `json:"newGroupEvalDelay,omitzero"`
@@ -93,6 +95,28 @@ func (ns *NotificationSettings) GetAlertManagerNotificationConfig() alertmanager
return alertmanagertypes.NewNotificationConfig(ns.GroupBy, renotifyInterval, noDataRenotifyInterval, ns.UsePolicy)
}
// Channels returns all unique channel names referenced by the rule's thresholds.
func (r *PostableRule) Channels() []string {
if r.RuleCondition == nil || r.RuleCondition.Thresholds == nil {
return nil
}
threshold, err := r.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
return nil
}
seen := make(map[string]struct{})
var channels []string
for _, receiver := range threshold.GetRuleReceivers() {
for _, ch := range receiver.Channels {
if _, ok := seen[ch]; !ok {
seen[ch] = struct{}{}
channels = append(channels, ch)
}
}
}
return channels
}
func (r *PostableRule) GetRuleRouteRequest(ruleID string) ([]*alertmanagertypes.PostableRoutePolicy, error) {
threshold, err := r.RuleCondition.Thresholds.GetRuleThreshold()
if err != nil {
@@ -185,15 +209,19 @@ func (r *PostableRule) processRuleDefaults() {
r.SchemaVersion = DefaultSchemaVersion
}
if r.EvalWindow.IsZero() {
r.EvalWindow = valuer.MustParseTextDuration("5m")
// v2alpha1 uses the Evaluation envelope for window/frequency;
// only default top-level fields for v1.
if r.SchemaVersion != SchemaVersionV2Alpha1 {
if r.EvalWindow.IsZero() {
r.EvalWindow = valuer.MustParseTextDuration("5m")
}
if r.Frequency.IsZero() {
r.Frequency = valuer.MustParseTextDuration("1m")
}
}
if r.Frequency.IsZero() {
r.Frequency = valuer.MustParseTextDuration("1m")
}
if r.RuleCondition != nil {
if r.RuleCondition != nil && r.RuleCondition.CompositeQuery != nil {
switch r.RuleCondition.CompositeQuery.QueryType {
case QueryTypeBuilder:
if r.RuleType.IsZero() {
@@ -259,6 +287,10 @@ func (r *PostableRule) MarshalJSON() ([]byte, error) {
aux.SchemaVersion = ""
aux.NotificationSettings = nil
return json.Marshal(aux)
case SchemaVersionV2Alpha1:
copyStruct := *r
aux := Alias(copyStruct)
return json.Marshal(aux)
default:
copyStruct := *r
aux := Alias(copyStruct)
@@ -292,23 +324,24 @@ func isValidLabelValue(v string) bool {
return utf8.ValidString(v)
}
// validate runs during UnmarshalJSON (read + write path).
// Preserves the original pre-existing checks only so that stored rules
// continue to load without errors.
func (r *PostableRule) validate() error {
var errs []error
if r.RuleCondition == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "rule condition is required")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition: field is required")
}
if r.Version != "v5" {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "only version v5 is supported, got %q", r.Version))
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "version: only v5 is supported, got %q", r.Version))
}
for k, v := range r.Labels {
if !isValidLabelName(k) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label name: %s", k))
}
if !isValidLabelValue(v) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label value: %s", v))
}
@@ -321,7 +354,196 @@ func (r *PostableRule) validate() error {
}
errs = append(errs, testTemplateParsing(r)...)
return errors.Join(errs...)
joined := errors.Join(errs...)
if joined != nil {
return errors.WrapInvalidInputf(joined, errors.CodeInvalidInput, "validation failed")
}
return nil
}
// Validate enforces all validation rules. For now, this is invoked on the write path
// (create, update, patch, test) before persisting. This is intentionally
// not called from UnmarshalJSON so that existing stored rules can always
// be loaded regardless of new validation rules.
func (r *PostableRule) Validate() error {
var errs []error
if r.AlertName == "" {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "alert: field is required"))
}
if r.RuleCondition == nil {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition: field is required")
}
if r.Version != "v5" {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "version: only v5 is supported, got %q", r.Version))
}
if r.AlertType != "" {
switch r.AlertType {
case AlertTypeMetric, AlertTypeTraces, AlertTypeLogs, AlertTypeExceptions:
default:
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"alertType: unsupported value %q; must be one of %q, %q, %q, %q",
r.AlertType, AlertTypeMetric, AlertTypeTraces, AlertTypeLogs, AlertTypeExceptions))
}
}
if !r.RuleType.IsZero() {
if err := r.RuleType.Validate(); err != nil {
errs = append(errs, err)
}
}
if r.RuleType == RuleTypeAnomaly && !r.RuleCondition.Seasonality.IsZero() {
if err := r.RuleCondition.Seasonality.Validate(); err != nil {
errs = append(errs, err)
}
}
if r.RuleCondition.CompositeQuery == nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.compositeQuery: field is required"))
} else {
if len(r.RuleCondition.CompositeQuery.Queries) == 0 {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.compositeQuery.queries: must have at least one query"))
} else {
cq := &qbtypes.CompositeQuery{Queries: r.RuleCondition.CompositeQuery.Queries}
if err := cq.Validate(qbtypes.GetValidationOptions(qbtypes.RequestTypeTimeSeries)...); err != nil {
errs = append(errs, err)
}
}
}
if r.RuleCondition.SelectedQuery != "" && r.RuleCondition.CompositeQuery != nil && len(r.RuleCondition.CompositeQuery.Queries) > 0 {
found := false
for _, query := range r.RuleCondition.CompositeQuery.Queries {
if query.GetQueryName() == r.RuleCondition.SelectedQuery {
found = true
break
}
}
if !found {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.selectedQueryName: %q does not match any query in compositeQuery",
r.RuleCondition.SelectedQuery))
}
}
if r.RuleCondition.RequireMinPoints && r.RuleCondition.RequiredNumPoints <= 0 {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.requiredNumPoints: must be greater than 0 when requireMinPoints is enabled"))
}
errs = append(errs, r.validateSchemaVersion()...)
for k, v := range r.Labels {
if !isValidLabelName(k) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label name: %s", k))
}
if !isValidLabelValue(v) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid label value: %s", v))
}
}
for k := range r.Annotations {
if !isValidLabelName(k) {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid annotation name: %s", k))
}
}
errs = append(errs, testTemplateParsing(r)...)
joined := errors.Join(errs...)
if joined != nil {
return errors.WrapInvalidInputf(joined, errors.CodeInvalidInput, "validation failed")
}
return nil
}
func (r *PostableRule) validateSchemaVersion() []error {
switch r.SchemaVersion {
case DefaultSchemaVersion:
return r.validateV1()
case SchemaVersionV2Alpha1:
return r.validateV2Alpha1()
default:
return []error{errors.NewInvalidInputf(errors.CodeInvalidInput,
"schemaVersion: unsupported value %q; must be one of %q, %q",
r.SchemaVersion, DefaultSchemaVersion, SchemaVersionV2Alpha1)}
}
}
func (r *PostableRule) validateV1() []error {
var errs []error
if r.RuleCondition.Target == nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.target: field is required for schemaVersion %q", DefaultSchemaVersion))
}
if r.RuleCondition.CompareOperator.IsZero() {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.op: field is required for schemaVersion %q", DefaultSchemaVersion))
} else if err := r.RuleCondition.CompareOperator.Validate(); err != nil {
errs = append(errs, err)
}
if r.RuleCondition.MatchType.IsZero() {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.matchType: field is required for schemaVersion %q", DefaultSchemaVersion))
} else if err := r.RuleCondition.MatchType.Validate(); err != nil {
errs = append(errs, err)
}
return errs
}
func (r *PostableRule) validateV2Alpha1() []error {
var errs []error
// TODO(srikanthccv): reject v1-only fields?
// if r.RuleCondition.Target != nil {
// errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
// "condition.target: field is not used in schemaVersion %q; set target in condition.thresholds entries instead",
// SchemaVersionV2Alpha1))
// }
// if !r.RuleCondition.CompareOperator.IsZero() {
// errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
// "condition.op: field is not used in schemaVersion %q; set op in condition.thresholds entries instead",
// SchemaVersionV2Alpha1))
// }
// if !r.RuleCondition.MatchType.IsZero() {
// errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
// "condition.matchType: field is not used in schemaVersion %q; set matchType in condition.thresholds entries instead",
// SchemaVersionV2Alpha1))
// }
// if len(r.PreferredChannels) > 0 {
// errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
// "preferredChannels: field is not used in schemaVersion %q; set channels in condition.thresholds entries instead",
// SchemaVersionV2Alpha1))
// }
// Require v2alpha1-specific fields
if r.RuleCondition.Thresholds == nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.thresholds: field is required for schemaVersion %q", SchemaVersionV2Alpha1))
}
if r.Evaluation == nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"evaluation: field is required for schemaVersion %q", SchemaVersionV2Alpha1))
}
if r.NotificationSettings == nil {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"notificationSettings: field is required for schemaVersion %q", SchemaVersionV2Alpha1))
} else {
if r.NotificationSettings.Renotify.Enabled && !r.NotificationSettings.Renotify.ReNotifyInterval.IsPositive() {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput,
"notificationSettings.renotify.interval: must be a positive duration when renotify is enabled"))
}
}
return errs
}
func testTemplateParsing(rl *PostableRule) (errs []error) {
@@ -393,6 +615,10 @@ func (g *GettableRule) MarshalJSON() ([]byte, error) {
aux.SchemaVersion = ""
aux.NotificationSettings = nil
return json.Marshal(aux)
case SchemaVersionV2Alpha1:
copyStruct := *g
aux := Alias(copyStruct)
return json.Marshal(aux)
default:
copyStruct := *g
aux := Alias(copyStruct)

View File

@@ -34,15 +34,15 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"expression": "A",
"disabled": false,
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 10.0,
"matchType": "1",
@@ -77,14 +77,15 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"disabled": false,
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 5.0,
"matchType": "1",
@@ -112,12 +113,14 @@ func TestParseIntoRule(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "promql",
"promQueries": {
"A": {
"queries": [{
"type": "promql",
"spec": {
"name": "A",
"query": "rate(http_requests_total[5m])",
"disabled": false
}
}
}]
},
"target": 10.0,
"matchType": "1",
@@ -165,12 +168,13 @@ func TestParseIntoRule(t *testing.T) {
func TestParseIntoRuleSchemaVersioning(t *testing.T) {
tests := []struct {
name string
initRule PostableRule
content []byte
kind RuleDataKind
expectError bool
validate func(*testing.T, *PostableRule)
name string
initRule PostableRule
content []byte
kind RuleDataKind
expectError bool // unmarshal error (read path)
expectValidateError bool // Validate() error (write path only)
validate func(*testing.T, *PostableRule)
}{
{
name: "schema v1 - threshold name from severity label",
@@ -182,13 +186,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "cpu_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "cpu_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
},
}],
"unit": "percent"
},
"target": 85.0,
@@ -271,13 +277,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "memory_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "memory_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 90.0,
"matchType": "1",
@@ -312,13 +320,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "cpu_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "cpu_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
},
}],
"unit": "percent"
},
"target": 80.0,
@@ -394,49 +404,253 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
},
},
{
name: "schema v2 - does not populate thresholds and evaluation",
name: "schema v2alpha1 - uses explicit thresholds and evaluation",
initRule: PostableRule{},
content: []byte(`{
"alert": "V2Test",
"schemaVersion": "v2",
"alert": "V2Alpha1Test",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"thresholds": {
"kind": "basic",
"spec": [{
"name": "critical",
"target": 100.0,
"matchType": "1",
"op": "1"
}]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
}`),
kind: RuleDataKindJson,
expectError: false,
validate: func(t *testing.T, rule *PostableRule) {
if rule.SchemaVersion != SchemaVersionV2Alpha1 {
t.Errorf("Expected schemaVersion %q, got %q", SchemaVersionV2Alpha1, rule.SchemaVersion)
}
if rule.RuleCondition.Thresholds == nil {
t.Error("Expected Thresholds to be present for v2alpha1")
}
if rule.Evaluation == nil {
t.Error("Expected Evaluation to be present for v2alpha1")
}
if rule.NotificationSettings == nil {
t.Error("Expected NotificationSettings to be present for v2alpha1")
}
if rule.RuleType != RuleTypeThreshold {
t.Error("Expected RuleType to be auto-detected")
}
},
},
{
name: "schema v2alpha1 - rejects v1-only fields with suggestions",
initRule: PostableRule{},
content: []byte(`{
"alert": "MixedFieldsTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"preferredChannels": ["slack"],
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 100.0,
"matchType": "1",
"op": "1"
}
}`),
kind: RuleDataKindJson,
expectError: false,
validate: func(t *testing.T, rule *PostableRule) {
if rule.SchemaVersion != "v2" {
t.Errorf("Expected schemaVersion 'v2', got '%s'", rule.SchemaVersion)
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "schema v2alpha1 - requires evaluation",
initRule: PostableRule{},
content: []byte(`{
"alert": "MissingEvalTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"thresholds": {
"kind": "basic",
"spec": [{
"name": "critical",
"target": 100.0,
"matchType": "1",
"op": "1"
}]
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
if rule.RuleCondition.Thresholds != nil {
t.Error("Expected Thresholds to be nil for v2")
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "schema v2alpha1 - requires notificationSettings",
initRule: PostableRule{},
content: []byte(`{
"alert": "MissingNotifTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"thresholds": {
"kind": "basic",
"spec": [{
"name": "critical",
"target": 100.0,
"matchType": "1",
"op": "1"
}]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
}
if rule.Evaluation != nil {
t.Error("Expected Evaluation to be nil for v2")
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "schema v2alpha1 - requires thresholds for non-promql rules",
initRule: PostableRule{},
content: []byte(`{
"alert": "MissingThresholdsTest",
"schemaVersion": "v2alpha1",
"version": "v5",
"ruleType": "threshold_rule",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
if rule.EvalWindow.Duration() != 5*time.Minute {
t.Error("Expected default EvalWindow to be applied")
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "unsupported schema version",
initRule: PostableRule{},
content: []byte(`{
"alert": "BadSchemaTest",
"schemaVersion": "v3",
"version": "v5",
"condition": {
"compositeQuery": {
"queryType": "builder",
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}]
},
"target": 100.0,
"matchType": "1",
"op": "1"
}
if rule.RuleType != RuleTypeThreshold {
t.Error("Expected RuleType to be auto-detected")
}
},
}`),
kind: RuleDataKindJson,
expectValidateError: true,
},
{
name: "default schema version - defaults to v1 behavior",
@@ -447,13 +661,15 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"aggregateAttribute": {
"key": "test_metric"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "test_metric", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 75.0,
"matchType": "1",
@@ -480,13 +696,23 @@ func TestParseIntoRuleSchemaVersioning(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
rule := tt.initRule
err := json.Unmarshal(tt.content, &rule)
if tt.expectError && err == nil {
t.Errorf("Expected error but got none")
if tt.expectError {
if err == nil {
t.Errorf("Expected unmarshal error but got none")
}
return
}
if !tt.expectError && err != nil {
t.Errorf("Unexpected error: %v", err)
if err != nil {
t.Errorf("Unexpected unmarshal error: %v", err)
return
}
if tt.validate != nil && err == nil {
if tt.expectValidateError {
if err := rule.Validate(); err == nil {
t.Errorf("Expected Validate() error but got none")
}
return
}
if tt.validate != nil {
tt.validate(t, &rule)
}
})
@@ -500,15 +726,15 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
"condition": {
"compositeQuery": {
"queryType": "builder",
"builderQueries": {
"A": {
"expression": "A",
"disabled": false,
"aggregateAttribute": {
"key": "response_time"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "response_time", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 100.0,
"matchType": "1",
@@ -571,7 +797,7 @@ func TestParseIntoRuleThresholdGeneration(t *testing.T) {
func TestParseIntoRuleMultipleThresholds(t *testing.T) {
content := []byte(`{
"schemaVersion": "v2",
"schemaVersion": "v2alpha1",
"alert": "MultiThresholdAlert",
"ruleType": "threshold_rule",
"version": "v5",
@@ -579,19 +805,16 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
"compositeQuery": {
"queryType": "builder",
"unit": "%",
"builderQueries": {
"A": {
"expression": "A",
"disabled": false,
"aggregateAttribute": {
"key": "cpu_usage"
}
"queries": [{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [{"metricName": "cpu_usage", "spaceAggregation": "p50"}],
"stepInterval": "5m"
}
}
}]
},
"target": 90.0,
"matchType": "1",
"op": "1",
"selectedQuery": "A",
"thresholds": {
"kind": "basic",
@@ -616,6 +839,20 @@ func TestParseIntoRuleMultipleThresholds(t *testing.T) {
}
]
}
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m",
"frequency": "1m"
}
},
"notificationSettings": {
"renotify": {
"enabled": true,
"interval": "4h",
"alertStates": ["firing"]
}
}
}`)
rule := PostableRule{}

View File

@@ -54,6 +54,29 @@ func (CompareOperator) Enum() []any {
}
}
// Normalize maps any accepted alias of a comparison operator to its
// canonical (numeric) form, so evaluation logic can rely on simple ==
// checks against the canonical values. Canonical values (and unknown
// operators) are returned unchanged.
func (c CompareOperator) Normalize() CompareOperator {
	switch c {
	case ValueIsAboveLiteral, ValueIsAboveSymbol:
		return ValueIsAbove
	case ValueIsBelowLiteral, ValueIsBelowSymbol:
		return ValueIsBelow
	case ValueIsEqLiteral, ValueIsEqLiteralShort, ValueIsEqSymbol:
		return ValueIsEq
	case ValueIsNotEqLiteral, ValueIsNotEqLiteralShort, ValueIsNotEqSymbol:
		return ValueIsNotEq
	case ValueAboveOrEqLiteral, ValueAboveOrEqLiteralShort, ValueAboveOrEqSymbol:
		return ValueAboveOrEq
	case ValueBelowOrEqLiteral, ValueBelowOrEqLiteralShort, ValueBelowOrEqSymbol:
		return ValueBelowOrEq
	case ValueOutsideBoundsLiteral:
		return ValueOutsideBounds
	default:
		// Already canonical, or unrecognized: pass through as-is.
		return c
	}
}
func (c CompareOperator) Validate() error {
switch c {
case ValueIsAbove,
@@ -70,10 +93,18 @@ func (c CompareOperator) Validate() error {
ValueIsNotEqLiteral,
ValueIsNotEqLiteralShort,
ValueIsNotEqSymbol,
ValueAboveOrEq,
ValueAboveOrEqLiteral,
ValueAboveOrEqLiteralShort,
ValueAboveOrEqSymbol,
ValueBelowOrEq,
ValueBelowOrEqLiteral,
ValueBelowOrEqLiteralShort,
ValueBelowOrEqSymbol,
ValueOutsideBounds,
ValueOutsideBoundsLiteral:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown comparison operator, known values are: ")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.op: unsupported value %q; must be one of above, below, equal, not_equal, above_or_equal, below_or_equal, outside_bounds", c.StringValue())
}
}

View File

@@ -11,7 +11,7 @@ type MatchType struct {
var (
AtleastOnce = MatchType{valuer.NewString("1")}
AtleastOnceLiteral = MatchType{valuer.NewString("atleast_once")}
AtleastOnceLiteral = MatchType{valuer.NewString("at_least_once")}
AllTheTimes = MatchType{valuer.NewString("2")}
AllTheTimesLiteral = MatchType{valuer.NewString("all_the_times")}
@@ -38,6 +38,24 @@ func (MatchType) Enum() []any {
}
}
// Normalize maps any accepted alias of a match type to its canonical
// (numeric) form. Canonical values (and unknown match types) are
// returned unchanged.
func (m MatchType) Normalize() MatchType {
	switch m {
	case AtleastOnceLiteral:
		return AtleastOnce
	case AllTheTimesLiteral:
		return AllTheTimes
	case OnAverageLiteral, OnAverageShort:
		return OnAverage
	case InTotalLiteral, InTotalShort:
		return InTotal
	case LastLiteral:
		return Last
	default:
		// Already canonical, or unrecognized: pass through as-is.
		return m
	}
}
func (m MatchType) Validate() error {
switch m {
case
@@ -55,6 +73,6 @@ func (m MatchType) Validate() error {
LastLiteral:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown match type operator, known values are")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.matchType: unsupported value %q; must be one of at_least_once, all_the_times, on_average, in_total, last", m.StringValue())
}
}

View File

@@ -31,6 +31,6 @@ func (r RuleType) Validate() error {
RuleTypeAnomaly:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput, "unknown rule type, known values are")
return errors.NewInvalidInputf(errors.CodeInvalidInput, "ruleType: unsupported value %q; must be one of threshold_rule, promql_rule, anomaly_rule", r.StringValue())
}
}

View File

@@ -0,0 +1,35 @@
package ruletypes
import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Seasonality struct {
valuer.String
}
var (
SeasonalityHourly = Seasonality{valuer.NewString("hourly")}
SeasonalityDaily = Seasonality{valuer.NewString("daily")}
SeasonalityWeekly = Seasonality{valuer.NewString("weekly")}
)
func (Seasonality) Enum() []any {
return []any{
SeasonalityHourly,
SeasonalityDaily,
SeasonalityWeekly,
}
}
func (s Seasonality) Validate() error {
switch s {
case SeasonalityHourly, SeasonalityDaily, SeasonalityWeekly:
return nil
default:
return errors.NewInvalidInputf(errors.CodeInvalidInput,
"condition.seasonality: unsupported value %q; must be one of hourly, daily, weekly",
s.StringValue())
}
}

View File

@@ -113,6 +113,9 @@ func (r BasicRuleThresholds) GetRuleReceivers() []RuleReceivers {
}
func (r BasicRuleThresholds) Validate() error {
if len(r) == 0 {
return errors.NewInvalidInputf(errors.CodeInvalidInput, "condition.thresholds.spec: must have at least one threshold")
}
var errs []error
for _, basicThreshold := range r {
if err := basicThreshold.Validate(); err != nil {
@@ -189,7 +192,7 @@ func sortThresholds(thresholds []BasicRuleThreshold) {
targetI := thresholds[i].target(thresholds[i].TargetUnit) //for sorting we dont need rule unit
targetJ := thresholds[j].target(thresholds[j].TargetUnit)
switch thresholds[i].CompareOperator {
switch thresholds[i].CompareOperator.Normalize() {
case ValueIsAbove, ValueAboveOrEq, ValueOutsideBounds:
// For "above" operations, sort descending (higher values first)
return targetI > targetJ
@@ -234,16 +237,11 @@ func (b BasicRuleThreshold) Validate() error {
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "target value cannot be nil"))
}
switch b.CompareOperator {
case ValueIsAbove, ValueIsBelow, ValueIsEq, ValueIsNotEq, ValueAboveOrEq, ValueBelowOrEq, ValueOutsideBounds:
default:
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid compare operation: %s", b.CompareOperator.StringValue()))
if err := b.CompareOperator.Validate(); err != nil {
errs = append(errs, err)
}
switch b.MatchType {
case AtleastOnce, AllTheTimes, OnAverage, InTotal, Last:
default:
errs = append(errs, errors.NewInvalidInputf(errors.CodeInvalidInput, "invalid match type: %s", b.MatchType.StringValue()))
if err := b.MatchType.Validate(); err != nil {
errs = append(errs, err)
}
return errors.Join(errs...)
@@ -268,6 +266,33 @@ func PrepareSampleLabelsForRule(seriesLabels []*qbtypes.Label, thresholdName str
return lb.Labels()
}
// matchesCompareOp reports whether value satisfies op relative to target.
// op is expected to be in canonical form (see CompareOperator.Normalize);
// any other operator yields false. Comparisons are done directly on the
// float64 values, so NaN never satisfies an equality/ordering check but
// does satisfy ValueIsNotEq.
func matchesCompareOp(op CompareOperator, value, target float64) bool {
	switch op {
	case ValueOutsideBounds:
		// Magnitude check: |value| at or beyond the target bound.
		return math.Abs(value) >= target
	case ValueIsEq:
		return value == target
	case ValueIsNotEq:
		return value != target
	case ValueIsAbove:
		return value > target
	case ValueAboveOrEq:
		return value >= target
	case ValueIsBelow:
		return value < target
	case ValueBelowOrEq:
		return value <= target
	}
	return false
}
// negatesCompareOp reports whether value fails to satisfy op against
// target; it is the logical complement of matchesCompareOp.
func negatesCompareOp(op CompareOperator, value, target float64) bool {
	matched := matchesCompareOp(op, value, target)
	return !matched
}
func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, target float64) (Sample, bool) {
var shouldAlert bool
var alertSmpl Sample
@@ -278,63 +303,35 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
return alertSmpl, false
}
switch b.MatchType {
// Normalize to canonical forms so evaluation uses simple == checks
op := b.CompareOperator.Normalize()
matchType := b.MatchType.Normalize()
switch matchType {
case AtleastOnce:
// If any sample matches the condition, the rule is firing.
if b.CompareOperator == ValueIsAbove {
for _, smpl := range series.Values {
if smpl.Value > target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueIsBelow {
for _, smpl := range series.Values {
if smpl.Value < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueIsEq {
for _, smpl := range series.Values {
if smpl.Value == target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueIsNotEq {
for _, smpl := range series.Values {
if smpl.Value != target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
} else if b.CompareOperator == ValueOutsideBounds {
for _, smpl := range series.Values {
if math.Abs(smpl.Value) >= target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
for _, smpl := range series.Values {
if matchesCompareOp(op, smpl.Value, target) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = true
break
}
}
case AllTheTimes:
// If all samples match the condition, the rule is firing.
shouldAlert = true
alertSmpl = Sample{Point: Point{V: target}, Metric: lbls}
if b.CompareOperator == ValueIsAbove {
for _, smpl := range series.Values {
if smpl.Value <= target {
shouldAlert = false
break
}
for _, smpl := range series.Values {
if negatesCompareOp(op, smpl.Value, target) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
break
}
// use min value from the series
if shouldAlert {
}
if shouldAlert {
switch op {
case ValueIsAbove, ValueAboveOrEq, ValueOutsideBounds:
// use min value from the series
var minValue = math.Inf(1)
for _, smpl := range series.Values {
if smpl.Value < minValue {
@@ -342,15 +339,8 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
}
alertSmpl = Sample{Point: Point{V: minValue}, Metric: lbls}
}
} else if b.CompareOperator == ValueIsBelow {
for _, smpl := range series.Values {
if smpl.Value >= target {
shouldAlert = false
break
}
}
if shouldAlert {
case ValueIsBelow, ValueBelowOrEq:
// use max value from the series
var maxValue = math.Inf(-1)
for _, smpl := range series.Values {
if smpl.Value > maxValue {
@@ -358,23 +348,8 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
}
alertSmpl = Sample{Point: Point{V: maxValue}, Metric: lbls}
}
} else if b.CompareOperator == ValueIsEq {
for _, smpl := range series.Values {
if smpl.Value != target {
shouldAlert = false
break
}
}
} else if b.CompareOperator == ValueIsNotEq {
for _, smpl := range series.Values {
if smpl.Value == target {
shouldAlert = false
break
}
}
// use any non-inf or nan value from the series
if shouldAlert {
case ValueIsNotEq:
// use any non-inf and non-nan value from the series
for _, smpl := range series.Values {
if !math.IsInf(smpl.Value, 0) && !math.IsNaN(smpl.Value) {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
@@ -382,14 +357,6 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
}
}
} else if b.CompareOperator == ValueOutsideBounds {
for _, smpl := range series.Values {
if math.Abs(smpl.Value) < target {
alertSmpl = Sample{Point: Point{V: smpl.Value}, Metric: lbls}
shouldAlert = false
break
}
}
}
case OnAverage:
// If the average of all samples matches the condition, the rule is firing.
@@ -403,32 +370,10 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
}
avg := sum / count
alertSmpl = Sample{Point: Point{V: avg}, Metric: lbls}
switch b.CompareOperator {
case ValueIsAbove:
if avg > target {
shouldAlert = true
}
case ValueIsBelow:
if avg < target {
shouldAlert = true
}
case ValueIsEq:
if avg == target {
shouldAlert = true
}
case ValueIsNotEq:
if avg != target {
shouldAlert = true
}
case ValueOutsideBounds:
if math.Abs(avg) >= target {
shouldAlert = true
}
}
shouldAlert = matchesCompareOp(op, avg, target)
case InTotal:
// If the sum of all samples matches the condition, the rule is firing.
var sum float64
for _, smpl := range series.Values {
if math.IsNaN(smpl.Value) || math.IsInf(smpl.Value, 0) {
continue
@@ -436,50 +381,12 @@ func (b BasicRuleThreshold) shouldAlertWithTarget(series *qbtypes.TimeSeries, ta
sum += smpl.Value
}
alertSmpl = Sample{Point: Point{V: sum}, Metric: lbls}
switch b.CompareOperator {
case ValueIsAbove:
if sum > target {
shouldAlert = true
}
case ValueIsBelow:
if sum < target {
shouldAlert = true
}
case ValueIsEq:
if sum == target {
shouldAlert = true
}
case ValueIsNotEq:
if sum != target {
shouldAlert = true
}
case ValueOutsideBounds:
if math.Abs(sum) >= target {
shouldAlert = true
}
}
shouldAlert = matchesCompareOp(op, sum, target)
case Last:
// If the last sample matches the condition, the rule is firing.
shouldAlert = false
alertSmpl = Sample{Point: Point{V: series.Values[len(series.Values)-1].Value}, Metric: lbls}
switch b.CompareOperator {
case ValueIsAbove:
if series.Values[len(series.Values)-1].Value > target {
shouldAlert = true
}
case ValueIsBelow:
if series.Values[len(series.Values)-1].Value < target {
shouldAlert = true
}
case ValueIsEq:
if series.Values[len(series.Values)-1].Value == target {
shouldAlert = true
}
case ValueIsNotEq:
if series.Values[len(series.Values)-1].Value != target {
shouldAlert = true
}
}
lastValue := series.Values[len(series.Values)-1].Value
alertSmpl = Sample{Point: Point{V: lastValue}, Metric: lbls}
shouldAlert = matchesCompareOp(op, lastValue, target)
}
return alertSmpl, shouldAlert
}

File diff suppressed because it is too large Load Diff

View File

@@ -243,6 +243,7 @@ type Store interface {
// Service Account Role
CreateServiceAccountRole(context.Context, *ServiceAccountRole) error
DeleteServiceAccountRoles(context.Context, valuer.UUID) error
DeleteServiceAccountRole(context.Context, valuer.UUID, valuer.UUID) error
// Service Account Factor API Key

View File

@@ -42,7 +42,6 @@ type User struct {
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
IsRoot bool `bun:"is_root" json:"isRoot"`
Status valuer.String `bun:"status" json:"status"`
DeletedAt time.Time `bun:"deleted_at" json:"-"`
TimeAuditable
}
@@ -136,7 +135,6 @@ func NewUserFromDeprecatedUser(deprecatedUser *DeprecatedUser) *User {
OrgID: deprecatedUser.OrgID,
IsRoot: deprecatedUser.IsRoot,
Status: deprecatedUser.Status,
DeletedAt: deprecatedUser.DeletedAt,
TimeAuditable: deprecatedUser.TimeAuditable,
}
}

View File

@@ -24,6 +24,10 @@ USER_EDITOR_NAME = "editor"
USER_EDITOR_EMAIL = "editor@integration.test"
USER_EDITOR_PASSWORD = "password123Z$"
USER_VIEWER_NAME = "viewer"
USER_VIEWER_EMAIL = "viewer@integration.test"
USER_VIEWER_PASSWORD = "password123Z$"
@pytest.fixture(name="create_user_admin", scope="package")
def create_user_admin(

View File

@@ -0,0 +1,115 @@
"""Reusable helpers for user API tests."""
from http import HTTPStatus
from typing import Dict
import requests
from fixtures import types
USERS_BASE = "/api/v2/users"
def create_active_user(
    signoz: types.SigNoz,
    admin_token: str,
    email: str,
    role: str,
    password: str,
    name: str = "",
) -> str:
    """Invite a user and activate via resetPassword. Returns user ID."""
    host = signoz.self.host_configs["8080"]
    auth_headers = {"Authorization": f"Bearer {admin_token}"}

    # Step 1: invite the user as the admin.
    invite_response = requests.post(
        host.get("/api/v1/invite"),
        json={"email": email, "role": role, "name": name},
        headers=auth_headers,
        timeout=5,
    )
    assert invite_response.status_code == HTTPStatus.CREATED, invite_response.text
    invited = invite_response.json()["data"]

    # Step 2: activate the account by consuming the invite token.
    activate_response = requests.post(
        host.get("/api/v1/resetPassword"),
        json={"password": password, "token": invited["token"]},
        timeout=5,
    )
    assert activate_response.status_code == HTTPStatus.NO_CONTENT, activate_response.text

    return invited["id"]
def find_user_by_email(signoz: types.SigNoz, token: str, email: str) -> Dict:
    """Find a user by email from the user list. Raises AssertionError if not found."""
    response = requests.get(
        signoz.self.host_configs["8080"].get(USERS_BASE),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK, response.text

    # Scan the listing for the first user with a matching email.
    match = None
    for candidate in response.json()["data"]:
        if candidate["email"] == email:
            match = candidate
            break
    assert match is not None, f"User with email '{email}' not found"
    return match
def find_user_with_roles_by_email(signoz: types.SigNoz, token: str, email: str) -> Dict:
    """Find a user by email and return UserWithRoles (user fields + userRoles).

    Raises AssertionError if the user is not found.
    """
    # Resolve the user id from the listing, then fetch the detailed record.
    base_user = find_user_by_email(signoz, token, email)
    detail_response = requests.get(
        signoz.self.host_configs["8080"].get(f"{USERS_BASE}/{base_user['id']}"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert detail_response.status_code == HTTPStatus.OK, detail_response.text
    return detail_response.json()["data"]
def assert_user_has_role(data: Dict, role_name: str) -> None:
    """Assert that a UserWithRoles response contains the expected managed role."""
    assigned = set()
    for user_role in data.get("userRoles", []):
        assigned.add(user_role["role"]["name"])
    assert role_name in assigned, f"Expected role '{role_name}' in {assigned}"
def change_user_role(
    signoz: types.SigNoz,
    admin_token: str,
    user_id: str,
    old_role: str,
    new_role: str,
) -> None:
    """Change a user's role (remove old, assign new).

    Role names should be managed role names (e.g. signoz-editor).
    """
    host = signoz.self.host_configs["8080"]
    auth_headers = {"Authorization": f"Bearer {admin_token}"}
    roles_url = host.get(f"{USERS_BASE}/{user_id}/roles")

    # Look up the id of the role that is about to be removed.
    list_response = requests.get(roles_url, headers=auth_headers, timeout=5)
    assert list_response.status_code == HTTPStatus.OK, list_response.text
    current = None
    for role in list_response.json()["data"]:
        if role["name"] == old_role:
            current = role
            break
    assert current is not None, f"User does not have role '{old_role}'"

    # Detach the old role first.
    delete_response = requests.delete(
        host.get(f"{USERS_BASE}/{user_id}/roles/{current['id']}"),
        headers=auth_headers,
        timeout=5,
    )
    assert delete_response.status_code == HTTPStatus.NO_CONTENT, delete_response.text

    # Then attach the new role.
    assign_response = requests.post(
        roles_url,
        json={"name": new_role},
        headers=auth_headers,
        timeout=5,
    )
    assert assign_response.status_code == HTTPStatus.OK, assign_response.text

View File

@@ -12,9 +12,12 @@ from fixtures.auth import (
USER_ADMIN_PASSWORD,
add_license,
)
from fixtures.authutils import (
assert_user_has_role,
find_user_with_roles_by_email,
)
from fixtures.idputils import (
get_saml_domain,
get_user_by_email,
perform_saml_login,
)
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
@@ -131,26 +134,10 @@ def test_saml_authn(
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Assert that the user was created in signoz.
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
found_user = find_user_with_roles_by_email(
signoz, admin_token, "viewer@saml.integration.test"
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "viewer@saml.integration.test"
),
None,
)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_idp_initiated_saml_authn(
@@ -182,26 +169,10 @@ def test_idp_initiated_saml_authn(
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Assert that the user was created in signoz.
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
found_user = find_user_with_roles_by_email(
signoz, admin_token, "viewer.idp.initiated@saml.integration.test"
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "viewer.idp.initiated@saml.integration.test"
),
None,
)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_saml_update_domain_with_group_mappings(
@@ -268,10 +239,9 @@ def test_saml_role_mapping_single_group_admin(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "ADMIN"
assert_user_has_role(found_user, "signoz-admin")
def test_saml_role_mapping_single_group_editor(
@@ -294,10 +264,9 @@ def test_saml_role_mapping_single_group_editor(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
def test_saml_role_mapping_multiple_groups_highest_wins(
@@ -324,10 +293,9 @@ def test_saml_role_mapping_multiple_groups_highest_wins(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
def test_saml_role_mapping_explicit_viewer_group(
@@ -351,10 +319,9 @@ def test_saml_role_mapping_explicit_viewer_group(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_saml_role_mapping_unmapped_group_uses_default(
@@ -377,10 +344,9 @@ def test_saml_role_mapping_unmapped_group_uses_default(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_saml_update_domain_with_use_role_claim(
@@ -454,10 +420,9 @@ def test_saml_role_mapping_role_claim_takes_precedence(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "ADMIN"
assert_user_has_role(found_user, "signoz-admin")
def test_saml_role_mapping_invalid_role_claim_fallback(
@@ -484,10 +449,9 @@ def test_saml_role_mapping_invalid_role_claim_fallback(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
def test_saml_role_mapping_case_insensitive(
@@ -514,10 +478,9 @@ def test_saml_role_mapping_case_insensitive(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "ADMIN"
assert_user_has_role(found_user, "signoz-admin")
def test_saml_name_mapping(
@@ -539,13 +502,12 @@ def test_saml_name_mapping(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert (
found_user["displayName"] == "Jane"
) # We are only mapping the first name here
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_saml_empty_name_fallback(
@@ -567,10 +529,9 @@ def test_saml_empty_name_fallback(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_saml_sso_login_activates_pending_invite_user(
@@ -610,10 +571,9 @@ def test_saml_sso_login_activates_pending_invite_user(
)
# User should be active with VIEWER role from SSO
found_user = get_user_by_email(signoz, admin_token, email)
assert found_user is not None
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user["status"] == "active"
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_saml_sso_deleted_user_gets_new_user_on_login(
@@ -680,18 +640,26 @@ def test_saml_sso_deleted_user_gets_new_user_on_login(
# Verify a NEW active user was auto-provisioned via SSO
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
signoz.self.host_configs["8080"].get("/api/v2/users"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
found_user = next(
(
user
for user in response.json()["data"]
if user["email"] == email and user["id"] != user_id
),
assert response.status_code == HTTPStatus.OK
users = response.json()["data"]
new_user = next(
(user for user in users if user["email"] == email and user["id"] != user_id),
None,
)
assert found_user is not None
assert found_user["status"] == "active"
assert found_user["role"] == "VIEWER" # default role from SSO domain config
assert new_user is not None
assert new_user["status"] == "active"
# Fetch full user with roles to check the assigned role
response = requests.get(
signoz.self.host_configs["8080"].get(f"/api/v2/users/{new_user['id']}"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
found_user = response.json()["data"]
assert_user_has_role(
found_user, "signoz-viewer"
) # default role from SSO domain config

View File

@@ -11,9 +11,12 @@ from fixtures.auth import (
USER_ADMIN_PASSWORD,
add_license,
)
from fixtures.authutils import (
assert_user_has_role,
find_user_with_roles_by_email,
)
from fixtures.idputils import (
get_oidc_domain,
get_user_by_email,
perform_oidc_login,
)
from fixtures.types import Operation, SigNoz, TestContainerDocker, TestContainerIDP
@@ -112,26 +115,10 @@ def test_oidc_authn(
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Assert that the user was created in signoz.
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
found_user = find_user_with_roles_by_email(
signoz, admin_token, "viewer@oidc.integration.test"
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "viewer@oidc.integration.test"
),
None,
)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_oidc_update_domain_with_group_mappings(
@@ -205,10 +192,9 @@ def test_oidc_role_mapping_single_group_admin(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "ADMIN"
assert_user_has_role(found_user, "signoz-admin")
def test_oidc_role_mapping_single_group_editor(
@@ -231,10 +217,9 @@ def test_oidc_role_mapping_single_group_editor(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
def test_oidc_role_mapping_multiple_groups_highest_wins(
@@ -261,10 +246,9 @@ def test_oidc_role_mapping_multiple_groups_highest_wins(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "ADMIN"
assert_user_has_role(found_user, "signoz-admin")
def test_oidc_role_mapping_explicit_viewer_group(
@@ -288,10 +272,9 @@ def test_oidc_role_mapping_explicit_viewer_group(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_oidc_role_mapping_unmapped_group_uses_default(
@@ -314,10 +297,9 @@ def test_oidc_role_mapping_unmapped_group_uses_default(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
def test_oidc_update_domain_with_use_role_claim(
@@ -394,10 +376,9 @@ def test_oidc_role_mapping_role_claim_takes_precedence(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "ADMIN"
assert_user_has_role(found_user, "signoz-admin")
def test_oidc_role_mapping_invalid_role_claim_fallback(
@@ -426,10 +407,9 @@ def test_oidc_role_mapping_invalid_role_claim_fallback(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
def test_oidc_role_mapping_case_insensitive(
@@ -456,10 +436,9 @@ def test_oidc_role_mapping_case_insensitive(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = get_user_by_email(signoz, admin_token, email)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
def test_oidc_name_mapping(
@@ -482,20 +461,11 @@ def test_oidc_name_mapping(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert response.status_code == HTTPStatus.OK
users = response.json()["data"]
found_user = next((u for u in users if u["email"] == email), None)
assert found_user is not None
# Keycloak concatenates firstName + lastName into "name" claim
assert found_user["displayName"] == "John Doe"
assert found_user["role"] == "VIEWER" # Default role
assert_user_has_role(found_user, "signoz-viewer") # Default role
def test_oidc_empty_name_uses_fallback(
@@ -518,19 +488,10 @@ def test_oidc_empty_name_uses_fallback(
)
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
users = response.json()["data"]
found_user = next((u for u in users if u["email"] == email), None)
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
# User should still be created even with empty name
assert found_user is not None
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")
# Note: displayName may be empty - this is a known limitation
@@ -570,16 +531,7 @@ def test_oidc_sso_login_activates_pending_invite_user(
signoz, idp, driver, get_session_context, idp_login, email, "password123"
)
# User should be active with ADMIN role from invite, not VIEWER from SSO
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
found_user = next(
(user for user in response.json()["data"] if user["email"] == email),
None,
)
assert found_user is not None
# User should be active with VIEWER role from SSO, not ADMIN from invite
found_user = find_user_with_roles_by_email(signoz, admin_token, email)
assert found_user["status"] == "active"
assert found_user["role"] == "VIEWER"
assert_user_has_role(found_user, "signoz-viewer")

View File

@@ -4,6 +4,18 @@ from typing import Callable
import requests
from fixtures import types
from fixtures.auth import (
USER_ADMIN_EMAIL,
USER_ADMIN_PASSWORD,
USER_EDITOR_EMAIL,
USER_EDITOR_NAME,
USER_EDITOR_PASSWORD,
USER_VIEWER_EMAIL,
)
from fixtures.authutils import (
assert_user_has_role,
find_user_with_roles_by_email,
)
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@@ -58,8 +70,8 @@ def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
"name": "admin",
"orgId": "",
"orgName": "integration.test",
"email": "admin@integration.test",
"password": "password123Z$",
"email": USER_ADMIN_EMAIL,
"password": USER_ADMIN_PASSWORD,
},
timeout=2,
)
@@ -72,130 +84,73 @@ def test_register(signoz: types.SigNoz, get_token: Callable[[str, str], str]) ->
assert response.status_code == HTTPStatus.OK
assert response.json()["setupCompleted"] is True
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "admin@integration.test"),
None,
)
assert found_user is not None
assert found_user["role"] == "ADMIN"
response = requests.get(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user["id"]}"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
assert response.json()["data"]["role"] == "ADMIN"
# Verify admin user exists via v2
found_user = find_user_with_roles_by_email(signoz, admin_token, USER_ADMIN_EMAIL)
assert_user_has_role(found_user, "signoz-admin")
def test_invite_and_register(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
def test_invite(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Generate an invite token for the editor user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "editor@integration.test", "role": "EDITOR", "name": "editor"},
json={"email": USER_EDITOR_EMAIL, "role": "EDITOR", "name": USER_EDITOR_NAME},
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
assert invited_user["email"] == "editor@integration.test"
assert invited_user["email"] == USER_EDITOR_EMAIL
assert invited_user["role"] == "EDITOR"
# Verify the user user appears in the users list but as pending_invite status
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "editor@integration.test"),
None,
)
assert found_user is not None
# Verify the user appears in the users list but as pending_invite status
found_user = find_user_with_roles_by_email(signoz, admin_token, USER_EDITOR_EMAIL)
assert found_user["status"] == "pending_invite"
assert found_user["role"] == "EDITOR"
assert_user_has_role(found_user, "signoz-editor")
reset_token = invited_user["token"]
# Reset the password to complete the invite flow (activates the user and also grants authz)
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
json={"password": USER_EDITOR_PASSWORD, "token": reset_token},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
# Verify the user can now log in
editor_token = get_token("editor@integration.test", "password123Z$")
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
assert editor_token is not None
# Verify that an admin endpoint cannot be called by the editor user
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {editor_token}"},
)
assert response.status_code == HTTPStatus.FORBIDDEN
# Verify that the editor user status has been updated to ACTIVE
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={
"Authorization": f"Bearer {get_token("admin@integration.test", "password123Z$")}"
},
admin_token_fresh = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
found_user = find_user_with_roles_by_email(
signoz, admin_token_fresh, USER_EDITOR_EMAIL
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "editor@integration.test"),
None,
)
assert found_user is not None
assert found_user["role"] == "EDITOR"
assert found_user["displayName"] == "editor"
assert found_user["email"] == "editor@integration.test"
assert_user_has_role(found_user, "signoz-editor")
assert found_user["displayName"] == USER_EDITOR_NAME
assert found_user["email"] == USER_EDITOR_EMAIL
assert found_user["status"] == "active"
def test_revoke_invite_and_register(
def test_revoke_invite(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Invite the viewer user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "viewer@integration.test", "role": "VIEWER"},
json={"email": USER_VIEWER_EMAIL, "role": "VIEWER"},
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
reset_token = invited_user["token"]
@@ -216,30 +171,76 @@ def test_revoke_invite_and_register(
assert response.status_code in (HTTPStatus.BAD_REQUEST, HTTPStatus.NOT_FOUND)
def test_self_access(
def test_provision_user(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
"""
Simulates the upstream zeus provisioning flow:
1. Invite a user as ADMIN (register already happened via test_register)
2. List users to find the invited user's ID
3. Get reset password token for that user
4. Use the token to set the password and activate the user
5. Verify the user can log in
"""
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
provisioned_email = "zeus-provisioned@integration.test"
provisioned_name = "zeus provisioned user"
provisioned_password = "password123Z$"
# Step 1: Invite user as ADMIN (mirrors zeus inviteUserOnSigNoz)
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={
"email": provisioned_email,
"name": provisioned_name,
"role": "ADMIN",
},
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.CREATED, response.text
# Step 2: List users to find the invited user's ID (mirrors zeus GET /api/v1/user)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
users = response.json()["data"]
found_user = next((u for u in users if u["email"] == provisioned_email), None)
assert found_user is not None
user_id = found_user["id"]
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "editor@integration.test"),
None,
)
# Step 3: Get reset password token (mirrors zeus GET /api/v1/getResetPasswordToken/{id})
response = requests.get(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{found_user['id']}"),
timeout=2,
signoz.self.host_configs["8080"].get(
f"/api/v1/getResetPasswordToken/{user_id}"
),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
assert response.json()["data"]["role"] == "EDITOR"
reset_token = response.json()["data"]["token"]
assert reset_token is not None
assert reset_token != ""
# Step 4: Use the token to set password and activate user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": provisioned_password, "token": reset_token},
timeout=5,
)
assert response.status_code == HTTPStatus.NO_CONTENT
# Step 5: Verify the provisioned user can log in and is active with admin role
user_token = get_token(provisioned_email, provisioned_password)
assert user_token is not None
provisioned_user = find_user_with_roles_by_email(
signoz, admin_token, provisioned_email
)
assert provisioned_user["status"] == "active"
assert provisioned_user["displayName"] == provisioned_name
assert_user_has_role(provisioned_user, "signoz-admin")

View File

@@ -5,56 +5,45 @@ import requests
from sqlalchemy import sql
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.authutils import find_user_by_email
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
PASSWORD_USER_EMAIL = "admin+password@integration.test"
PASSWORD_USER_PASSWORD = "password123Z$"
def test_change_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Create another admin user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "admin+password@integration.test", "role": "ADMIN"},
json={"email": PASSWORD_USER_EMAIL, "role": "ADMIN"},
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
reset_token = invited_user["token"]
# Reset password to activate user
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
json={"password": PASSWORD_USER_PASSWORD, "token": reset_token},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
# Get the user id
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "admin+password@integration.test"
),
None,
)
# Get the user id via v2
found_user = find_user_by_email(signoz, admin_token, PASSWORD_USER_EMAIL)
# Try logging in with the password
token = get_token("admin+password@integration.test", "password123Z$")
token = get_token(PASSWORD_USER_EMAIL, PASSWORD_USER_PASSWORD)
assert token is not None
# Try changing the password with a bad old password which should fail
@@ -65,7 +54,7 @@ def test_change_password(
json={
"userId": f"{found_user['id']}",
"oldPassword": "password",
"newPassword": "password123Z$",
"newPassword": PASSWORD_USER_PASSWORD,
},
timeout=2,
headers={"Authorization": f"Bearer {token}"},
@@ -80,7 +69,7 @@ def test_change_password(
),
json={
"userId": f"{found_user['id']}",
"oldPassword": "password123Z$",
"oldPassword": PASSWORD_USER_PASSWORD,
"newPassword": "password123Znew$",
},
timeout=2,
@@ -90,33 +79,17 @@ def test_change_password(
assert response.status_code == HTTPStatus.NO_CONTENT
# Try logging in with the new password
token = get_token("admin+password@integration.test", "password123Znew$")
token = get_token(PASSWORD_USER_EMAIL, "password123Znew$")
assert token is not None
def test_reset_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Get the user id for admin+password@integration.test
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "admin+password@integration.test"
),
None,
)
# Get the user id via v2
found_user = find_user_by_email(signoz, admin_token, PASSWORD_USER_EMAIL)
response = requests.get(
signoz.self.host_configs["8080"].get(
@@ -148,33 +121,17 @@ def test_reset_password(
assert response.status_code == HTTPStatus.NO_CONTENT
token = get_token("admin+password@integration.test", "password123Z$NEWNEW#!")
token = get_token(PASSWORD_USER_EMAIL, "password123Z$NEWNEW#!")
assert token is not None
def test_reset_password_with_no_password(
signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Get the user id for admin+password@integration.test
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(
user
for user in user_response
if user["email"] == "admin+password@integration.test"
),
None,
)
# Get the user id via v2
found_user = find_user_by_email(signoz, admin_token, PASSWORD_USER_EMAIL)
with signoz.sqlstore.conn.connect() as conn:
result = conn.execute(
@@ -205,7 +162,7 @@ def test_reset_password_with_no_password(
assert response.status_code == HTTPStatus.NO_CONTENT
token = get_token("admin+password@integration.test", "FINALPASSword123!#[")
token = get_token(PASSWORD_USER_EMAIL, "FINALPASSword123!#[")
assert token is not None
@@ -220,7 +177,7 @@ def test_forgot_password_returns_204_for_nonexistent_email(
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
params={
"email": "admin@integration.test",
"email": USER_ADMIN_EMAIL,
"ref": f"{signoz.self.host_configs['8080'].base()}",
},
timeout=5,
@@ -253,20 +210,22 @@ def test_forgot_password_creates_reset_token(
3. Use the token to reset password
4. Verify user can login with new password
"""
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
forgot_email = "forgot@integration.test"
# Create a user specifically for testing forgot password
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={
"email": "forgot@integration.test",
"email": forgot_email,
"role": "EDITOR",
"name": "forgotpassword user",
},
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
reset_token = invited_user["token"]
@@ -283,7 +242,7 @@ def test_forgot_password_creates_reset_token(
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
params={
"email": "forgot@integration.test",
"email": forgot_email,
"ref": f"{signoz.self.host_configs['8080'].base()}",
},
timeout=5,
@@ -295,7 +254,7 @@ def test_forgot_password_creates_reset_token(
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/factor_password/forgot"),
json={
"email": "forgot@integration.test",
"email": forgot_email,
"orgId": org_id,
"frontendBaseURL": signoz.self.host_configs["8080"].base(),
},
@@ -304,19 +263,7 @@ def test_forgot_password_creates_reset_token(
assert response.status_code == HTTPStatus.NO_CONTENT
# Verify reset password token was created by querying the database
# First, get the user ID
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "forgot@integration.test"),
None,
)
assert found_user is not None
found_user = find_user_by_email(signoz, admin_token, forgot_email)
reset_token = None
# Query the database directly to get the reset password token
@@ -325,7 +272,7 @@ def test_forgot_password_creates_reset_token(
result = conn.execute(
sql.text(
"""
SELECT rpt.token
SELECT rpt.token
FROM reset_password_token rpt
JOIN factor_password fp ON rpt.password_id = fp.id
WHERE fp.user_id = :user_id
@@ -351,12 +298,12 @@ def test_forgot_password_creates_reset_token(
assert response.status_code == HTTPStatus.NO_CONTENT
# Verify user can login with the new password
user_token = get_token("forgot@integration.test", "newSecurePassword123Z$!")
user_token = get_token(forgot_email, "newSecurePassword123Z$!")
assert user_token is not None
# Verify old password no longer works
try:
get_token("forgot@integration.test", "originalPassword123Z$")
get_token(forgot_email, "originalPassword123Z$")
assert False, "Old password should not work after reset"
except AssertionError:
pass # Expected - old password should fail
@@ -368,27 +315,18 @@ def test_reset_password_with_expired_token(
"""
Test that resetting password with an expired token fails.
"""
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Get user ID for the forgot@integration.test user
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.OK
user_response = response.json()["data"]
found_user = next(
(user for user in user_response if user["email"] == "forgot@integration.test"),
None,
)
assert found_user is not None
forgot_email = "forgot@integration.test"
# Get user ID via v2
found_user = find_user_by_email(signoz, admin_token, forgot_email)
# Get org ID
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/sessions/context"),
params={
"email": "forgot@integration.test",
"email": forgot_email,
"ref": f"{signoz.self.host_configs['8080'].base()}",
},
timeout=5,
@@ -400,7 +338,7 @@ def test_reset_password_with_expired_token(
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v2/factor_password/forgot"),
json={
"email": "forgot@integration.test",
"email": forgot_email,
"orgId": org_id,
"frontendBaseURL": signoz.self.host_configs["8080"].base(),
},
@@ -432,8 +370,8 @@ def test_reset_password_with_expired_token(
conn.execute(
sql.text(
"""
UPDATE reset_password_token
SET expires_at = :expired_time
UPDATE reset_password_token
SET expires_at = :expired_time
WHERE id = :token_id
"""
),

View File

@@ -4,23 +4,36 @@ from typing import Callable
import requests
from fixtures import types
from fixtures.auth import (
USER_ADMIN_EMAIL,
USER_ADMIN_PASSWORD,
USER_EDITOR_EMAIL,
USER_EDITOR_PASSWORD,
)
from fixtures.authutils import (
change_user_role,
create_active_user,
)
ROLECHANGE_USER_EMAIL = "admin+rolechange@integration.test"
ROLECHANGE_USER_PASSWORD = "password123Z$"
def test_change_role(
signoz: types.SigNoz,
get_token: Callable[[str, str], str],
):
admin_token = get_token("admin@integration.test", "password123Z$")
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Create a new user as VIEWER
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/invite"),
json={"email": "admin+rolechange@integration.test", "role": "VIEWER"},
json={"email": ROLECHANGE_USER_EMAIL, "role": "VIEWER"},
timeout=2,
headers={"Authorization": f"Bearer {admin_token}"},
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
reset_token = invited_user["token"]
@@ -28,23 +41,22 @@ def test_change_role(
# Activate user via reset password
response = requests.post(
signoz.self.host_configs["8080"].get("/api/v1/resetPassword"),
json={"password": "password123Z$", "token": reset_token},
json={"password": ROLECHANGE_USER_PASSWORD, "token": reset_token},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
# Make some API calls as new user
new_user_token = get_token("admin+rolechange@integration.test", "password123Z$")
new_user_token = get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
timeout=2,
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {new_user_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
new_user_id = response.json()["data"]["id"]
new_user_data = response.json()["data"]
new_user_id = new_user_data["id"]
# Make some API call which is protected
response = requests.get(
@@ -55,27 +67,27 @@ def test_change_role(
assert response.status_code == HTTPStatus.FORBIDDEN
# Change the new user's role - move to ADMIN
# Change the new user's role via v2 - move VIEWER to ADMIN
change_user_role(signoz, admin_token, new_user_id, "signoz-viewer", "signoz-admin")
# Update display name via v2
response = requests.put(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{new_user_id}"),
json={
"displayName": "role change user",
"role": "ADMIN",
},
signoz.self.host_configs["8080"].get(f"/api/v2/users/{new_user_id}"),
json={"displayName": "role change user"},
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.NO_CONTENT
assert response.status_code == HTTPStatus.OK
# Make some API calls again
# Verify user can now access admin endpoints
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
timeout=2,
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {new_user_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.OK
me_data = response.json()["data"]
assert me_data is not None
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/org/preferences"),
@@ -84,3 +96,303 @@ def test_change_role(
)
assert response.status_code == HTTPStatus.OK
def test_get_user_roles(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify GET /api/v2/users/{id}/roles returns correct roles."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    rolechange_token = get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)

    # admin+rolechange user was promoted to ADMIN in test_change_role
    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {rolechange_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    user_id = resp.json()["data"]["id"]

    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    roles = resp.json()["data"]

    assert len(roles) >= 1
    assert "signoz-admin" in {r["name"] for r in roles}

    # every role object must expose id, name and type
    for role in roles:
        for key in ("id", "name", "type"):
            assert key in role
def test_assign_role_replaces_previous(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify POST /api/v2/users/{id}/roles replaces existing role."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    admin_headers = {"Authorization": f"Bearer {admin_token}"}

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={
            "Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
        },
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    user_id = resp.json()["data"]["id"]

    # assigning editor should displace the previously held admin role
    resp = requests.post(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
        json={"name": "signoz-editor"},
        headers=admin_headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK

    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
        headers=admin_headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    names = {r["name"] for r in resp.json()["data"]}
    assert "signoz-editor" in names
    assert "signoz-admin" not in names
def test_get_users_by_role(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify GET /api/v2/roles/{role_id}/users returns users with that role.

    Relies on the rolechange user holding signoz-editor (assigned in
    test_assign_role_replaces_previous).
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={
            "Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
        },
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    me = response.json()["data"]

    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{me['id']}/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    roles = response.json()["data"]
    # Assert the role exists BEFORE subscripting: the previous
    # next(..., None)["id"] pattern raised TypeError on a miss
    # instead of failing the assertion cleanly.
    editor_role = next((r for r in roles if r["name"] == "signoz-editor"), None)
    assert editor_role is not None
    editor_role_id = editor_role["id"]

    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/roles/{editor_role_id}/users"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    user_emails = {u["email"] for u in response.json()["data"]}
    assert ROLECHANGE_USER_EMAIL in user_emails
def test_remove_role(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify DELETE /api/v2/users/{id}/roles/{roleId} removes the role.

    After removal, the user's role list must be empty (the user only held
    signoz-editor at this point).
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={
            "Authorization": f"Bearer {get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)}"
        },
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    me = response.json()["data"]
    user_id = me["id"]

    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    roles = response.json()["data"]
    # Assert the role exists BEFORE subscripting: the previous
    # next(..., None)["id"] pattern raised TypeError on a miss
    # instead of failing the assertion cleanly.
    editor_role = next((r for r in roles if r["name"] == "signoz-editor"), None)
    assert editor_role is not None
    editor_role_id = editor_role["id"]

    response = requests.delete(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/users/{user_id}/roles/{editor_role_id}"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT

    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    roles_after = response.json()["data"]
    assert len(roles_after) == 0
def test_user_with_roles_reflects_change(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify GET /api/v2/users/{id} userRoles reflects role removal."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    rolechange_token = get_token(ROLECHANGE_USER_EMAIL, ROLECHANGE_USER_PASSWORD)

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {rolechange_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    user_id = resp.json()["data"]["id"]

    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    detail = resp.json()["data"]

    # all roles were removed in test_remove_role, so the set must be empty
    role_names = {ur["role"]["name"] for ur in detail["userRoles"]}
    assert len(role_names) == 0
def test_admin_cannot_assign_role_to_self(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify POST /api/v2/users/{own_id}/roles is rejected (self-mutation guard)."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    headers = {"Authorization": f"Bearer {token}"}

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers=headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    own_id = resp.json()["data"]["id"]

    # an admin assigning a role to themselves must be rejected
    resp = requests.post(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{own_id}/roles"),
        json={"name": "signoz-editor"},
        headers=headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.BAD_REQUEST
def test_admin_cannot_remove_own_role(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify DELETE /api/v2/users/{own_id}/roles/{roleId} is rejected (self-mutation guard)."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    admin_data = response.json()["data"]

    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_data['id']}/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    roles = response.json()["data"]
    # Assert the role exists BEFORE subscripting: the previous
    # next(..., None)["id"] pattern raised TypeError on a miss, making the
    # following not-None assertion unreachable.
    admin_role = next((r for r in roles if r["name"] == "signoz-admin"), None)
    assert admin_role is not None
    admin_role_id = admin_role["id"]

    response = requests.delete(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/users/{admin_data['id']}/roles/{admin_role_id}"
        ),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.BAD_REQUEST
def test_editor_cannot_manage_roles(
    signoz: types.SigNoz,
    get_token: Callable[[str, str], str],
):
    """Verify non-admin cannot call role management endpoints.

    Creates a viewer target user, then checks that an editor token is
    forbidden on GET/POST/DELETE role endpoints for that user.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # create a viewer user to be the target
    viewer_id = create_active_user(
        signoz,
        admin_token,
        email="viewer+roleauth@integration.test",
        role="VIEWER",
        password=ROLECHANGE_USER_PASSWORD,
        name="viewer roleauth",
    )

    editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    # GET roles — forbidden
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{viewer_id}/roles"),
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.FORBIDDEN

    # POST assign role — forbidden
    response = requests.post(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{viewer_id}/roles"),
        json={"name": "signoz-editor"},
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.FORBIDDEN

    # DELETE remove role — forbidden (look up the role id as admin first)
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{viewer_id}/roles"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    viewer_roles = response.json()["data"]
    # Assert the role exists BEFORE subscripting: the previous
    # next(..., None)["id"] pattern raised TypeError on a miss
    # instead of failing the test cleanly.
    viewer_role = next((r for r in viewer_roles if r["name"] == "signoz-viewer"), None)
    assert viewer_role is not None
    viewer_role_id = viewer_role["id"]

    response = requests.delete(
        signoz.self.host_configs["8080"].get(
            f"/api/v2/users/{viewer_id}/roles/{viewer_role_id}"
        ),
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.FORBIDDEN

View File

@@ -27,7 +27,7 @@ def test_duplicate_user_invite_rejected(
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
reset_token = invited_user["token"]

View File

@@ -4,6 +4,7 @@ from typing import Callable
import requests
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.authutils import create_active_user
from fixtures.types import SigNoz
@@ -37,7 +38,7 @@ def test_reinvite_deleted_user(
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
invited_user = response.json()["data"]
reset_token = invited_user["token"]
@@ -68,7 +69,7 @@ def test_reinvite_deleted_user(
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
reinvited_user = response.json()["data"]
assert reinvited_user["role"] == "VIEWER"
assert reinvited_user["id"] != invited_user["id"] # confirms a new user was created
@@ -118,4 +119,67 @@ def test_bulk_invite(
headers={"Authorization": f"Bearer {admin_token}"},
timeout=5,
)
assert response.status_code == HTTPStatus.CREATED
assert response.status_code == HTTPStatus.CREATED, response.text
def test_delete_user(
    signoz: SigNoz,
    get_token: Callable[[str, str], str],
):
    """
    Verify that after soft-deleting a user:
    1. GET /api/v2/users shows the user with status == "deleted"
    2. GET /api/v2/users/{id} returns the user with status == "deleted"

    NOTE(review): the original docstring claimed deletion leaves "empty
    userRoles (roles revoked)", but the final assertion below expects
    len(userRoles) == 1. The docstring and the code cannot both be right —
    confirm whether roles are meant to be revoked on soft-delete.
    """
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    user_id = create_active_user(
        signoz,
        admin_token,
        email="delete-verify-v2@integration.test",
        role="EDITOR",
        password="password123Z$",
        name="delete verify v2",
    )
    # verify user is active via v2
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    data = response.json()["data"]
    assert data["status"] == "active"
    assert len(data["userRoles"]) == 1
    # delete the user (soft delete via the v1 endpoint)
    response = requests.delete(
        signoz.self.host_configs["8080"].get(f"/api/v1/user/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.NO_CONTENT
    # verify status is deleted in the users list
    response = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    users = response.json()["data"]
    deleted_user = next((u for u in users if u["id"] == user_id), None)
    assert deleted_user is not None
    assert deleted_user["status"] == "deleted"
    # verify the detail endpoint also reports "deleted".
    # NOTE(review): the comment here originally said "verify roles are
    # revoked", yet the assertion expects one remaining role — confirm
    # the intended post-delete userRoles contract.
    response = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{user_id}"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert response.status_code == HTTPStatus.OK
    data = response.json()["data"]
    assert data["status"] == "deleted"
    assert len(data["userRoles"]) == 1

View File

@@ -18,13 +18,13 @@ def test_unique_index_allows_multiple_deleted_rows(
get_token: Callable[[str, str], str],
):
"""
Verify that the composite unique index on (org_id, email, deleted_at) allows multiple
deleted rows for the same (org_id, email) while still enforcing uniqueness among
non-deleted rows.
Verify that the partial unique index on (email, org_id) WHERE status != 'deleted'
allows multiple deleted rows for the same (org_id, email) while still enforcing
uniqueness among non-deleted rows.
Non-deleted users share deleted_at=zero-time, so the unique index prevents duplicates.
Soft-deleted users each have a distinct deleted_at timestamp, so the index allows
multiple deleted rows for the same (org_id, email).
The partial unique index only covers rows where status != 'deleted', so active
users cannot share the same (org_id, email). Deleted users are excluded from
the index, allowing multiple deleted rows for the same (org_id, email).
Steps:
1. Invite and soft-delete a user via the API (first deleted row).
@@ -32,9 +32,9 @@ def test_unique_index_allows_multiple_deleted_rows(
3. Assert via SQL that exactly two deleted rows exist for the email.
4. Assert via SQL that inserting one active row succeeds (no conflict — only
deleted rows exist), then inserting a second active row for the same
(org_id, email) fails with a unique constraint error (both have deleted_at=zero-time).
(org_id, email) fails with a unique constraint error.
5. Assert via SQL that inserting a third deleted row for the same (org_id, email)
with a unique deleted_at succeeds — confirming the index does not cover deleted rows.
succeeds — confirming the index does not cover deleted rows.
6. Assert via SQL that the final count of deleted rows is 3.
"""
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
@@ -50,7 +50,7 @@ def test_unique_index_allows_multiple_deleted_rows(
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert resp.status_code == HTTPStatus.CREATED
assert resp.status_code == HTTPStatus.CREATED, resp.text
first_user_id = resp.json()["data"]["id"]
resp = requests.delete(
@@ -71,7 +71,7 @@ def test_unique_index_allows_multiple_deleted_rows(
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert resp.status_code == HTTPStatus.CREATED
assert resp.status_code == HTTPStatus.CREATED, resp.text
second_user_id = resp.json()["data"]["id"]
assert second_user_id != first_user_id
@@ -86,10 +86,9 @@ def test_unique_index_allows_multiple_deleted_rows(
with signoz.sqlstore.conn.connect() as conn:
result = conn.execute(
sql.text(
"SELECT id, deleted_at FROM users"
" WHERE email = :email AND deleted_at != :zero_time"
"SELECT id FROM users WHERE email = :email AND status = 'deleted'"
),
{"email": UNIQUE_INDEX_USER_EMAIL, "zero_time": "0001-01-01 00:00:00"},
{"email": UNIQUE_INDEX_USER_EMAIL},
)
deleted_rows = result.fetchall()
@@ -109,24 +108,24 @@ def test_unique_index_allows_multiple_deleted_rows(
org_id = result.fetchone()[0]
# Step 4: the unique index must still block a duplicate non-deleted row.
# Both active rows have deleted_at=zero-time, so they share the same (org_id, email, zero-time)
# tuple. First insert must succeed (only deleted rows exist so far).
# Second insert for the same (org_id, email) with deleted_at=zero-time must fail.
# The partial unique index covers rows WHERE status != 'deleted', so two active
# rows with the same (org_id, email) must conflict.
# First insert must succeed (only deleted rows exist so far).
# Second insert for the same (org_id, email) with status='active' must fail.
active_id = str(uuid.uuid4())
with signoz.sqlstore.conn.connect() as conn:
conn.execute(
sql.text(
"INSERT INTO users"
" (id, display_name, email, org_id, is_root, status, created_at, updated_at, deleted_at)"
" (id, display_name, email, org_id, is_root, status, created_at, updated_at)"
" VALUES (:id, :display_name, :email, :org_id,"
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, :zero_time)"
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
),
{
"id": active_id,
"display_name": "first active row",
"email": UNIQUE_INDEX_USER_EMAIL,
"org_id": org_id,
"zero_time": "0001-01-01 00:00:00",
},
)
conn.commit()
@@ -136,27 +135,26 @@ def test_unique_index_allows_multiple_deleted_rows(
conn.execute(
sql.text(
"INSERT INTO users"
" (id, display_name, email, org_id, is_root, status, created_at, updated_at, deleted_at)"
" (id, display_name, email, org_id, is_root, status, created_at, updated_at)"
" VALUES (:id, :display_name, :email, :org_id,"
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, :zero_time)"
" false, 'active', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
),
{
"id": str(uuid.uuid4()),
"display_name": "should violate index",
"email": UNIQUE_INDEX_USER_EMAIL,
"org_id": org_id,
"zero_time": "0001-01-01 00:00:00",
},
)
# Step 5: a third deleted row with a unique deleted_at must be accepted
# Step 5: a third deleted row must be accepted (excluded from partial index)
with signoz.sqlstore.conn.connect() as conn:
conn.execute(
sql.text(
"INSERT INTO users"
" (id, display_name, email, org_id, is_root, status, created_at, updated_at, deleted_at)"
" (id, display_name, email, org_id, is_root, status, created_at, updated_at)"
" VALUES (:id, :display_name, :email, :org_id,"
" false, 'deleted', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
" false, 'deleted', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)"
),
{
"id": str(uuid.uuid4()),
@@ -172,9 +170,9 @@ def test_unique_index_allows_multiple_deleted_rows(
result = conn.execute(
sql.text(
"SELECT COUNT(*) FROM users"
" WHERE email = :email AND deleted_at != :zero_time"
" WHERE email = :email AND status = 'deleted'"
),
{"email": UNIQUE_INDEX_USER_EMAIL, "zero_time": "0001-01-01 00:00:00"},
{"email": UNIQUE_INDEX_USER_EMAIL},
)
count = result.fetchone()[0]

View File

@@ -0,0 +1,203 @@
from http import HTTPStatus
from typing import Callable
import requests
from fixtures import types
from fixtures.auth import (
USER_ADMIN_EMAIL,
USER_ADMIN_PASSWORD,
USER_EDITOR_EMAIL,
USER_EDITOR_PASSWORD,
)
from fixtures.authutils import (
assert_user_has_role,
find_user_by_email,
find_user_with_roles_by_email,
)
def test_list_users(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
    """Verify GET /api/v2/users returns all users with correct fields."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    users = resp.json()["data"]

    def first_with_email(email):
        # first matching entry, mirroring next(..., None) semantics
        for user in users:
            if user["email"] == email:
                return user
        return None

    admin_user = first_with_email(USER_ADMIN_EMAIL)
    assert admin_user is not None
    assert admin_user["isRoot"] is True
    assert admin_user["status"] == "active"

    editor_user = first_with_email(USER_EDITOR_EMAIL)
    assert editor_user is not None
    assert editor_user["status"] == "active"
def test_get_user(signoz: types.SigNoz, get_token: Callable[[str, str], str]) -> None:
    """Verify GET /api/v2/users/{id} returns user with roles."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor = find_user_by_email(signoz, token, USER_EDITOR_EMAIL)
    editor_id = editor["id"]

    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{editor_id}"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK

    data = resp.json()["data"]
    assert data["email"] == USER_EDITOR_EMAIL
    assert data["status"] == "active"
    assert len(data["userRoles"]) >= 1
    assert_user_has_role(data, "signoz-editor")
def test_get_my_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify GET /api/v2/users/me returns authenticated user with roles."""
    token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK

    me = resp.json()["data"]
    assert me["email"] == USER_EDITOR_EMAIL
    assert me["status"] == "active"
    assert me["isRoot"] is False
    assert_user_has_role(me, "signoz-editor")
def test_update_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify PUT /api/v2/users/{id} updates displayName."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor = find_user_by_email(signoz, token, USER_EDITOR_EMAIL)
    editor_id = editor["id"]

    resp = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{editor_id}"),
        json={"displayName": "updated editor"},
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.NO_CONTENT

    # re-fetch and confirm the new display name stuck
    refreshed = find_user_with_roles_by_email(signoz, token, USER_EDITOR_EMAIL)
    assert refreshed["displayName"] == "updated editor"
def test_update_my_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify PUT /api/v2/users/me updates own displayName."""
    token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
    headers = {"Authorization": f"Bearer {token}"}
    me_url = signoz.self.host_configs["8080"].get("/api/v2/users/me")

    resp = requests.put(
        me_url,
        json={"displayName": "self updated editor"},
        headers=headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.NO_CONTENT

    # confirm the update via a fresh read of /users/me
    resp = requests.get(me_url, headers=headers, timeout=5)
    assert resp.status_code == HTTPStatus.OK
    assert resp.json()["data"]["displayName"] == "self updated editor"
def test_admin_cannot_update_self_via_id(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify PUT /api/v2/users/{own_id} is rejected (self-mutation guard)."""
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    headers = {"Authorization": f"Bearer {token}"}

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers=headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    own_id = resp.json()["data"]["id"]

    # updating oneself through the id-based endpoint must be rejected
    resp = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{own_id}"),
        json={"displayName": "should fail"},
        headers=headers,
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.BAD_REQUEST
def test_editor_cannot_list_users(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify non-admin cannot call GET /api/v2/users."""
    token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users"),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    # listing users is admin-only
    assert resp.status_code == HTTPStatus.FORBIDDEN
def test_editor_cannot_get_other_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify non-admin cannot call GET /api/v2/users/{other_id}."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    # resolve the admin's id using the admin's own token
    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    admin_id = resp.json()["data"]["id"]

    # the editor must not be able to read another user's record
    resp = requests.get(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_id}"),
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.FORBIDDEN
def test_editor_cannot_update_other_user(
    signoz: types.SigNoz, get_token: Callable[[str, str], str]
) -> None:
    """Verify non-admin cannot call PUT /api/v2/users/{other_id}."""
    admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
    editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)

    # resolve the admin's id using the admin's own token
    resp = requests.get(
        signoz.self.host_configs["8080"].get("/api/v2/users/me"),
        headers={"Authorization": f"Bearer {admin_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.OK
    admin_id = resp.json()["data"]["id"]

    # the editor must not be able to mutate another user's record
    resp = requests.put(
        signoz.self.host_configs["8080"].get(f"/api/v2/users/{admin_id}"),
        json={"displayName": "hacked"},
        headers={"Authorization": f"Bearer {editor_token}"},
        timeout=5,
    )
    assert resp.status_code == HTTPStatus.FORBIDDEN

View File

@@ -50,14 +50,15 @@ def test_root_user_signoz_admin_assignment(
):
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Get the user from the /user/me endpoint and extract the id
user_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
# Get the user from the v2 /users/me endpoint and extract the id
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
timeout=5,
)
assert user_response.status_code == HTTPStatus.OK
user_id = user_response.json()["data"]["id"]
assert response.status_code == HTTPStatus.OK
user_data = response.json()["data"]
user_id = user_data["id"]
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/roles"),

View File

@@ -11,6 +11,7 @@ from fixtures.auth import (
USER_EDITOR_EMAIL,
USER_EDITOR_PASSWORD,
)
from fixtures.authutils import change_user_role
from fixtures.types import Operation, SigNoz
@@ -46,13 +47,14 @@ def test_user_invite_accept_role_grant(
# Login with editor email and password
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
user_me_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {editor_token}"},
timeout=2,
timeout=5,
)
assert user_me_response.status_code == HTTPStatus.OK
editor_id = user_me_response.json()["data"]["id"]
assert response.status_code == HTTPStatus.OK
editor_data = response.json()["data"]
editor_id = editor_data["id"]
# check the forbidden response for admin api for editor user
admin_roles_response = requests.get(
@@ -101,13 +103,14 @@ def test_user_update_role_grant(
):
# Get the editor user's id
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
user_me_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {editor_token}"},
timeout=2,
timeout=5,
)
assert user_me_response.status_code == HTTPStatus.OK
editor_id = user_me_response.json()["data"]["id"]
assert response.status_code == HTTPStatus.OK
editor_data = response.json()["data"]
editor_id = editor_data["id"]
# Get the role id for viewer
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
@@ -120,15 +123,8 @@ def test_user_update_role_grant(
roles_data = roles_response.json()["data"]
org_id = roles_data[0]["orgId"]
# Update the user's role to viewer
update_payload = {"role": "VIEWER"}
update_response = requests.put(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{editor_id}"),
json=update_payload,
headers={"Authorization": f"Bearer {admin_token}"},
timeout=2,
)
assert update_response.status_code == HTTPStatus.OK
# Update the user's role to viewer via v2 role endpoints
change_user_role(signoz, admin_token, editor_id, "signoz-editor", "signoz-viewer")
# Check that user no longer has the editor role in the db
with signoz.sqlstore.conn.connect() as conn:
@@ -178,13 +174,14 @@ def test_user_delete_role_revoke(
):
# login with editor to get the user_id and check if user exists
editor_token = get_token(USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD)
user_me_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {editor_token}"},
timeout=2,
timeout=5,
)
assert user_me_response.status_code == HTTPStatus.OK
editor_id = user_me_response.json()["data"]["id"]
assert response.status_code == HTTPStatus.OK
editor_data = response.json()["data"]
editor_id = editor_data["id"]
# delete the editor user
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

View File

@@ -14,7 +14,7 @@ def test_root_user_created(signoz: types.SigNoz) -> None:
The root user service reconciles asynchronously after startup.
Phase 1: Poll /api/v1/version until setupCompleted=true.
Phase 2: Poll /api/v1/user until it returns 200, confirming the root
Phase 2: Poll /api/v2/users until it returns 200, confirming the root
user actually exists and the impersonation provider works.
"""
# Phase 1: wait for setupCompleted
@@ -39,13 +39,13 @@ def test_root_user_created(signoz: types.SigNoz) -> None:
# Phase 2: wait for root user to be fully resolved
for attempt in range(15):
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v2/users"),
timeout=2,
)
if response.status_code == HTTPStatus.OK:
return
logger.info(
"Attempt %s: /api/v1/user returned %s, retrying ...",
"Attempt %s: /api/v2/users returned %s, retrying ...",
attempt + 1,
response.status_code,
)

View File

@@ -3,6 +3,7 @@ from http import HTTPStatus
import requests
from fixtures import types
from fixtures.authutils import assert_user_has_role
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@@ -32,7 +33,7 @@ def test_impersonated_user_is_admin(signoz: types.SigNoz) -> None:
Listing users is an admin-only endpoint.
"""
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user"),
signoz.self.host_configs["8080"].get("/api/v2/users"),
timeout=2,
)
@@ -46,4 +47,11 @@ def test_impersonated_user_is_admin(signoz: types.SigNoz) -> None:
None,
)
assert root_user is not None
assert root_user["role"] == "ADMIN"
# Verify root user has admin role via v2 detail endpoint
root_detail = requests.get(
signoz.self.host_configs["8080"].get(f"/api/v2/users/{root_user['id']}"),
timeout=2,
)
assert root_detail.status_code == HTTPStatus.OK
assert_user_has_role(root_detail.json()["data"], "signoz-admin")

View File

@@ -38,12 +38,12 @@ def test_service_account_key_forbidden_on_user_me(
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""Service account key must not access /api/v1/user/me — it's user-only."""
"""Service account key must not access /api/v2/users/me — it's user-only."""
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
_, api_key = create_service_account_with_key(signoz, token, "sa-user-me-test")
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"SIGNOZ-API-KEY": api_key},
timeout=5,
)
@@ -51,7 +51,7 @@ def test_service_account_key_forbidden_on_user_me(
## This shouldn't be allowed for API key identities; will be updated once we fix that.
assert (
response.status_code == HTTPStatus.NOT_FOUND
), f"Expected 404 for service account on /user/me, got {response.status_code}: {response.text}"
), f"Expected 404 for service account on /users/me, got {response.status_code}: {response.text}"
def test_service_account_key_forbidden_on_user_preferences(
@@ -311,7 +311,7 @@ def test_user_token_still_works_on_user_me(
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/user/me"),
signoz.self.host_configs["8080"].get("/api/v2/users/me"),
headers={"Authorization": f"Bearer {token}"},
timeout=5,
)

View File

@@ -48,7 +48,7 @@ def test_assign_role_to_service_account(
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""POST /{id}/roles assigns a new role, verify via GET."""
"""POST /{id}/roles replaces existing role, verify via GET."""
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# create service account with viewer role
@@ -56,7 +56,7 @@ def test_assign_role_to_service_account(
signoz, token, "sa-assign-role", role="signoz-viewer"
)
# assign editor role additionally
# assign editor role (replaces viewer)
editor_role_id = find_role_by_name(signoz, token, "signoz-editor")
assign_resp = requests.post(
signoz.self.host_configs["8080"].get(
@@ -68,7 +68,7 @@ def test_assign_role_to_service_account(
)
assert assign_resp.status_code == HTTPStatus.NO_CONTENT, assign_resp.text
# verify both roles are present
# verify only editor role is present (viewer was replaced)
roles_resp = requests.get(
signoz.self.host_configs["8080"].get(
f"{SERVICE_ACCOUNT_BASE}/{service_account_id}/roles"
@@ -78,8 +78,9 @@ def test_assign_role_to_service_account(
)
assert roles_resp.status_code == HTTPStatus.OK, roles_resp.text
role_names = [r["name"] for r in roles_resp.json()["data"]]
assert "signoz-viewer" in role_names
assert len(role_names) == 1
assert "signoz-editor" in role_names
assert "signoz-viewer" not in role_names
def test_assign_role_idempotent(
@@ -87,7 +88,7 @@ def test_assign_role_idempotent(
create_user_admin: types.Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
"""POST same role twice succeeds (store uses ON CONFLICT DO NOTHING)."""
"""POST same role twice succeeds (replace with same role is idempotent)."""
token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
service_account_id = create_service_account(
signoz, token, "sa-role-idempotent", role="signoz-viewer"
@@ -118,6 +119,66 @@ def test_assign_role_idempotent(
assert role_names.count("signoz-viewer") == 1
def test_assign_role_replaces_access(
    signoz: types.SigNoz,
    create_user_admin: types.Operation,  # pylint: disable=unused-argument
    get_token: Callable[[str, str], str],
):
    """After role replacement, SA loses old permissions and gains new ones.

    Flow: create a viewer service account with an API key, confirm the key
    is denied (403) on an admin-only endpoint, replace the role with
    signoz-admin via POST /{id}/roles, then confirm the same key now gets
    200 and that only the admin role remains assigned.
    """
    token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

    # create SA with viewer role and an API key
    service_account_id, api_key = create_service_account_with_key(
        signoz, token, "sa-role-replace-access", role="signoz-viewer"
    )

    # viewer should get 403 on the admin-only service-account list endpoint
    resp = requests.get(
        signoz.self.host_configs["8080"].get(SERVICE_ACCOUNT_BASE),
        headers={"SIGNOZ-API-KEY": api_key},
        timeout=5,
    )
    assert (
        resp.status_code == HTTPStatus.FORBIDDEN
    ), f"Expected 403 for viewer on admin endpoint, got {resp.status_code}: {resp.text}"

    # assign admin role (replaces viewer — role assignment is single-role)
    admin_role_id = find_role_by_name(signoz, token, "signoz-admin")
    assign_resp = requests.post(
        signoz.self.host_configs["8080"].get(
            f"{SERVICE_ACCOUNT_BASE}/{service_account_id}/roles"
        ),
        json={"id": admin_role_id},
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert assign_resp.status_code == HTTPStatus.NO_CONTENT, assign_resp.text

    # the same API key should now have admin access
    resp = requests.get(
        signoz.self.host_configs["8080"].get(SERVICE_ACCOUNT_BASE),
        headers={"SIGNOZ-API-KEY": api_key},
        timeout=5,
    )
    assert (
        resp.status_code == HTTPStatus.OK
    ), f"Expected 200 for admin on admin endpoint, got {resp.status_code}: {resp.text}"

    # verify only the admin role is present (viewer was replaced, not added to)
    roles_resp = requests.get(
        signoz.self.host_configs["8080"].get(
            f"{SERVICE_ACCOUNT_BASE}/{service_account_id}/roles"
        ),
        headers={"Authorization": f"Bearer {token}"},
        timeout=5,
    )
    assert roles_resp.status_code == HTTPStatus.OK, roles_resp.text
    role_names = [r["name"] for r in roles_resp.json()["data"]]
    assert len(role_names) == 1
    assert "signoz-admin" in role_names
    assert "signoz-viewer" not in role_names
def test_remove_role_from_service_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument