Compare commits

...

17 Commits

Author SHA1 Message Date
grandwizard28
3de8b3b5b2 feat: toggle foreign key constraint 2026-02-16 16:54:12 +05:30
grandwizard28
a3d74b19fc feat: handle setup completed 2026-02-16 15:59:10 +05:30
grandwizard28
7edd97b3d2 feat: add org_name to root user reconciliation 2026-02-16 15:59:09 +05:30
grandwizard28
17958d8aa0 fix: add openapi spec 2026-02-16 15:59:09 +05:30
grandwizard28
f8341790e9 fix: clean the update functions and the reconcilliation logic 2026-02-16 15:59:09 +05:30
grandwizard28
e84c6749b2 feat: fix promote to root and authz grant changes where needed 2026-02-16 15:59:09 +05:30
grandwizard28
65fd5f274c feat: add root user support with protection guards and env-based provisioning 2026-02-16 15:59:09 +05:30
Ashwin Bhatkal
8898f02698 chore: variables based panel fetching (#10292)
* chore: replace prop drilling with fetch store

* chore: variables based panel fetching

* chore: add tests

* chore: move tests

* chore: add tests for new hook

* chore: resolve comments
2026-02-16 09:11:06 +00:00
Ashwin Bhatkal
f277009ff8 chore: replace prop drilling with fetch store (#10291)
* chore: shared utils update + API plumbing

* chore: variable fetch state machine

* chore: add tests

* chore: add tests

* chore: move tests

* chore: fix tests

* chore: replace prop drilling with fetch store

* chore: fix types

* chore: add tests for new utils

* chore: resolve comments
2026-02-16 08:50:08 +00:00
Abhi kumar
17c6b79d79 Revert "feat: enabled new bar panel (#10312)" (#10314)
This reverts commit 82dffdda56.
2026-02-16 14:09:19 +05:30
Abhi kumar
76d6c23217 fix: added fix for bar chart width calculation based on stepinterval (#10305) 2026-02-16 13:00:20 +05:30
Abhi kumar
82dffdda56 feat: enabled new bar panel (#10312) 2026-02-16 12:43:45 +05:30
Abhi kumar
8f38398863 test: added tests for barpanel hooks + utils (#10295)
* chore: refactored the config builder and added base config builder

* chore: added a common chart wrapper

* chore: tsc fix

* fix: pr review changes

* fix: pr review changes

* chore: added different tooltips

* chore: removed dayjs extention

* feat: added new barpanel component

* fix: added fix for pr review changes

* chore: added support for bar alignment configuration

* chore: updated structure for bar panel

* test: added tests for barpanel hooks + utils
2026-02-16 06:18:14 +00:00
Ashwin Bhatkal
eb39772d3c chore: variable fetch state machine (#10290)
* chore: shared utils update + API plumbing

* chore: variable fetch state machine

* chore: add tests

* chore: add tests

* chore: move tests

* chore: fix tests
2026-02-16 11:36:16 +05:30
Pandey
df72c897f9 feat: change invitation and password reset emails (#10297)
2026-02-15 14:59:54 +00:00
Abhishek Kumar Singh
4bbe5ead07 test(integration): alerts e2e test cases with basic rule manager alerts (#10163)
2026-02-14 22:32:50 +05:30
Yunus M
e36689ecba fix: show ip addresses toggle and add regression test (#10251) 2026-02-14 19:47:16 +05:30
143 changed files with 7322 additions and 925 deletions

.github/CODEOWNERS vendored
View File

@@ -43,6 +43,12 @@
/pkg/analytics/ @vikrantgupta25
/pkg/statsreporter/ @vikrantgupta25
# Emailing Owners
/pkg/emailing/ @vikrantgupta25
/pkg/types/emailtypes/ @vikrantgupta25
/templates/email/ @vikrantgupta25
# Querier Owners
/pkg/querier/ @srikanthccv

View File

@@ -14,5 +14,8 @@
},
"[sql]": {
"editor.defaultFormatter": "adpyke.vscode-sql-formatter"
},
"[html]": {
"editor.defaultFormatter": "vscode.html-language-features"
}
}

View File

@@ -193,6 +193,15 @@ emailing:
templates:
# The directory containing the email templates. This directory should contain a list of files defined at pkg/types/emailtypes/template.go.
directory: /opt/signoz/conf/templates/email
format:
header:
enabled: false
logo_url: ""
help:
enabled: false
email: ""
footer:
enabled: false
smtp:
# The SMTP server address.
address: localhost:25
@@ -300,3 +309,14 @@ user:
allow_self: true
# The duration within which a user can reset their password.
max_token_lifetime: 6h
root:
# Whether to enable the root user. When enabled, a root user is provisioned
# on startup using the email and password below. The root user cannot be
# deleted, updated, or have their password changed through the UI.
enabled: false
# The email address of the root user.
email: ""
# The password of the root user. Must meet password requirements.
password: ""
# The name of the organization to create or look up for the root user.
org_name: default

View File

@@ -4678,6 +4678,8 @@ components:
type: string
id:
type: string
isRoot:
type: boolean
orgId:
type: string
role:

View File

@@ -45,7 +45,7 @@ type APIHandler struct {
}
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz, config signoz.Config) (*APIHandler, error) {
baseHandler, err := baseapp.NewAPIHandler(baseapp.APIHandlerOpts{
Reader: opts.DataConnector,
RuleManager: opts.RulesManager,
@@ -58,7 +58,7 @@ func NewAPIHandler(opts APIHandlerOptions, signoz *signoz.SigNoz) (*APIHandler,
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})
}, config)
if err != nil {
return nil, err

View File

@@ -175,7 +175,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
GlobalConfig: config.Global,
}
apiHandler, err := api.NewAPIHandler(apiOpts, signoz)
apiHandler, err := api.NewAPIHandler(apiOpts, signoz, config)
if err != nil {
return nil, err
}

View File

@@ -1542,6 +1542,10 @@ export interface TypesUserDTO {
* @type string
*/
id?: string;
/**
* @type boolean
*/
isRoot?: boolean;
/**
* @type string
*/
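
The isRoot flag surfaces both in the OpenAPI spec and in the generated TypesUserDTO above. A minimal sketch, assuming only the DTO shape shown in this diff, of how a client could mirror the backend protection guards in the UI; the canDeleteUser helper is hypothetical and not part of this change:

// Hypothetical helper, not part of this PR: gate destructive actions on the
// new isRoot flag so the UI mirrors the backend protection guards.
interface TypesUserDTO {
  id?: string;
  isRoot?: boolean;
  orgId?: string;
  role?: string;
}

function canDeleteUser(user: TypesUserDTO): boolean {
  // Root users are provisioned from config and cannot be deleted or updated.
  return !user.isRoot;
}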

View File

@@ -73,7 +73,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'time_series',
data: { results: [timeSeries] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
};
const params = makeBaseParams('time_series', [
@@ -156,7 +156,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
};
const params = makeBaseParams('scalar', [
@@ -239,7 +239,7 @@ describe('convertV5ResponseToLegacy', () => {
const v5Data: QueryRangeResponseV5 = {
type: 'scalar',
data: { results: [scalar] },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0 },
meta: { rowsScanned: 0, bytesScanned: 0, durationMs: 0, stepIntervals: {} },
};
const params = makeBaseParams('scalar', [

View File

@@ -388,6 +388,7 @@ export function convertV5ResponseToLegacy(
warnings: v5Data?.data?.warning || [],
},
warning: v5Data?.warning || undefined,
meta: v5Data?.meta,
},
warning: v5Data?.warning || undefined,
};
@@ -406,6 +407,7 @@ export function convertV5ResponseToLegacy(
payload: {
data: convertedData,
warning: v5Response.payload?.data?.warning || undefined,
meta: v5Data?.meta,
},
};
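
convertV5ResponseToLegacy now forwards v5Data.meta on both branches, and the updated tests require stepIntervals alongside rowsScanned, bytesScanned, and durationMs. An illustrative sketch of reading the forwarded meta downstream; the QueryRangeMetaV5 name and the per-query keying of stepIntervals are assumptions, the real type lives in the frontend's query-range typings:

// Illustrative shape only; the real type is defined in the query-range typings.
interface QueryRangeMetaV5 {
  rowsScanned: number;
  bytesScanned: number;
  durationMs: number;
  stepIntervals: Record<string, number>; // assumed: step interval per query
}

// Consumers such as the bar-chart width calculation can read the step
// interval from the forwarded meta instead of re-deriving it.
function getStepInterval(
  meta: QueryRangeMetaV5 | undefined,
  queryName: string,
): number | undefined {
  return meta?.stepIntervals[queryName];
}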

View File

@@ -78,12 +78,10 @@ function TestWrapper({ children }: { children: React.ReactNode }): JSX.Element {
describe('VariableItem Integration Tests', () => {
let user: ReturnType<typeof userEvent.setup>;
let mockOnValueUpdate: jest.Mock;
let mockSetVariablesToGetUpdated: jest.Mock;
beforeEach(() => {
user = userEvent.setup();
mockOnValueUpdate = jest.fn();
mockSetVariablesToGetUpdated = jest.fn();
jest.clearAllMocks();
});
@@ -102,9 +100,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -150,9 +145,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -195,9 +187,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -247,9 +236,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -272,9 +258,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -308,9 +291,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -344,9 +324,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -369,9 +346,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -405,9 +379,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -461,9 +432,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -508,9 +476,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -548,9 +513,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);
@@ -582,9 +544,6 @@ describe('VariableItem Integration Tests', () => {
variableData={variable}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={null}
/>
</TestWrapper>,
);

View File

@@ -282,11 +282,11 @@ export default function QuickFilters(props: IQuickFiltersProps): JSX.Element {
size="small"
style={{ marginLeft: 'auto' }}
checked={showIP ?? true}
onClick={(): void => {
onChange={(checked): void => {
logEvent('API Monitoring: Show IP addresses clicked', {
showIP: !(showIP ?? true),
showIP: checked,
});
setParams({ showIP });
setParams({ showIP: checked });
}}
/>
</div>
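
The fix above replaces onClick, which re-read the stale showIP value and wrote it straight back via setParams({ showIP }), with onChange, which receives the next checked value directly. A minimal sketch of the corrected pattern, assuming antd's Switch and a setParams setter like the one QuickFilters uses; the ShowIPToggle wrapper is hypothetical:

import { Switch } from 'antd';

function ShowIPToggle({
  showIP,
  setParams,
}: {
  showIP?: boolean;
  setParams: (params: { showIP: boolean }) => void;
}): JSX.Element {
  return (
    <Switch
      checked={showIP ?? true}
      // onChange hands over the next checked value, so the handler never
      // writes back the stale previous value the old onClick handler did.
      onChange={(checked): void => setParams({ showIP: checked })}
    />
  );
}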

View File

@@ -1,4 +1,8 @@
import { ENVIRONMENT } from 'constants/env';
import {
ApiMonitoringParams,
useApiMonitoringParams,
} from 'container/ApiMonitoring/queryParams';
import { useQueryBuilder } from 'hooks/queryBuilder/useQueryBuilder';
import {
otherFiltersResponse,
@@ -18,10 +22,15 @@ import { QuickFiltersConfig } from './constants';
jest.mock('hooks/queryBuilder/useQueryBuilder', () => ({
useQueryBuilder: jest.fn(),
}));
jest.mock('container/ApiMonitoring/queryParams');
const handleFilterVisibilityChange = jest.fn();
const redirectWithQueryBuilderData = jest.fn();
const putHandler = jest.fn();
const mockSetApiMonitoringParams = jest.fn() as jest.MockedFunction<
(newParams: Partial<ApiMonitoringParams>, replace?: boolean) => void
>;
const mockUseApiMonitoringParams = jest.mocked(useApiMonitoringParams);
const BASE_URL = ENVIRONMENT.baseURL;
const SIGNAL = SignalType.LOGS;
@@ -84,6 +93,28 @@ TestQuickFilters.defaultProps = {
config: QuickFiltersConfig,
};
function TestQuickFiltersApiMonitoring({
signal = SignalType.LOGS,
config = QuickFiltersConfig,
}: {
signal?: SignalType;
config?: IQuickFiltersConfig[];
}): JSX.Element {
return (
<QuickFilters
source={QuickFiltersSource.API_MONITORING}
config={config}
handleFilterVisibilityChange={handleFilterVisibilityChange}
signal={signal}
/>
);
}
TestQuickFiltersApiMonitoring.defaultProps = {
signal: '',
config: QuickFiltersConfig,
};
beforeAll(() => {
server.listen();
});
@@ -112,6 +143,10 @@ beforeEach(() => {
lastUsedQuery: 0,
redirectWithQueryBuilderData,
});
mockUseApiMonitoringParams.mockReturnValue([
{ showIP: true } as ApiMonitoringParams,
mockSetApiMonitoringParams,
]);
setupServer();
});
@@ -251,6 +286,24 @@ describe('Quick Filters', () => {
);
});
});
it('toggles Show IP addresses and updates API Monitoring params', async () => {
const user = userEvent.setup({ pointerEventsCheck: 0 });
render(<TestQuickFiltersApiMonitoring />);
// Switch should be rendered and initially checked
expect(screen.getByText('Show IP addresses')).toBeInTheDocument();
const toggle = screen.getByRole('switch');
expect(toggle).toHaveAttribute('aria-checked', 'true');
await user.click(toggle);
await waitFor(() => {
expect(mockSetApiMonitoringParams).toHaveBeenCalledWith(
expect.objectContaining({ showIP: false }),
);
});
});
});
describe('Quick Filters with custom filters', () => {

View File

@@ -9,11 +9,15 @@ import {
import useVariablesFromUrl from 'hooks/dashboard/useVariablesFromUrl';
import { useDashboard } from 'providers/Dashboard/Dashboard';
import { initializeDefaultVariables } from 'providers/Dashboard/initializeDefaultVariables';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { AppState } from 'store/reducers';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { GlobalReducer } from 'types/reducer/globalTime';
import { onUpdateVariableNode } from './util';
import VariableItem from './VariableItem';
import './DashboardVariableSelection.styles.scss';
@@ -22,8 +26,6 @@ function DashboardVariableSelection(): JSX.Element | null {
const {
setSelectedDashboard,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
} = useDashboard();
const { updateUrlVariable, getUrlVariables } = useVariablesFromUrl();
@@ -55,11 +57,14 @@ function DashboardVariableSelection(): JSX.Element | null {
[dependencyData?.order],
);
// Trigger refetch when dependency order changes or global time changes
// Initialize fetch store then start a new fetch cycle.
// Runs on dependency order changes, and time range changes.
useEffect(() => {
if (dependencyData?.order && dependencyData.order.length > 0) {
setVariablesToGetUpdated(dependencyData?.order || []);
}
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
initializeVariableFetchStore(allVariableNames);
enqueueFetchOfAllVariables();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [dependencyOrderKey, minTime, maxTime]);
@@ -121,29 +126,14 @@ function DashboardVariableSelection(): JSX.Element | null {
return prev;
});
if (dependencyData) {
const updatedVariables: string[] = [];
onUpdateVariableNode(
name,
dependencyData.graph,
dependencyData.order,
(node) => updatedVariables.push(node),
);
setVariablesToGetUpdated((prev) => [
...new Set([...prev, ...updatedVariables.filter((v) => v !== name)]),
]);
} else {
setVariablesToGetUpdated((prev) => prev.filter((v) => v !== name));
}
// Cascade: enqueue query-type descendants for refetching
enqueueDescendantsOfVariable(name);
},
[
// This can be removed
dashboardVariables,
updateLocalStorageDashboardVariables,
dependencyData,
updateUrlVariable,
setSelectedDashboard,
setVariablesToGetUpdated,
],
);
@@ -158,9 +148,6 @@ function DashboardVariableSelection(): JSX.Element | null {
existingVariables={dashboardVariables}
variableData={variable}
onValueUpdate={onValueUpdate}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
);
})}
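
DashboardVariableSelection now drives refetches through the variable fetch store: it initializes the store with all variable names, enqueues a full fetch cycle on dependency-order or time-range changes, and enqueues only the changed variable's descendants on a value update. The store itself is not included in this compare view; the sketch below is an assumed shape inferred from the states referenced elsewhere in the diff (idle, waiting, loading, revalidating, error), not the actual implementation:

// Assumed shape of the variable fetch state machine; the real store lives in
// providers/Dashboard/store/variableFetchStore and is not part of this diff.
type VariableFetchState =
  | 'idle' // settled; options fetched at least once
  | 'waiting' // parent variables are still fetching
  | 'loading' // first fetch in flight
  | 'revalidating' // refetch in flight after a dependency or time-range change
  | 'error';

interface VariableFetchSnapshot {
  // Bumped by enqueueFetchOfAllVariables so query keys change per cycle.
  cycleId: number;
  states: Record<string, VariableFetchState>;
}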

View File

@@ -2,18 +2,25 @@ import { memo, useCallback, useMemo, useState } from 'react';
import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import { getFieldValues } from 'api/dynamicVariables/getFieldValues';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { DEBOUNCE_DELAY } from 'constants/queryBuilderFilterConfig';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import useDebounce from 'hooks/useDebounce';
import { isEmpty } from 'lodash-es';
import { AppState } from 'store/reducers';
import { SuccessResponseV2 } from 'types/api';
import { FieldValueResponse } from 'types/api/dynamicVariables/getFieldValues';
import { GlobalReducer } from 'types/reducer/globalTime';
import { isRetryableError as checkIfRetryableError } from 'utils/errorUtils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { getOptionsForDynamicVariable } from './util';
import {
buildExistingDynamicVariableQuery,
extractErrorMessage,
getOptionsForDynamicVariable,
mergeUniqueStrings,
settleVariableFetch,
} from './util';
import { VariableItemProps } from './VariableItem';
import { dynamicVariableSelectStrategy } from './variableSelectStrategy/dynamicVariableSelectStrategy';
@@ -24,7 +31,6 @@ type DynamicVariableInputProps = Pick<
'variableData' | 'onValueUpdate' | 'existingVariables'
>;
// eslint-disable-next-line sonarjs/cognitive-complexity
function DynamicVariableInput({
variableData,
onValueUpdate,
@@ -55,14 +61,8 @@ function DynamicVariableInput({
const debouncedApiSearchText = useDebounce(apiSearchText, DEBOUNCE_DELAY);
// Build a memoized list of all currently available option strings (normalized + related)
const allAvailableOptionStrings = useMemo(
() => [
...new Set([
...optionsData.map((v) => v.toString()),
...relatedValues.map((v) => v.toString()),
]),
],
() => mergeUniqueStrings(optionsData, relatedValues),
[optionsData, relatedValues],
);
@@ -104,67 +104,24 @@ function DynamicVariableInput({
(state) => state.globalTime,
);
// existing query is the query made from the other dynamic variables around this one with there current values
// for e.g. k8s.namespace.name IN ["zeus", "gene"] AND doc_op_type IN ["test"]
// eslint-disable-next-line sonarjs/cognitive-complexity
const existingQuery = useMemo(() => {
if (!existingVariables || !variableData.dynamicVariablesAttribute) {
return '';
}
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const queryParts: string[] = [];
Object.entries(existingVariables).forEach(([, variable]) => {
// Skip the current variable being processed
if (variable.id === variableData.id) {
return;
}
// Only include dynamic variables that have selected values and are not selected as ALL
if (
variable.type === 'DYNAMIC' &&
variable.dynamicVariablesAttribute &&
variable.selectedValue &&
!isEmpty(variable.selectedValue) &&
(variable.showALLOption ? !variable.allSelected : true)
) {
const attribute = variable.dynamicVariablesAttribute;
const values = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = values
.filter((val) => val !== null && val !== undefined && val !== '')
.map((val) => val.toString());
if (validValues.length > 0) {
// Format values for query - wrap strings in quotes, keep numbers as is
const formattedValues = validValues.map((val) => {
// Check if value is a number
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val; // Keep as number
}
// Escape single quotes and wrap in quotes
return `'${val.replace(/'/g, "\\'")}'`;
});
if (formattedValues.length === 1) {
queryParts.push(`${attribute} = ${formattedValues[0]}`);
} else {
queryParts.push(`${attribute} IN [${formattedValues.join(', ')}]`);
}
}
}
});
return queryParts.join(' AND ');
}, [
existingVariables,
variableData.id,
variableData.dynamicVariablesAttribute,
]);
const existingQuery = useMemo(
() =>
buildExistingDynamicVariableQuery(
existingVariables,
variableData.id,
!!variableData.dynamicVariablesAttribute,
),
[existingVariables, variableData.id, variableData.dynamicVariablesAttribute],
);
// Wrap the hook's onDropdownVisibleChange to also track isDropdownOpen and handle cleanup
const handleSelectDropdownVisibilityChange = useCallback(
@@ -182,6 +139,73 @@ function DynamicVariableInput({
[onDropdownVisibleChange, optionsData, originalRelatedValues],
);
const handleQuerySuccess = useCallback(
(data: SuccessResponseV2<FieldValueResponse>): void => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Sync temp selection with latest API values when ALL is active and dropdown is open
if (variableData.allSelected && isDropdownOpen) {
const latestValues = mergeUniqueStrings(
newNormalizedValues,
newRelatedValues,
);
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
applyDefaultIfNeeded(
mergeUniqueStrings(newNormalizedValues, newRelatedValues),
);
}
settleVariableFetch(variableData.name, 'complete');
},
[
debouncedApiSearchText,
variableData.allSelected,
variableData.name,
isDropdownOpen,
tempSelection,
setTempSelection,
applyDefaultIfNeeded,
],
);
const handleQueryError = useCallback(
(error: { message?: string } | null): void => {
if (error) {
setErrorMessage(extractErrorMessage(error));
setIsRetryableError(checkIfRetryableError(error));
}
settleVariableFetch(variableData.name, 'failure');
},
[variableData.name],
);
const { isLoading, refetch } = useQuery(
[
REACT_QUERY_KEY.DASHBOARD_BY_ID,
@@ -192,13 +216,22 @@ function DynamicVariableInput({
debouncedApiSearchText,
variableData.dynamicVariablesSource,
variableData.dynamicVariablesAttribute,
variableFetchCycleId,
],
{
/*
* enabled if
* - we have dynamic variable source and attribute defined (ALWAYS)
* - AND
* - we're either still fetching variable options
* - OR
* - if variable is in idle state and we have already fetched options for it
**/
enabled:
variableData.type === 'DYNAMIC' &&
!!variableData.dynamicVariablesSource &&
!!variableData.dynamicVariablesAttribute,
queryFn: () =>
!!variableData.dynamicVariablesAttribute &&
(isVariableFetching || (isVariableSettled && hasVariableFetchedOnce)),
queryFn: ({ signal }) =>
getFieldValues(
variableData.dynamicVariablesSource?.toLowerCase() === 'all telemetry'
? undefined
@@ -211,70 +244,10 @@ function DynamicVariableInput({
minTime,
maxTime,
existingQuery,
signal,
),
onSuccess: (data) => {
const newNormalizedValues = data.data?.normalizedValues || [];
const newRelatedValues = data.data?.relatedValues || [];
if (!debouncedApiSearchText) {
setOptionsData(newNormalizedValues);
setIsComplete(data.data?.complete || false);
}
setFilteredOptionsData(newNormalizedValues);
setRelatedValues(newRelatedValues);
setOriginalRelatedValues(newRelatedValues);
// Only run auto-check logic when necessary to avoid performance issues
if (variableData.allSelected && isDropdownOpen) {
// Build the latest full list from API (normalized + related)
const latestValues = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
// Update temp selection to exactly reflect latest API values when ALL is active
const currentStrings = Array.isArray(tempSelection)
? tempSelection.map((v) => v.toString())
: tempSelection
? [tempSelection.toString()]
: [];
const areSame =
currentStrings.length === latestValues.length &&
latestValues.every((v) => currentStrings.includes(v));
if (!areSame) {
setTempSelection(latestValues);
}
}
// Apply default if no value is selected (e.g., new variable, first load)
if (!debouncedApiSearchText) {
const allNewOptions = [
...new Set([
...newNormalizedValues.map((v) => v.toString()),
...newRelatedValues.map((v) => v.toString()),
]),
];
applyDefaultIfNeeded(allNewOptions);
}
},
onError: (error: any) => {
if (error) {
let message = SOMETHING_WENT_WRONG;
if (error?.message) {
message = error?.message;
} else {
message =
'Please make sure configuration is valid and you have required setup and permissions';
}
setErrorMessage(message);
// Check if error is retryable (5xx) or not (4xx)
const isRetryable = checkIfRetryableError(error);
setIsRetryableError(isRetryable);
}
},
onSuccess: handleQuerySuccess,
onError: handleQueryError,
},
);
@@ -336,6 +309,8 @@ function DynamicVariableInput({
showRetryButton={isRetryableError}
showIncompleteDataMessage={!isComplete && filteredOptionsData.length > 0}
onSearch={handleSearch}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}
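
Both variable inputs now gate their react-query calls on the fetch state machine instead of the old variablesToGetUpdated queue: the query runs while the variable is fetching, stays warm once it has settled after at least one successful fetch, and the cycle id in the query key forces a refetch whenever a new cycle is enqueued. A condensed sketch of that shared pattern, assuming react-query v3 and a hypothetical fetchVariableOptions helper standing in for getFieldValues / dashboardVariablesQuery; the useVariableOptions hook is illustrative only:

import { useQuery } from 'react-query';

import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import { settleVariableFetch } from './util';

// Hypothetical stand-in for getFieldValues / dashboardVariablesQuery.
declare function fetchVariableOptions(
  name: string,
  signal?: AbortSignal,
): Promise<string[]>;

export function useVariableOptions(variableName: string): string[] | undefined {
  const {
    variableFetchCycleId,
    isVariableSettled,
    isVariableFetching,
    hasVariableFetchedOnce,
  } = useVariableFetchState(variableName);

  const { data } = useQuery(
    // The cycle id is part of the key, so enqueueing a new cycle starts a refetch.
    ['variable-options', variableName, variableFetchCycleId],
    {
      // Fetch while the state machine says we are fetching, or keep the last
      // result warm once the variable has settled after at least one fetch.
      enabled:
        isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
      queryFn: ({ signal }) => fetchVariableOptions(variableName, signal),
      onSuccess: () => settleVariableFetch(variableName, 'complete'),
      onError: () => settleVariableFetch(variableName, 'failure'),
    },
  );

  return data;
}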

View File

@@ -3,8 +3,9 @@ import { useQuery } from 'react-query';
import { useSelector } from 'react-redux';
import dashboardVariablesQuery from 'api/dashboard/variables/dashboardVariablesQuery';
import { REACT_QUERY_KEY } from 'constants/reactQueryKeys';
import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';
import sortValues from 'lib/dashboardVariables/sortVariableValues';
import { isArray, isString } from 'lodash-es';
import { isArray, isEmpty, isString } from 'lodash-es';
import { AppState } from 'store/reducers';
import { VariableResponseProps } from 'types/api/dashboard/variables/query';
import { GlobalReducer } from 'types/reducer/globalTime';
@@ -12,26 +13,18 @@ import { GlobalReducer } from 'types/reducer/globalTime';
import { variablePropsToPayloadVariables } from '../utils';
import SelectVariableInput from './SelectVariableInput';
import { useDashboardVariableSelectHelper } from './useDashboardVariableSelectHelper';
import { areArraysEqual, checkAPIInvocation } from './util';
import { areArraysEqual, settleVariableFetch } from './util';
import { VariableItemProps } from './VariableItem';
import { queryVariableSelectStrategy } from './variableSelectStrategy/queryVariableSelectStrategy';
type QueryVariableInputProps = Pick<
VariableItemProps,
| 'variableData'
| 'existingVariables'
| 'onValueUpdate'
| 'variablesToGetUpdated'
| 'setVariablesToGetUpdated'
| 'dependencyData'
'variableData' | 'existingVariables' | 'onValueUpdate'
>;
function QueryVariableInput({
variableData,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
onValueUpdate,
}: QueryVariableInputProps): JSX.Element {
const [optionsData, setOptionsData] = useState<(string | number | boolean)[]>(
@@ -43,6 +36,15 @@ function QueryVariableInput({
(state) => state.globalTime,
);
const {
variableFetchCycleId,
isVariableSettled,
isVariableFetching,
hasVariableFetchedOnce,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
} = useVariableFetchState(variableData.name || '');
const {
tempSelection,
setTempSelection,
@@ -60,16 +62,6 @@ function QueryVariableInput({
strategy: queryVariableSelectStrategy,
});
const validVariableUpdate = useCallback((): boolean => {
if (!variableData.name) {
return false;
}
return Boolean(
variablesToGetUpdated.length &&
variablesToGetUpdated[0] === variableData.name,
);
}, [variableData.name, variablesToGetUpdated]);
const getOptions = useCallback(
// eslint-disable-next-line sonarjs/cognitive-complexity
(variablesRes: VariableResponseProps | null): void => {
@@ -103,18 +95,24 @@ function QueryVariableInput({
valueNotInList = true;
}
// variablesData.allSelected is added for the case where on change of options we need to update the
// local storage
if (
variableData.name &&
(validVariableUpdate() || valueNotInList || variableData.allSelected)
) {
if (variableData.name && (valueNotInList || variableData.allSelected)) {
if (
variableData.allSelected &&
variableData.multiSelect &&
variableData.showALLOption
) {
onValueUpdate(variableData.name, variableData.id, newOptionsData, true);
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(
variableData.name,
variableData.id,
newOptionsData,
true,
);
}
// Update tempSelection to maintain ALL state when dropdown is open
if (tempSelection !== undefined) {
@@ -132,7 +130,11 @@ function QueryVariableInput({
newOptionsData.every((option) => selectedValue.includes(option));
}
if (variableData.name && variableData.id) {
if (
variableData.name &&
variableData.id &&
!isEmpty(variableData.selectedValue)
) {
onValueUpdate(variableData.name, variableData.id, value, allSelected);
}
}
@@ -141,10 +143,6 @@ function QueryVariableInput({
setOptionsData(newOptionsData);
// Apply default if no value is selected (e.g., new variable, first load)
applyDefaultIfNeeded(newOptionsData);
} else {
setVariablesToGetUpdated((prev) =>
prev.filter((name) => name !== variableData.name),
);
}
}
} catch (e) {
@@ -157,8 +155,6 @@ function QueryVariableInput({
onValueUpdate,
tempSelection,
setTempSelection,
validVariableUpdate,
setVariablesToGetUpdated,
applyDefaultIfNeeded,
],
);
@@ -169,27 +165,28 @@ function QueryVariableInput({
variableData.name || '',
`${minTime}`,
`${maxTime}`,
JSON.stringify(dependencyData?.order),
variableFetchCycleId,
],
{
enabled:
variableData &&
checkAPIInvocation(
variablesToGetUpdated,
variableData,
dependencyData?.parentDependencyGraph,
/*
* enabled if
* - we're either still fetching variable options
* - OR
* - if variable is in idle state and we have already fetched options for it
**/
enabled: isVariableFetching || (isVariableSettled && hasVariableFetchedOnce),
queryFn: ({ signal }) =>
dashboardVariablesQuery(
{
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
},
signal,
),
queryFn: () =>
dashboardVariablesQuery({
query: variableData.queryValue || '',
variables: variablePropsToPayloadVariables(existingVariables),
}),
refetchOnWindowFocus: false,
onSuccess: (response) => {
getOptions(response.payload);
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
settleVariableFetch(variableData.name, 'complete');
},
onError: (error: {
details: {
@@ -206,9 +203,7 @@ function QueryVariableInput({
}
setErrorMessage(message);
}
setVariablesToGetUpdated((prev) =>
prev.filter((v) => v !== variableData.name),
);
settleVariableFetch(variableData.name, 'failure');
},
},
);
@@ -242,6 +237,8 @@ function QueryVariableInput({
loading={isLoading}
errorMessage={errorMessage}
onRetry={handleRetry}
waiting={isVariableWaitingForDependencies}
waitingMessage={variableDependencyWaitMessage}
/>
);
}

View File

@@ -28,6 +28,8 @@ interface SelectVariableInputProps {
showRetryButton?: boolean;
showIncompleteDataMessage?: boolean;
onSearch?: (searchTerm: string) => void;
waiting?: boolean;
waitingMessage?: string;
}
const MAX_TAG_DISPLAY_VALUES = 10;
@@ -65,6 +67,7 @@ function SelectVariableInput({
showRetryButton,
showIncompleteDataMessage,
onSearch,
waitingMessage,
}: SelectVariableInputProps): JSX.Element {
const commonProps = useMemo(
() => ({
@@ -78,7 +81,6 @@ function SelectVariableInput({
className: 'variable-select',
popupClassName: 'dropdown-styles',
getPopupContainer: popupContainer,
style: SelectItemStyle,
showSearch: true,
bordered: false,
@@ -86,6 +88,8 @@ function SelectVariableInput({
'data-testid': 'variable-select',
onChange,
loading,
waitingMessage,
style: SelectItemStyle,
options,
errorMessage,
onRetry,
@@ -101,6 +105,7 @@ function SelectVariableInput({
defaultValue,
onChange,
loading,
waitingMessage,
options,
value,
errorMessage,

View File

@@ -47,14 +47,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -69,14 +61,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -92,14 +76,6 @@ describe('VariableItem', () => {
variableData={mockVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -133,14 +109,6 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -163,14 +131,6 @@ describe('VariableItem', () => {
variableData={customVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);
@@ -185,14 +145,6 @@ describe('VariableItem', () => {
variableData={mockCustomVariableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={(): void => {}}
dependencyData={{
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
}}
/>
</MockQueryClientProvider>,
);

View File

@@ -1,7 +1,6 @@
import { memo } from 'react';
import { InfoCircleOutlined } from '@ant-design/icons';
import { Tooltip, Typography } from 'antd';
import { IDependencyData } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import CustomVariableInput from './CustomVariableInput';
@@ -21,18 +20,12 @@ export interface VariableItemProps {
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dependencyData: IDependencyData | null;
}
function VariableItem({
variableData,
onValueUpdate,
existingVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dependencyData,
}: VariableItemProps): JSX.Element {
const { name, description, type: variableType } = variableData;
@@ -65,9 +58,6 @@ function VariableItem({
variableData={variableData}
onValueUpdate={onValueUpdate}
existingVariables={existingVariables}
variablesToGetUpdated={variablesToGetUpdated}
setVariablesToGetUpdated={setVariablesToGetUpdated}
dependencyData={dependencyData}
/>
)}
{variableType === 'DYNAMIC' && (

View File

@@ -7,6 +7,19 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DynamicVariableInput';
// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));
// Don't mock the components - use real ones
// Mock for useQuery
@@ -217,9 +230,10 @@ describe('DynamicVariableInput Component', () => {
'',
'Traces',
'service.name',
0, // variableFetchCycleId
],
expect.objectContaining({
enabled: true, // Type is 'DYNAMIC'
enabled: true, // isVariableFetching is true from mock
queryFn: expect.any(Function),
onSuccess: expect.any(Function),
onError: expect.any(Function),

View File

@@ -8,14 +8,6 @@ import '@testing-library/jest-dom/extend-expect';
import VariableItem from '../VariableItem';
const mockOnValueUpdate = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
const baseDependencyData = {
order: [],
graph: {},
parentDependencyGraph: {},
hasCycle: false,
};
const TEST_VARIABLE_ID = 'test_variable';
const VARIABLE_SELECT_TESTID = 'variable-select';
@@ -31,9 +23,6 @@ const renderVariableItem = (
variableData={variableData}
existingVariables={{}}
onValueUpdate={mockOnValueUpdate}
variablesToGetUpdated={[]}
setVariablesToGetUpdated={mockSetVariablesToGetUpdated}
dependencyData={baseDependencyData}
/>
</MockQueryClientProvider>,
);

View File

@@ -2,14 +2,12 @@ import {
buildDependencies,
buildDependencyGraph,
buildParentDependencyGraph,
checkAPIInvocation,
onUpdateVariableNode,
VariableGraph,
} from '../util';
import {
buildDependenciesMock,
buildGraphMock,
checkAPIInvocationMock,
onUpdateVariableNodeMock,
} from './mock';
@@ -72,97 +70,6 @@ describe('dashboardVariables - utilities and processors', () => {
});
});
describe('checkAPIInvocation', () => {
const {
variablesToGetUpdated,
variableData,
parentDependencyGraph,
} = checkAPIInvocationMock;
const mockRootElement = {
name: 'deployment_environment',
key: '036a47cd-9ffc-47de-9f27-0329198964a8',
id: '036a47cd-9ffc-47de-9f27-0329198964a8',
modificationUUID: '5f71b591-f583-497c-839d-6a1590c3f60f',
selectedValue: 'production',
type: 'QUERY',
// ... other properties omitted for brevity
} as any;
describe('edge cases', () => {
it('should return false when variableData is empty', () => {
expect(
checkAPIInvocation(
variablesToGetUpdated,
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return true when parentDependencyGraph is empty', () => {
expect(
checkAPIInvocation(variablesToGetUpdated, variableData, {}),
).toBeFalsy();
});
});
describe('variable sequences', () => {
it('should return true for valid sequence', () => {
expect(
checkAPIInvocation(
['k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return false for invalid sequence', () => {
expect(
checkAPIInvocation(
['k8s_cluster_name', 'k8s_node_name', 'k8s_namespace_name'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
it('should return false when variableData is not in sequence', () => {
expect(
checkAPIInvocation(
['deployment_environment', 'service_name', 'endpoint'],
variableData,
parentDependencyGraph,
),
).toBeFalsy();
});
});
describe('root element behavior', () => {
it('should return true for valid root element sequence', () => {
expect(
checkAPIInvocation(
[
'deployment_environment',
'service_name',
'endpoint',
'http_status_code',
],
mockRootElement,
parentDependencyGraph,
),
).toBeTruthy();
});
it('should return true for empty variablesToGetUpdated array', () => {
expect(
checkAPIInvocation([], mockRootElement, parentDependencyGraph),
).toBeTruthy();
});
});
});
describe('Graph Building Utilities', () => {
const { graph } = buildGraphMock;
const { variables } = buildDependenciesMock;
@@ -223,10 +130,86 @@ describe('dashboardVariables - utilities and processors', () => {
},
hasCycle: false,
cycleNodes: undefined,
transitiveDescendants: {
deployment_environment: ['service_name', 'endpoint', 'http_status_code'],
endpoint: ['http_status_code'],
environment: [],
http_status_code: [],
k8s_cluster_name: ['k8s_node_name', 'k8s_namespace_name'],
k8s_namespace_name: [],
k8s_node_name: ['k8s_namespace_name'],
service_name: ['endpoint', 'http_status_code'],
},
};
expect(buildDependencyGraph(graph)).toEqual(expected);
});
it('should return empty transitiveDescendants for an empty graph', () => {
const result = buildDependencyGraph({});
expect(result.transitiveDescendants).toEqual({});
expect(result.order).toEqual([]);
expect(result.hasCycle).toBe(false);
});
it('should compute transitiveDescendants for a linear chain (a -> b -> c)', () => {
const linearGraph: VariableGraph = {
a: ['b'],
b: ['c'],
c: [],
};
const result = buildDependencyGraph(linearGraph);
expect(result.transitiveDescendants).toEqual({
a: ['b', 'c'],
b: ['c'],
c: [],
});
});
it('should compute transitiveDescendants for a diamond dependency (a -> b, a -> c, b -> d, c -> d)', () => {
const diamondGraph: VariableGraph = {
a: ['b', 'c'],
b: ['d'],
c: ['d'],
d: [],
};
const result = buildDependencyGraph(diamondGraph);
expect(result.transitiveDescendants.a).toEqual(
expect.arrayContaining(['b', 'c', 'd']),
);
expect(result.transitiveDescendants.a).toHaveLength(3);
expect(result.transitiveDescendants.b).toEqual(['d']);
expect(result.transitiveDescendants.c).toEqual(['d']);
expect(result.transitiveDescendants.d).toEqual([]);
});
it('should handle disconnected components in transitiveDescendants', () => {
const disconnectedGraph: VariableGraph = {
a: ['b'],
b: [],
x: ['y'],
y: [],
};
const result = buildDependencyGraph(disconnectedGraph);
expect(result.transitiveDescendants.a).toEqual(['b']);
expect(result.transitiveDescendants.b).toEqual([]);
expect(result.transitiveDescendants.x).toEqual(['y']);
expect(result.transitiveDescendants.y).toEqual([]);
});
it('should return empty transitiveDescendants for all leaf nodes', () => {
const leafOnlyGraph: VariableGraph = {
a: [],
b: [],
c: [],
};
const result = buildDependencyGraph(leafOnlyGraph);
expect(result.transitiveDescendants).toEqual({
a: [],
b: [],
c: [],
});
});
});
describe('buildDependencies', () => {

View File

@@ -1,36 +1,3 @@
/* eslint-disable sonarjs/no-duplicate-string */
export const checkAPIInvocationMock = {
variablesToGetUpdated: [],
variableData: {
name: 'k8s_node_name',
key: '4d71d385-beaf-4434-8dbf-c62be68049fc',
allSelected: false,
customValue: '',
description: '',
id: '4d71d385-beaf-4434-8dbf-c62be68049fc',
modificationUUID: '77233d3c-96d7-4ccb-aa9d-11b04d563068',
multiSelect: false,
order: 6,
queryValue:
"SELECT JSONExtractString(labels, 'k8s_node_name') AS k8s_node_name\nFROM signoz_metrics.distributed_time_series_v4_1day\nWHERE metric_name = 'k8s_node_cpu_time' AND JSONExtractString(labels, 'k8s_cluster_name') = {{.k8s_cluster_name}}\nGROUP BY k8s_node_name",
selectedValue: 'gke-signoz-saas-si-consumer-bsc-e2sd4-a6d430fa-gvm2',
showALLOption: false,
sort: 'DISABLED',
textboxValue: '',
type: 'QUERY',
},
parentDependencyGraph: {
deployment_environment: [],
service_name: ['deployment_environment'],
endpoint: ['deployment_environment', 'service_name'],
http_status_code: ['endpoint'],
k8s_cluster_name: [],
environment: [],
k8s_node_name: ['k8s_cluster_name'],
k8s_namespace_name: ['k8s_cluster_name', 'k8s_node_name'],
},
} as any;
export const onUpdateVariableNodeMock = {
nodeToUpdate: 'deployment_environment',
graph: {

View File

@@ -1,9 +1,16 @@
import { OptionData } from 'components/NewSelect/types';
import { isEmpty, isNull } from 'lodash-es';
import { SOMETHING_WENT_WRONG } from 'constants/api';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isEmpty } from 'lodash-es';
import {
IDashboardVariables,
IDependencyData,
} from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
export function areArraysEqual(
@@ -45,30 +52,16 @@ const getDependentVariablesBasedOnVariableName = (
}
return variables
?.map((variable: any) => {
.map((variable) => {
if (variable.type === 'QUERY') {
// Combined pattern for all formats
// {{.variable_name}} - original format
// $variable_name - dollar prefix format
// [[variable_name]] - square bracket format
// {{variable_name}} - without dot format
const patterns = [
`\\{\\{\\s*?\\.${variableName}\\s*?\\}\\}`, // {{.var}}
`\\{\\{\\s*${variableName}\\s*\\}\\}`, // {{var}}
`\\$${variableName}\\b`, // $var
`\\[\\[\\s*${variableName}\\s*\\]\\]`, // [[var]]
];
const combinedRegex = new RegExp(patterns.join('|'));
const queryValue = variable.queryValue || '';
const dependVarReMatch = queryValue.match(combinedRegex);
if (dependVarReMatch !== null && dependVarReMatch.length > 0) {
if (textContainsVariableReference(queryValue, variableName)) {
return variable.name;
}
}
return null;
})
.filter((val: string | null) => !isNull(val));
.filter((val): val is string => val !== null);
};
export type VariableGraph = Record<string, string[]>;
@@ -246,10 +239,26 @@ export const buildDependencyGraph = (
const hasCycle = topologicalOrder.length !== Object.keys(dependencies)?.length;
// Pre-compute transitive descendants by walking topological order in reverse.
// Each node's transitive descendants = direct children + their transitive descendants.
const transitiveDescendants: VariableGraph = {};
for (let i = topologicalOrder.length - 1; i >= 0; i--) {
const node = topologicalOrder[i];
const desc = new Set<string>();
for (const child of adjList[node] || []) {
desc.add(child);
for (const d of transitiveDescendants[child] || []) {
desc.add(d);
}
}
transitiveDescendants[node] = Array.from(desc);
}
return {
order: topologicalOrder,
graph: adjList,
parentDependencyGraph: buildParentDependencyGraph(adjList),
transitiveDescendants,
hasCycle,
cycleNodes,
};
@@ -284,33 +293,6 @@ export const onUpdateVariableNode = (
});
};
export const checkAPIInvocation = (
variablesToGetUpdated: string[],
variableData: IDashboardVariable,
parentDependencyGraph?: VariableGraph,
): boolean => {
if (isEmpty(variableData.name)) {
return false;
}
if (isEmpty(parentDependencyGraph)) {
return false;
}
// if no dependency then true
const haveDependency =
parentDependencyGraph?.[variableData.name || '']?.length > 0;
if (!haveDependency) {
return true;
}
// if variable is in the list and has dependency then check if its the top element in the queue then true else false
return (
variablesToGetUpdated.length > 0 &&
variablesToGetUpdated[0] === variableData.name
);
};
export const getOptionsForDynamicVariable = (
normalizedValues: (string | number | boolean)[],
relatedValues: string[],
@@ -375,3 +357,130 @@ export const getSelectValue = (
}
return selectedValue?.toString();
};
/**
* Merges multiple arrays of values into a single deduplicated string array.
*/
export function mergeUniqueStrings(
...arrays: (string | number | boolean)[][]
): string[] {
return [...new Set(arrays.flatMap((arr) => arr.map((v) => v.toString())))];
}
function isEligibleFilterVariable(
variable: IDashboardVariable,
currentVariableId: string,
): boolean {
if (variable.id === currentVariableId) {
return false;
}
if (variable.type !== 'DYNAMIC') {
return false;
}
if (!variable.dynamicVariablesAttribute) {
return false;
}
if (!variable.selectedValue || isEmpty(variable.selectedValue)) {
return false;
}
return !(variable.showALLOption && variable.allSelected);
}
function formatQueryValue(val: string): string {
const numValue = Number(val);
if (!Number.isNaN(numValue) && Number.isFinite(numValue)) {
return val;
}
return `'${val.replace(/'/g, "\\'")}'`;
}
function buildQueryPart(attribute: string, values: string[]): string {
const formatted = values.map(formatQueryValue);
if (formatted.length === 1) {
return `${attribute} = ${formatted[0]}`;
}
return `${attribute} IN [${formatted.join(', ')}]`;
}
/**
* Builds a filter query string from sibling dynamic variables' selected values.
* e.g. `k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'`
*/
export function buildExistingDynamicVariableQuery(
existingVariables: IDashboardVariables | null,
currentVariableId: string,
hasDynamicAttribute: boolean,
): string {
if (!existingVariables || !hasDynamicAttribute) {
return '';
}
const queryParts: string[] = [];
for (const variable of Object.values(existingVariables)) {
// Skip the current variable being processed
if (!isEligibleFilterVariable(variable, currentVariableId)) {
continue;
}
const rawValues = Array.isArray(variable.selectedValue)
? variable.selectedValue
: [variable.selectedValue];
// Filter out empty values and convert to strings
const validValues = rawValues
.filter(
(val): val is string | number | boolean =>
val !== null && val !== undefined && val !== '',
)
.map((val) => val.toString());
if (validValues.length > 0 && variable.dynamicVariablesAttribute) {
queryParts.push(
buildQueryPart(variable.dynamicVariablesAttribute, validValues),
);
}
}
return queryParts.join(' AND ');
}
function isVariableInActiveFetchState(state: string | undefined): boolean {
return state === 'loading' || state === 'revalidating';
}
/**
* Completes or fails a variable's fetch state machine transition.
* No-ops if the variable is not currently in an active fetch state.
*/
export function settleVariableFetch(
name: string | undefined,
outcome: 'complete' | 'failure',
): void {
if (!name) {
return;
}
const currentState = variableFetchStore.getSnapshot().states[name];
if (!isVariableInActiveFetchState(currentState)) {
return;
}
if (outcome === 'complete') {
onVariableFetchComplete(name);
} else {
onVariableFetchFailure(name);
}
}
export function extractErrorMessage(
error: { message?: string } | null,
): string {
if (!error) {
return SOMETHING_WENT_WRONG;
}
return (
error.message ||
'Please make sure configuration is valid and you have required setup and permissions'
);
}
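
The new transitiveDescendants map is precomputed once per graph build, which lets cascade logic resolve "everything downstream of this variable" with a single lookup. An assumed sketch of how enqueueDescendantsOfVariable could consume it; the descendantsToRefetch helper is hypothetical and the store-side implementation is not shown in this compare view:

import { VariableGraph } from './util';

// Assumed sketch: the store-side enqueue logic is not shown in this diff.
function descendantsToRefetch(
  changedVariable: string,
  transitiveDescendants: VariableGraph,
): string[] {
  return transitiveDescendants[changedVariable] ?? [];
}

// With the diamond graph from the tests (a -> b, a -> c, b -> d, c -> d):
// descendantsToRefetch('a', { a: ['b', 'c', 'd'], b: ['d'], c: ['d'], d: [] })
// returns ['b', 'c', 'd'], so every transitive descendant refetches once.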

View File

@@ -1,4 +1,31 @@
import { areArraysEqual, onUpdateVariableNode, VariableGraph } from './util';
jest.mock('providers/Dashboard/store/variableFetchStore', () => ({
variableFetchStore: {
getSnapshot: jest.fn(),
},
onVariableFetchComplete: jest.fn(),
onVariableFetchFailure: jest.fn(),
}));
import {
onVariableFetchComplete,
onVariableFetchFailure,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
areArraysEqual,
buildExistingDynamicVariableQuery,
extractErrorMessage,
mergeUniqueStrings,
onUpdateVariableNode,
settleVariableFetch,
VariableGraph,
} from './util';
// ────────────────────────────────────────────────────────────────
// Existing tests
// ────────────────────────────────────────────────────────────────
describe('areArraysEqual', () => {
it('should return true for equal arrays with same order', () => {
@@ -149,3 +176,348 @@ describe('onUpdateVariableNode', () => {
expect(visited).toEqual(['namespace', 'service', 'pod']);
});
});
// ────────────────────────────────────────────────────────────────
// New tests for functions added in recent commits
// ────────────────────────────────────────────────────────────────
function makeDynamicVar(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'DYNAMIC',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
allSelected: false,
dynamicVariablesAttribute: 'attr',
selectedValue: 'some-value',
...overrides,
} as IDashboardVariable;
}
describe('mergeUniqueStrings', () => {
it('should merge two arrays and deduplicate', () => {
expect(mergeUniqueStrings(['a', 'b'], ['b', 'c'])).toEqual(['a', 'b', 'c']);
});
it('should convert numbers and booleans to strings', () => {
expect(mergeUniqueStrings([1, true, 'hello'], [2, false])).toEqual([
'1',
'true',
'hello',
'2',
'false',
]);
});
it('should deduplicate when number and its string form both appear', () => {
expect(mergeUniqueStrings([42], ['42'])).toEqual(['42']);
});
it('should handle a single array', () => {
expect(mergeUniqueStrings(['x', 'y', 'x'])).toEqual(['x', 'y']);
});
it('should handle three or more arrays', () => {
expect(mergeUniqueStrings(['a'], ['b'], ['c'], ['a', 'c'])).toEqual([
'a',
'b',
'c',
]);
});
it('should return empty array when no arrays are provided', () => {
expect(mergeUniqueStrings()).toEqual([]);
});
it('should return empty array when all input arrays are empty', () => {
expect(mergeUniqueStrings([], [], [])).toEqual([]);
});
it('should preserve order of first occurrence', () => {
expect(mergeUniqueStrings(['c', 'a'], ['b', 'a'])).toEqual(['c', 'a', 'b']);
});
});
describe('buildExistingDynamicVariableQuery', () => {
// --- Guard clauses ---
it('should return empty string when existingVariables is null', () => {
expect(buildExistingDynamicVariableQuery(null, 'v1', true)).toBe('');
});
it('should return empty string when hasDynamicAttribute is false', () => {
const variables = { v2: makeDynamicVar({ id: 'v2' }) };
expect(buildExistingDynamicVariableQuery(variables, 'v1', false)).toBe('');
});
// --- Eligibility filtering ---
it('should skip the current variable (same id)', () => {
const variables = {
v1: makeDynamicVar({
id: 'v1',
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip non-DYNAMIC variables', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables without dynamicVariablesAttribute', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: undefined,
selectedValue: 'val',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with null selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: null }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with empty string selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: '' }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables with empty array selectedValue', () => {
const variables = {
v2: makeDynamicVar({ id: 'v2', selectedValue: [] }),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should skip variables where showALLOption and allSelected are both true', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: true,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
it('should include variable with showALLOption true but allSelected false', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
showALLOption: true,
allSelected: false,
dynamicVariablesAttribute: 'ns',
selectedValue: 'prod',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"ns = 'prod'",
);
});
// --- Value formatting ---
it('should quote string values in the query', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: 'zeus',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name = 'zeus'",
);
});
it('should leave numeric values unquoted', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: '200',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
'http.status_code = 200',
);
});
it('should escape single quotes in string values', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'user.name',
selectedValue: "O'Brien",
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"user.name = 'O\\'Brien'",
);
});
it('should build an IN clause for array selectedValue with multiple items', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene']",
);
});
it('should handle mix of numeric and string values in IN clause', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'http.status_code',
selectedValue: ['200', 'unknown'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"http.status_code IN [200, 'unknown']",
);
});
it('should filter out empty string values from array', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'region',
selectedValue: ['us-east', '', 'eu-west'],
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"region IN ['us-east', 'eu-west']",
);
});
// --- Multiple siblings ---
it('should join multiple sibling variables with AND', () => {
const variables = {
v2: makeDynamicVar({
id: 'v2',
dynamicVariablesAttribute: 'k8s.namespace.name',
selectedValue: ['zeus', 'gene'],
}),
v3: makeDynamicVar({
id: 'v3',
dynamicVariablesAttribute: 'doc_op_type',
selectedValue: 'test',
}),
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe(
"k8s.namespace.name IN ['zeus', 'gene'] AND doc_op_type = 'test'",
);
});
it('should return empty string when no variables are eligible', () => {
const variables = {
v1: makeDynamicVar({ id: 'v1' }), // same as current — skipped
v2: makeDynamicVar({ id: 'v2', type: 'QUERY' as any }), // not DYNAMIC
v3: makeDynamicVar({ id: 'v3', selectedValue: null }), // no value
};
expect(buildExistingDynamicVariableQuery(variables, 'v1', true)).toBe('');
});
});
describe('settleVariableFetch', () => {
const mockGetSnapshot = variableFetchStore.getSnapshot as jest.Mock;
const mockComplete = onVariableFetchComplete as jest.Mock;
const mockFailure = onVariableFetchFailure as jest.Mock;
beforeEach(() => {
jest.clearAllMocks();
});
it('should no-op when name is undefined', () => {
settleVariableFetch(undefined, 'complete');
expect(mockGetSnapshot).not.toHaveBeenCalled();
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
});
it.each(['idle', 'waiting', 'error', undefined] as const)(
'should no-op when variable state is %s',
(state) => {
mockGetSnapshot.mockReturnValue({ states: { myVar: state } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).not.toHaveBeenCalled();
expect(mockFailure).not.toHaveBeenCalled();
},
);
it('should call onVariableFetchComplete when state is loading and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});
it('should call onVariableFetchComplete when state is revalidating and outcome is complete', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'complete');
expect(mockComplete).toHaveBeenCalledWith('myVar');
expect(mockFailure).not.toHaveBeenCalled();
});
it('should call onVariableFetchFailure when state is loading and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'loading' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
it('should call onVariableFetchFailure when state is revalidating and outcome is failure', () => {
mockGetSnapshot.mockReturnValue({ states: { myVar: 'revalidating' } });
settleVariableFetch('myVar', 'failure');
expect(mockFailure).toHaveBeenCalledWith('myVar');
expect(mockComplete).not.toHaveBeenCalled();
});
});
describe('extractErrorMessage', () => {
const FALLBACK_MESSAGE =
'Please make sure configuration is valid and you have required setup and permissions';
it('should return SOMETHING_WENT_WRONG when error is null', () => {
expect(extractErrorMessage(null)).toBe('Something went wrong');
});
it('should return the error message when it exists', () => {
expect(extractErrorMessage({ message: 'Query timeout' })).toBe(
'Query timeout',
);
});
it('should return fallback when error object has no message property', () => {
expect(extractErrorMessage({})).toBe(FALLBACK_MESSAGE);
});
it('should return fallback when error.message is empty string', () => {
expect(extractErrorMessage({ message: '' })).toBe(FALLBACK_MESSAGE);
});
it('should return fallback when error.message is undefined', () => {
expect(extractErrorMessage({ message: undefined })).toBe(FALLBACK_MESSAGE);
});
});
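
Read together, the expectations above pin down how buildExistingDynamicVariableQuery formats values: strings are single-quoted with embedded quotes escaped, purely numeric strings stay unquoted, arrays become IN clauses with empty entries dropped, and the clauses of eligible sibling variables are joined with AND. A minimal formatting sketch consistent with these tests (helper names are hypothetical; this is not the shipped implementation):

// Hypothetical sketch inferred from the tests above; the real util may differ.
function formatDynamicValue(value: string): string {
  // Numeric strings stay unquoted ('200' -> 200); everything else is single-quoted
  // with embedded quotes escaped ("O'Brien" -> 'O\'Brien').
  if (value !== '' && !Number.isNaN(Number(value))) {
    return value;
  }
  return `'${value.replace(/'/g, "\\'")}'`;
}
function formatClause(attribute: string, selected: string | string[]): string {
  if (Array.isArray(selected)) {
    const values = selected.filter((v) => v !== '').map(formatDynamicValue);
    return `${attribute} IN [${values.join(', ')}]`;
  }
  return `${attribute} = ${formatDynamicValue(selected)}`;
}
// formatClause('http.status_code', ['200', 'unknown'])
//   -> "http.status_code IN [200, 'unknown']"
// Eligible sibling clauses are then joined with ' AND '.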

View File

@@ -1,17 +1,11 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { VariableItemProps } from '../VariableItem';
export interface VariableSelectStrategy {
handleChange(params: {
value: string | string[];
variableData: IDashboardVariable;
variableData: VariableItemProps['variableData'];
onValueUpdate: VariableItemProps['onValueUpdate'];
optionsData: (string | number | boolean)[];
allAvailableOptionStrings: string[];
onValueUpdate: (
name: string,
id: string,
value: IDashboardVariable['selectedValue'],
allSelected: boolean,
haveCustomValuesSelected?: boolean,
) => void;
}): void;
}

View File

@@ -17,6 +17,19 @@ import { IDashboardVariable } from 'types/api/dashboard/getAll';
import DynamicVariableInput from '../DashboardVariablesSelection/DynamicVariableInput';
// Mock useVariableFetchState to return "fetching" state so useQuery is enabled
jest.mock('hooks/dashboard/useVariableFetchState', () => ({
useVariableFetchState: (): Record<string, unknown> => ({
variableFetchCycleId: 0,
variableFetchState: 'loading',
isVariableSettled: false,
isVariableFetching: true,
hasVariableFetchedOnce: false,
isVariableWaitingForDependencies: false,
variableDependencyWaitMessage: '',
}),
}));
// Mock the getFieldValues API
jest.mock('api/dynamicVariables/getFieldValues', () => ({
getFieldValues: jest.fn(),
@@ -95,7 +108,7 @@ describe('Dynamic Variable Default Behavior', () => {
}
}
if (queryFn) {
queryFn();
queryFn({ signal: undefined });
}
}
}, [enabled, variableName, dynamicVarsKey]); // Only depend on enabled/keys
@@ -234,6 +247,7 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});
@@ -487,6 +501,7 @@ describe('Dynamic Variable Default Behavior', () => {
'2023-01-01T00:00:00Z',
'2023-01-02T00:00:00Z',
'',
undefined, // signal
);
});

View File

@@ -49,15 +49,11 @@ const mockDashboard = {
// Mock the dashboard provider with stable functions to prevent infinite loops
const mockSetSelectedDashboard = jest.fn();
const mockUpdateLocalStorageDashboardVariables = jest.fn();
const mockSetVariablesToGetUpdated = jest.fn();
jest.mock('providers/Dashboard/Dashboard', () => ({
useDashboard: (): any => ({
selectedDashboard: mockDashboard,
setSelectedDashboard: mockSetSelectedDashboard,
updateLocalStorageDashboardVariables: mockUpdateLocalStorageDashboardVariables,
variablesToGetUpdated: ['env'], // Stable initial value
setVariablesToGetUpdated: mockSetVariablesToGetUpdated,
}),
}));

View File

@@ -0,0 +1,117 @@
import { AlignedData } from 'uplot';
import { getInitialStackedBands, stack } from '../stackUtils';
describe('stackUtils', () => {
describe('stack', () => {
const neverOmit = (): boolean => false;
it('preserves time axis as first row', () => {
const data: AlignedData = [
[100, 200, 300],
[1, 2, 3],
[4, 5, 6],
];
const { data: result } = stack(data, neverOmit);
expect(result[0]).toEqual([100, 200, 300]);
});
it('stacks value series cumulatively (last = raw, first = total)', () => {
// Time, then 3 value series. Stack order: last series stays raw, then we add upward.
const data: AlignedData = [
[0, 1, 2],
[1, 2, 3], // series 1
[4, 5, 6], // series 2
[7, 8, 9], // series 3
];
const { data: result } = stack(data, neverOmit);
// result[1] = s1+s2+s3, result[2] = s2+s3, result[3] = s3
expect(result[1]).toEqual([12, 15, 18]); // 1+4+7, 2+5+8, 3+6+9
expect(result[2]).toEqual([11, 13, 15]); // 4+7, 5+8, 6+9
expect(result[3]).toEqual([7, 8, 9]);
});
it('treats null values as 0 when stacking', () => {
const data: AlignedData = [
[0, 1],
[1, null],
[null, 10],
];
const { data: result } = stack(data, neverOmit);
expect(result[1]).toEqual([1, 10]); // total
expect(result[2]).toEqual([0, 10]); // last series with null→0
});
it('copies omitted series as-is without accumulating', () => {
// Omit series 2 (index 2); series 1 and 3 are stacked.
const data: AlignedData = [
[0, 1],
[10, 20], // series 1
[100, 200], // series 2 - omitted
[1, 2], // series 3
];
const omitSeries2 = (i: number): boolean => i === 2;
const { data: result } = stack(data, omitSeries2);
// series 3 raw: [1, 2]; series 2 omitted: [100, 200] as-is; series 1 stacked with s3: [11, 22]
expect(result[1]).toEqual([11, 22]); // 10+1, 20+2
expect(result[2]).toEqual([100, 200]); // copied, not stacked
expect(result[3]).toEqual([1, 2]);
});
it('returns bands between consecutive visible series when none omitted', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
[3, 4],
[5, 6],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([{ series: [1, 2] }, { series: [2, 3] }]);
});
it('returns bands only between visible series when some are omitted', () => {
// 4 value series; omit index 2. Visible: 1, 3, 4. Bands: [1,3], [3,4]
const data: AlignedData = [[0], [1], [2], [3], [4]];
const omitSeries2 = (i: number): boolean => i === 2;
const { bands } = stack(data, omitSeries2);
expect(bands).toEqual([{ series: [1, 3] }, { series: [3, 4] }]);
});
it('returns empty bands when only one value series', () => {
const data: AlignedData = [
[0, 1],
[1, 2],
];
const { bands } = stack(data, neverOmit);
expect(bands).toEqual([]);
});
});
describe('getInitialStackedBands', () => {
it('returns one band between each consecutive pair for seriesCount 3', () => {
expect(getInitialStackedBands(3)).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
]);
});
it('returns empty array for seriesCount 0 or 1', () => {
expect(getInitialStackedBands(0)).toEqual([]);
expect(getInitialStackedBands(1)).toEqual([]);
});
it('returns single band for seriesCount 2', () => {
expect(getInitialStackedBands(2)).toEqual([{ series: [1, 2] }]);
});
it('returns bands [1,2], [2,3], ..., [n-1, n] for seriesCount n', () => {
const bands = getInitialStackedBands(5);
expect(bands).toEqual([
{ series: [1, 2] },
{ series: [2, 3] },
{ series: [3, 4] },
{ series: [4, 5] },
]);
});
});
});

View File

@@ -0,0 +1,116 @@
import uPlot, { AlignedData } from 'uplot';
/**
* Stack data cumulatively (top-down: first series = top, last = bottom).
* When `omit(seriesIndex)` returns true, that series is excluded from stacking.
*/
export function stack(
data: AlignedData,
omit: (seriesIndex: number) => boolean,
): { data: AlignedData; bands: uPlot.Band[] } {
const timeAxis = data[0];
const pointCount = timeAxis.length;
const valueSeriesCount = data.length - 1; // exclude time axis
const stackedSeries = buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
});
const bands = buildFillBands(valueSeriesCount + 1, omit); // +1 for 1-based series indices
return {
data: [timeAxis, ...stackedSeries] as AlignedData,
bands,
};
}
interface BuildStackedSeriesParams {
data: AlignedData;
valueSeriesCount: number;
pointCount: number;
omit: (seriesIndex: number) => boolean;
}
/**
* Accumulate from last series upward: last series = raw values, first = total.
* Omitted series are copied as-is (no accumulation).
*/
function buildStackedSeries({
data,
valueSeriesCount,
pointCount,
omit,
}: BuildStackedSeriesParams): (number | null)[][] {
const stackedSeries: (number | null)[][] = Array(valueSeriesCount);
const cumulativeSums = Array(pointCount).fill(0) as number[];
for (let seriesIndex = valueSeriesCount; seriesIndex >= 1; seriesIndex--) {
const rawValues = data[seriesIndex] as (number | null)[];
if (omit(seriesIndex)) {
stackedSeries[seriesIndex - 1] = rawValues;
} else {
stackedSeries[seriesIndex - 1] = rawValues.map((rawValue, pointIndex) => {
const numericValue = rawValue == null ? 0 : Number(rawValue);
return (cumulativeSums[pointIndex] += numericValue);
});
}
}
return stackedSeries;
}
/**
* Bands define fill between consecutive visible series for stacked appearance.
* uPlot format: [upperSeriesIdx, lowerSeriesIdx].
*/
function buildFillBands(
seriesLength: number,
omit: (seriesIndex: number) => boolean,
): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesLength; seriesIndex++) {
if (omit(seriesIndex)) {
continue;
}
const nextVisibleSeriesIndex = findNextVisibleSeriesIndex(
seriesLength,
seriesIndex,
omit,
);
if (nextVisibleSeriesIndex !== -1) {
bands.push({ series: [seriesIndex, nextVisibleSeriesIndex] });
}
}
return bands;
}
function findNextVisibleSeriesIndex(
seriesLength: number,
afterIndex: number,
omit: (seriesIndex: number) => boolean,
): number {
for (let i = afterIndex + 1; i < seriesLength; i++) {
if (!omit(i)) {
return i;
}
}
return -1;
}
/**
* Returns band indices for initial stacked state (no series omitted).
* Top-down: first series at top, band fills between consecutive series.
* uPlot band format: [upperSeriesIdx, lowerSeriesIdx].
*/
export function getInitialStackedBands(seriesCount: number): uPlot.Band[] {
const bands: uPlot.Band[] = [];
for (let seriesIndex = 1; seriesIndex < seriesCount; seriesIndex++) {
bands.push({ series: [seriesIndex, seriesIndex + 1] });
}
return bands;
}
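
A quick usage illustration of the contract documented above (a sketch for reference only, not part of the shipped file):

// Three value series over two timestamps, nothing omitted.
const { data, bands } = stack(
  [
    [0, 60], // time axis
    [1, 2],  // series 1 (rendered on top as the running total)
    [4, 5],  // series 2
    [7, 8],  // series 3 (stays raw at the bottom)
  ],
  () => false,
);
// data  -> [[0, 60], [12, 15], [11, 13], [7, 8]]
// bands -> [{ series: [1, 2] }, { series: [2, 3] }]
// getInitialStackedBands(3) yields the same bands without needing the data.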

View File

@@ -0,0 +1,313 @@
import { renderHook } from '@testing-library/react';
import uPlot from 'uplot';
import type { UseBarChartStackingParams } from '../useBarChartStacking';
import { useBarChartStacking } from '../useBarChartStacking';
type MockConfig = { addHook: jest.Mock };
function asConfig(c: MockConfig): UseBarChartStackingParams['config'] {
return (c as unknown) as UseBarChartStackingParams['config'];
}
function createMockConfig(): {
config: MockConfig;
invokeSetData: (plot: uPlot) => void;
invokeSetSeries: (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
) => void;
removeSetData: jest.Mock;
removeSetSeries: jest.Mock;
} {
let setDataHandler: ((plot: uPlot) => void) | null = null;
let setSeriesHandler:
| ((plot: uPlot, seriesIndex: number | null, opts: uPlot.Series) => void)
| null = null;
const removeSetData = jest.fn();
const removeSetSeries = jest.fn();
const addHook = jest.fn(
(
hookName: string,
handler: (plot: uPlot, ...args: unknown[]) => void,
): (() => void) => {
if (hookName === 'setData') {
setDataHandler = handler as (plot: uPlot) => void;
return removeSetData;
}
if (hookName === 'setSeries') {
setSeriesHandler = handler as (
plot: uPlot,
seriesIndex: number | null,
opts: uPlot.Series,
) => void;
return removeSetSeries;
}
return jest.fn();
},
);
const config: MockConfig = { addHook };
const invokeSetData = (plot: uPlot): void => {
setDataHandler?.(plot);
};
const invokeSetSeries = (
plot: uPlot,
seriesIndex: number | null,
opts: Partial<uPlot.Series> & { focus?: boolean },
): void => {
setSeriesHandler?.(plot, seriesIndex, opts as uPlot.Series);
};
return {
config,
invokeSetData,
invokeSetSeries,
removeSetData,
removeSetSeries,
};
}
function createMockPlot(overrides: Partial<uPlot> = {}): uPlot {
return ({
data: [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
],
series: [{ show: true }, { show: true }, { show: true }],
delBand: jest.fn(),
addBand: jest.fn(),
setData: jest.fn(),
...overrides,
} as unknown) as uPlot;
}
describe('useBarChartStacking', () => {
it('returns data as-is when isStackedBarChart is false', () => {
const data: uPlot.AlignedData = [
[100, 200],
[1, 2],
[3, 4],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: null,
}),
);
expect(result.current).toBe(data);
});
it('returns data as-is when config is null and isStackedBarChart is true', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[4, 5],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
// Still returns stacked data (computed in useMemo); no hooks registered
expect(result.current[0]).toEqual([0, 1]);
expect(result.current[1]).toEqual([5, 7]); // stacked
expect(result.current[2]).toEqual([4, 5]);
});
it('returns stacked data when isStackedBarChart is true and multiple value series', () => {
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current[0]).toEqual([0, 1, 2]);
expect(result.current[1]).toEqual([12, 15, 18]); // s1+s2+s3
expect(result.current[2]).toEqual([11, 13, 15]); // s2+s3
expect(result.current[3]).toEqual([7, 8, 9]);
});
it('returns data as-is when only one value series (no stacking needed)', () => {
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
];
const { result } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: null,
}),
);
expect(result.current).toEqual(data);
});
it('registers setData and setSeries hooks when isStackedBarChart and config provided', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
expect(config.addHook).toHaveBeenCalledWith('setData', expect.any(Function));
expect(config.addHook).toHaveBeenCalledWith(
'setSeries',
expect.any(Function),
);
});
it('does not register hooks when isStackedBarChart is false', () => {
const { config } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: false,
config: asConfig(config),
}),
);
expect(config.addHook).not.toHaveBeenCalled();
});
it('calls cleanup when unmounted', () => {
const { config, removeSetData, removeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const { unmount } = renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
unmount();
expect(removeSetData).toHaveBeenCalled();
expect(removeSetSeries).toHaveBeenCalled();
});
it('re-stacks and updates plot when setData hook is invoked', () => {
const { config, invokeSetData } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1, 2],
[1, 2, 3],
[4, 5, 6],
];
const plot = createMockPlot({
data: [
[0, 1, 2],
[5, 7, 9],
[4, 5, 6],
],
});
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
invokeSetData(plot);
expect(plot.delBand).toHaveBeenCalledWith(null);
expect(plot.addBand).toHaveBeenCalled();
expect(plot.setData).toHaveBeenCalledWith(
expect.arrayContaining([
[0, 1, 2],
expect.any(Array), // stacked row 1
expect.any(Array), // stacked row 2
]),
);
});
it('re-stacks when setSeries hook is invoked (e.g. legend toggle)', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[10, 20],
[5, 10],
];
// Plot data must match unstacked length so canApplyStacking passes
const plot = createMockPlot({
data: [
[0, 1],
[15, 30],
[5, 10],
],
});
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
invokeSetSeries(plot, 1, { show: false });
expect(plot.setData).toHaveBeenCalled();
});
it('does not re-stack when setSeries is called with focus option', () => {
const { config, invokeSetSeries } = createMockConfig();
const data: uPlot.AlignedData = [
[0, 1],
[1, 2],
[3, 4],
];
const plot = createMockPlot();
renderHook(() =>
useBarChartStacking({
data,
isStackedBarChart: true,
config: asConfig(config),
}),
);
(plot.setData as jest.Mock).mockClear();
invokeSetSeries(plot, 1, { focus: true } as uPlot.Series);
expect(plot.setData).not.toHaveBeenCalled();
});
});
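
In short, the hook is a thin wrapper over stack(): it memoizes the stacked rows and, when a config builder is supplied, re-applies stacking through the builder's setData and setSeries hooks while ignoring focus-only setSeries calls. A minimal call sketch under those assumptions, with no config builder attached:

import uPlot from 'uplot';
import { useBarChartStacking } from '../useBarChartStacking';

// Sketch only: stacking without a config builder, so no uPlot hooks get registered.
function useStackedBarData(): uPlot.AlignedData {
  return useBarChartStacking({
    data: [
      [0, 1],   // time axis
      [10, 20], // series 1
      [5, 10],  // series 2
    ],
    isStackedBarChart: true,
    config: null,
  });
  // -> [[0, 1], [15, 30], [5, 10]]
  // With isStackedBarChart: false, or only one value series, the input is returned as-is.
}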

View File

@@ -11,6 +11,7 @@ import {
VisibilityMode,
} from 'lib/uPlotV2/config/types';
import { UPlotConfigBuilder } from 'lib/uPlotV2/config/UPlotConfigBuilder';
import { get } from 'lodash-es';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { Query } from 'types/api/queryBuilder/queryBuilderData';
@@ -77,6 +78,12 @@ export function prepareBarPanelConfig({
builder.setBands(getInitialStackedBands(seriesCount));
}
const stepIntervals: Record<string, number> = get(
apiResponse,
'data.newResult.meta.stepIntervals',
{},
);
const seriesList: QueryData[] = apiResponse?.data?.result || [];
seriesList.forEach((series) => {
const baseLabelName = getLabelName(
@@ -89,6 +96,8 @@ export function prepareBarPanelConfig({
? getLegend(series, currentQuery, baseLabelName)
: baseLabelName;
const currentStepInterval = get(stepIntervals, series.queryName, undefined);
builder.addSeries({
scaleKey: 'y',
drawStyle: DrawStyle.Bar,
@@ -101,6 +110,7 @@ export function prepareBarPanelConfig({
showPoints: VisibilityMode.Never,
pointSize: 5,
isDarkMode,
stepInterval: currentStepInterval,
});
});
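
For reference, the step-interval plumbing added above reduces to a small lookup keyed by queryName; the response path follows the lodash get call in the diff, while the surrounding shape is assumed. Values are read as seconds per bar and flow into builder.addSeries so the bar path builder can cap bar width to one step of x-axis time:

import { get } from 'lodash-es';

// Hedged sketch of the lookup above: stepIntervals is keyed by queryName
// (seconds per bar), e.g. { A: 60, B: 300 }; the response shape is assumed.
function getStepIntervalForQuery(
  apiResponse: unknown,
  queryName: string,
): number | undefined {
  const stepIntervals: Record<string, number> = get(
    apiResponse,
    'data.newResult.meta.stepIntervals',
    {},
  );
  return get(stepIntervals, queryName, undefined);
}
// The value is passed to builder.addSeries({ ..., stepInterval }) so the bar
// path builder can cap each bar's width to one step of x-axis time.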

View File

@@ -6,10 +6,12 @@ import { QueryParams } from 'constants/query';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { populateMultipleResults } from 'container/NewWidget/LeftContainer/WidgetGraph/util';
import { CustomTimeType } from 'container/TopNav/DateTimeSelectionV2/types';
import { useIsPanelWaitingOnVariable } from 'hooks/dashboard/useVariableFetchState';
import { useGetQueryRange } from 'hooks/queryBuilder/useGetQueryRange';
import { useIntersectionObserver } from 'hooks/useIntersectionObserver';
import { GetQueryResultsProps } from 'lib/dashboard/getQueryResults';
import { getDashboardVariables } from 'lib/dashboardVariables/getDashboardVariables';
import { getVariableReferencesInQuery } from 'lib/dashboardVariables/variableReference';
import getTimeString from 'lib/getTimeString';
import { isEqual } from 'lodash-es';
import isEmpty from 'lodash-es/isEmpty';
@@ -53,7 +55,6 @@ function GridCardGraph({
customOnRowClick,
customTimeRangeWindowForCoRelation,
enableDrillDown,
widgetsByDynamicVariableId,
}: GridCardGraphProps): JSX.Element {
const dispatch = useDispatch();
const [errorMessage, setErrorMessage] = useState<string>();
@@ -64,8 +65,8 @@ function GridCardGraph({
toScrollWidgetId,
setToScrollWidgetId,
setDashboardQueryRangeCalled,
variablesToGetUpdated,
} = useDashboard();
const { minTime, maxTime, selectedTime: globalSelectedInterval } = useSelector<
AppState,
GlobalReducer
@@ -117,10 +118,25 @@ function GridCardGraph({
const updatedQuery = widget?.query;
const referencedVariableNames = useMemo(() => {
if (!variables || !updatedQuery) {
return [];
}
const allNames = Object.values(variables)
.map((v) => v.name)
.filter((name): name is string => !!name);
return getVariableReferencesInQuery(updatedQuery, allNames);
}, [updatedQuery, variables]);
const isEmptyWidget =
widget?.id === PANEL_TYPES.EMPTY_WIDGET || isEmpty(widget);
const queryEnabledCondition = isVisible && !isEmptyWidget && isQueryEnabled;
const isPanelWaitingOnAnyVariable = useIsPanelWaitingOnVariable(
referencedVariableNames,
);
const queryEnabledCondition =
isVisible && !isEmptyWidget && isQueryEnabled && !isPanelWaitingOnAnyVariable;
const [requestData, setRequestData] = useState<GetQueryResultsProps>(() => {
if (widget.panelTypes !== PANEL_TYPES.LIST) {
@@ -177,27 +193,6 @@ function GridCardGraph({
[requestData.query],
);
// Bring back dependency on variable chaining for panels to refetch,
// but only for non-dynamic variables. We derive a stable token from
// the head of the variablesToGetUpdated queue when it's non-dynamic.
const nonDynamicVariableChainToken = useMemo(() => {
if (!variablesToGetUpdated || variablesToGetUpdated.length === 0) {
return undefined;
}
if (!variables) {
return undefined;
}
const headName = variablesToGetUpdated[0];
const variableObj = Object.values(variables).find(
(variable) => variable?.name === headName,
);
if (variableObj && variableObj.type !== 'DYNAMIC') {
return headName;
}
return undefined;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [variablesToGetUpdated, variables]);
const queryResponse = useGetQueryRange(
{
...requestData,
@@ -224,11 +219,7 @@ function GridCardGraph({
requestData,
variables
? Object.entries(variables).reduce((acc, [id, variable]) => {
if (
variable.type !== 'DYNAMIC' ||
(widgetsByDynamicVariableId?.[variable.id] &&
widgetsByDynamicVariableId?.[variable.id].includes(widget.id))
) {
if (variable.name && referencedVariableNames.includes(variable.name)) {
return { ...acc, [id]: variable.selectedValue };
}
return acc;
@@ -237,9 +228,6 @@ function GridCardGraph({
...(customTimeRange && customTimeRange.startTime && customTimeRange.endTime
? [customTimeRange.startTime, customTimeRange.endTime]
: []),
// Include non-dynamic variable chaining token to drive refetches
// only when a non-dynamic variable is at the head of the queue
...(nonDynamicVariableChainToken ? [nonDynamicVariableChainToken] : []),
],
retry(failureCount, error): boolean {
if (
@@ -252,7 +240,7 @@ function GridCardGraph({
return failureCount < 2;
},
keepPreviousData: true,
enabled: queryEnabledCondition && !nonDynamicVariableChainToken,
enabled: queryEnabledCondition,
refetchOnMount: false,
onError: (error) => {
const errorMessage =
@@ -319,7 +307,7 @@ function GridCardGraph({
threshold={threshold}
headerMenuList={menuList}
isFetchingResponse={
queryResponse.isFetching || variablesToGetUpdated.length > 0
queryResponse.isFetching || isPanelWaitingOnAnyVariable
}
setRequestData={setRequestData}
onClickHandler={onClickHandler}
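
The net effect of these changes is that a panel's query-range request is gated on the variables its query actually references rather than on the old variablesToGetUpdated queue. A condensed restatement of that predicate for clarity (a sketch, not new behavior):

// The same referencedVariableNames list also decides which selectedValues enter
// the react-query key, so changing an unrelated variable no longer refetches the panel.
function isPanelQueryEnabled(flags: {
  isVisible: boolean;
  isEmptyWidget: boolean;
  isQueryEnabled: boolean;
  isPanelWaitingOnAnyVariable: boolean; // useIsPanelWaitingOnVariable(referencedVariableNames)
}): boolean {
  return (
    flags.isVisible &&
    !flags.isEmptyWidget &&
    flags.isQueryEnabled &&
    !flags.isPanelWaitingOnAnyVariable
  );
}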

View File

@@ -72,7 +72,6 @@ export interface GridCardGraphProps {
customOnRowClick?: (record: RowData) => void;
customTimeRangeWindowForCoRelation?: string | undefined;
enableDrillDown?: boolean;
widgetsByDynamicVariableId?: Record<string, string[]>;
}
export interface GetGraphVisibilityStateOnLegendClickProps {

View File

@@ -16,7 +16,6 @@ import { themeColors } from 'constants/theme';
import { DEFAULT_ROW_NAME } from 'container/DashboardContainer/DashboardDescription/utils';
import { useDashboardVariables } from 'hooks/dashboard/useDashboardVariables';
import { useUpdateDashboard } from 'hooks/dashboard/useUpdateDashboard';
import { useWidgetsByDynamicVariableId } from 'hooks/dashboard/useWidgetsByDynamicVariableId';
import useComponentPermission from 'hooks/useComponentPermission';
import { useIsDarkMode } from 'hooks/useDarkMode';
import { useSafeNavigate } from 'hooks/useSafeNavigate';
@@ -102,8 +101,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
Record<string, { widgets: Layout[]; collapsed: boolean }>
>({});
const widgetsByDynamicVariableId = useWidgetsByDynamicVariableId();
useEffect(() => {
setCurrentPanelMap(panelMap);
}, [panelMap]);
@@ -617,7 +614,6 @@ function GraphLayout(props: GraphLayoutProps): JSX.Element {
onDragSelect={onDragSelect}
dataAvailable={checkIfDataExists}
enableDrillDown={enableDrillDown}
widgetsByDynamicVariableId={widgetsByDynamicVariableId}
/>
</Card>
</CardContainer>

View File

@@ -5,16 +5,12 @@ import { PanelMode } from 'container/DashboardContainer/visualization/panels/typ
import { WidgetGraphComponentProps } from 'container/GridCardLayout/GridCard/types';
import { RowData } from 'lib/query/createTableColumnsFromQuery';
import { OnClickPluginOpts } from 'lib/uPlotLib/plugins/onClickPlugin';
import { SuccessResponse } from 'types/api';
import { Widgets } from 'types/api/dashboard/getAll';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { QueryData } from 'types/api/widgets/getQuery';
export type PanelWrapperProps = {
queryResponse: UseQueryResult<
SuccessResponse<MetricRangePayloadProps, unknown>,
Error
>;
queryResponse: UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
widget: Widgets;
setRequestData?: WidgetGraphComponentProps['setRequestData'];
isFullViewMode?: boolean;

View File

@@ -0,0 +1,316 @@
import { act, renderHook } from '@testing-library/react';
import { dashboardVariablesStore } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from 'providers/Dashboard/store/dashboardVariables/dashboardVariablesStoreTypes';
import {
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { useIsPanelWaitingOnVariable } from '../useVariableFetchState';
function makeVariable(
overrides: Partial<IDashboardVariable> & { id: string },
): IDashboardVariable {
return {
name: overrides.id,
description: '',
type: 'QUERY',
sort: 'DISABLED',
multiSelect: false,
showALLOption: false,
...overrides,
};
}
function resetStores(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
function setFetchStates(states: Record<string, VariableFetchState>): void {
variableFetchStore.set(() => ({
states,
lastUpdated: {},
cycleIds: {},
}));
}
function setDashboardVariables(
overrides: Partial<IDashboardVariablesStoreState>,
): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
...overrides,
}));
}
describe('useIsPanelWaitingOnVariable', () => {
beforeEach(() => {
resetStores();
});
it('should return false when variableNames is empty', () => {
const { result } = renderHook(() => useIsPanelWaitingOnVariable([]));
expect(result.current).toBe(false);
});
it('should return false when all referenced variables are idle', () => {
setFetchStates({ a: 'idle', b: 'idle' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val1' }),
b: makeVariable({ id: 'b', selectedValue: 'val2' }),
},
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
expect(result.current).toBe(false);
});
it('should return true when a variable is loading with empty selectedValue', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return true when a variable is waiting with empty selectedValue', () => {
setFetchStates({ a: 'waiting' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: '' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return true when a variable is revalidating with empty selectedValue', () => {
setFetchStates({ a: 'revalidating' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
it('should return false when a variable is loading but has a selectedValue', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'some-value' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return true for DYNAMIC variable with allSelected=true that is loading', () => {
setFetchStates({ dyn: 'loading' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'some-val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should return true for DYNAMIC variable with allSelected=true that is waiting', () => {
setFetchStates({ dyn: 'waiting' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should return false for DYNAMIC variable with allSelected=true that is idle', () => {
setFetchStates({ dyn: 'idle' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: 'val',
allSelected: true,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(false);
});
it('should return false for non-DYNAMIC variable with allSelected=false and non-empty value that is loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({
id: 'a',
selectedValue: 'val',
allSelected: false,
}),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return true if any one of multiple variables is blocking', () => {
setFetchStates({ a: 'idle', b: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val' }),
b: makeVariable({ id: 'b', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a', 'b']));
expect(result.current).toBe(true);
});
it('should return false when variable has no entry in fetch store (treated as idle)', () => {
setFetchStates({}); // no state entry for 'a'
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: 'val' }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should return false when variable is in error state with empty selectedValue', () => {
setFetchStates({ a: 'error' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should react to store updates', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: undefined }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
// Simulate variable fetch completing
act(() => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
});
expect(result.current).toBe(false);
});
it('should handle DYNAMIC variable with allSelected=false and empty selectedValue as blocking', () => {
setFetchStates({ dyn: 'loading' });
setDashboardVariables({
variables: {
dyn: makeVariable({
id: 'dyn',
type: 'DYNAMIC',
selectedValue: undefined,
allSelected: false,
}),
},
variableTypes: { dyn: 'DYNAMIC' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['dyn']));
expect(result.current).toBe(true);
});
it('should handle variable with array selectedValue as non-blocking when loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: ['val1', 'val2'] }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(false);
});
it('should handle variable with empty array selectedValue as blocking when loading', () => {
setFetchStates({ a: 'loading' });
setDashboardVariables({
variables: {
a: makeVariable({ id: 'a', selectedValue: [] }),
},
variableTypes: { a: 'QUERY' },
});
const { result } = renderHook(() => useIsPanelWaitingOnVariable(['a']));
expect(result.current).toBe(true);
});
});

View File

@@ -0,0 +1,153 @@
import { useCallback, useMemo, useRef, useSyncExternalStore } from 'react';
import isEmpty from 'lodash-es/isEmpty';
import {
IVariableFetchStoreState,
VariableFetchState,
variableFetchStore,
} from 'providers/Dashboard/store/variableFetchStore';
import { useDashboardVariablesSelector } from './useDashboardVariables';
/**
* Generic selector hook for the variable fetch store.
* Same pattern as useDashboardVariablesSelector.
*/
const useVariableFetchSelector = <T>(
selector: (state: IVariableFetchStoreState) => T,
): T => {
const selectorRef = useRef(selector);
selectorRef.current = selector;
const getSnapshot = useCallback(
() => selectorRef.current(variableFetchStore.getSnapshot()),
[],
);
return useSyncExternalStore(variableFetchStore.subscribe, getSnapshot);
};
interface UseVariableFetchStateReturn {
/** The current fetch state for this variable */
variableFetchState: VariableFetchState;
/** Current fetch cycle — include in react-query keys to auto-cancel stale requests */
variableFetchCycleId: number;
/** True if this variable is idle (not waiting and not fetching) */
isVariableSettled: boolean;
/** True if this variable is actively fetching (loading or revalidating) */
isVariableFetching: boolean;
/** True if this variable has completed at least one fetch cycle */
hasVariableFetchedOnce: boolean;
/** True if any parent variable hasn't settled yet */
isVariableWaitingForDependencies: boolean;
/** Message describing what this variable is waiting on; undefined when not waiting */
variableDependencyWaitMessage?: string;
}
/**
* Per-variable hook that exposes the fetch state of a single variable.
* Reusable by both variable input components and panel components.
*
* Subscribes to both variableFetchStore (for states) and
* dashboardVariablesStore (for parent graph) to compute derived values.
*/
export function useVariableFetchState(
variableName: string,
): UseVariableFetchStateReturn {
// This variable's fetch state (loading, waiting, idle, etc.)
const variableFetchState = useVariableFetchSelector(
(s) => s.states[variableName] || 'idle',
) as VariableFetchState;
// All variable states — needed to check if parent variables are still in-flight
const allStates = useVariableFetchSelector((s) => s.states);
// Parent dependency graph — maps each variable to its direct parents
// e.g. { "childVariable": ["parentVariable"] } means "childVariable" depends on "parentVariable"
const parentGraph = useDashboardVariablesSelector(
(s) => s.dependencyData?.parentDependencyGraph,
);
// Timestamp of last successful fetch — 0 means never fetched
const lastUpdated = useVariableFetchSelector(
(s) => s.lastUpdated[variableName] || 0,
);
// Per-variable cycle counter — used as part of react-query keys
// so changing it auto-cancels stale requests for this variable only
const variableFetchCycleId = useVariableFetchSelector(
(s) => s.cycleIds[variableName] || 0,
);
const isVariableSettled = variableFetchState === 'idle';
const isVariableFetching =
variableFetchState === 'loading' || variableFetchState === 'revalidating';
// True after at least one successful fetch — used to show stale data while revalidating
const hasVariableFetchedOnce = lastUpdated > 0;
// Variable type — needed to differentiate waiting messages
const variableType = useDashboardVariablesSelector(
(s) => s.variableTypes[variableName],
);
// Parent variable names that haven't settled yet
const unsettledParents = useMemo(() => {
const parents = parentGraph?.[variableName] || [];
return parents.filter((p) => (allStates[p] || 'idle') !== 'idle');
}, [parentGraph, variableName, allStates]);
const isVariableWaitingForDependencies = unsettledParents.length > 0;
const variableDependencyWaitMessage = useMemo(() => {
if (variableFetchState !== 'waiting') {
return;
}
if (variableType === 'DYNAMIC') {
return 'Waiting for all query variable options to load.';
}
if (unsettledParents.length === 0) {
return;
}
const quoted = unsettledParents.map((p) => `"${p}"`);
const names =
quoted.length > 1
? `${quoted.slice(0, -1).join(', ')} and ${quoted[quoted.length - 1]}`
: quoted[0];
return `Waiting for options of ${names} to load.`;
}, [variableFetchState, variableType, unsettledParents]);
return {
variableFetchState,
isVariableSettled,
isVariableWaitingForDependencies,
variableDependencyWaitMessage,
isVariableFetching,
hasVariableFetchedOnce,
variableFetchCycleId,
};
}
export function useIsPanelWaitingOnVariable(variableNames: string[]): boolean {
const states = useVariableFetchSelector((s) => s.states);
const dashboardVariables = useDashboardVariablesSelector((s) => s.variables);
const variableTypesMap = useDashboardVariablesSelector((s) => s.variableTypes);
return variableNames.some((name) => {
const variableFetchState = states[name];
const { selectedValue, allSelected } = dashboardVariables?.[name] || {};
const isVariableInFetchingOrWaitingState =
variableFetchState === 'loading' ||
variableFetchState === 'revalidating' ||
variableFetchState === 'waiting';
if (variableTypesMap[name] === 'DYNAMIC' && allSelected) {
return isVariableInFetchingOrWaitingState;
}
return isEmpty(selectedValue) ? isVariableInFetchingOrWaitingState : false;
});
}
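
A small consumption sketch for the per-variable hook (the hook and its import path come from the file above; the surrounding component logic is an assumption). variableFetchCycleId from the same return value is meant to be folded into the react-query key so that starting a new cycle drops the stale request:

import { useVariableFetchState } from 'hooks/dashboard/useVariableFetchState';

// Consumption sketch for a variable input component (name and wiring assumed).
function useVariableStatusHint(name: string): string | undefined {
  const {
    variableFetchState,
    isVariableFetching,
    hasVariableFetchedOnce,
    variableDependencyWaitMessage,
  } = useVariableFetchState(name);
  if (variableFetchState === 'waiting') {
    return variableDependencyWaitMessage;
  }
  if (isVariableFetching && !hasVariableFetchedOnce) {
    return 'Loading options';
  }
  return undefined; // settled, or revalidating while stale options stay visible
}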

View File

@@ -3,6 +3,7 @@ import { TelemetryFieldKey } from 'api/v5/v5';
import { PANEL_TYPES } from 'constants/queryBuilder';
import { convertKeysToColumnFields } from 'container/LogsExplorerList/utils';
import { placeWidgetAtBottom } from 'container/NewWidget/utils';
import { textContainsVariableReference } from 'lib/dashboardVariables/variableReference';
import { isArray } from 'lodash-es';
import {
Dashboard,
@@ -116,10 +117,17 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
variable.name &&
filter.key?.key === variable.dynamicVariablesAttribute &&
((isArray(filter.value) &&
filter.value.includes(`$${variable.name}`)) ||
filter.value === `$${variable.name}`) &&
(isArray(filter.value)
? filter.value.some(
(v) =>
typeof v === 'string' &&
variable.name &&
textContainsVariableReference(v, variable.name),
)
: typeof filter.value === 'string' &&
textContainsVariableReference(filter.value, variable.name)) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -132,7 +140,12 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
queryData.filter?.expression?.includes(`$${variable.name}`) &&
variable.name &&
queryData.filter?.expression &&
textContainsVariableReference(
queryData.filter.expression,
variable.name,
) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -149,7 +162,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
promqlQuery.query?.includes(`$${variable.name}`) &&
variable.name &&
promqlQuery.query &&
textContainsVariableReference(promqlQuery.query, variable.name) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
@@ -165,7 +180,9 @@ export const createDynamicVariableToWidgetsMap = (
dynamicVariables.forEach((variable) => {
if (
variable.dynamicVariablesAttribute &&
clickhouseQuery.query?.includes(`$${variable.name}`) &&
variable.name &&
clickhouseQuery.query &&
textContainsVariableReference(clickhouseQuery.query, variable.name) &&
!dynamicVariableToWidgetsMap[variable.id].includes(widget.id)
) {
dynamicVariableToWidgetsMap[variable.id].push(widget.id);
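
The move from a plain includes check on `$name` to textContainsVariableReference avoids prefix collisions, for example `$env` matching inside `$environment`. The helper's implementation is not part of this diff; a hypothetical word-boundary check that would behave this way:

// Hypothetical sketch only; the shipped helper may differ.
function textContainsVariableReferenceSketch(text: string, name: string): boolean {
  const escaped = name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  // `$name` must not be followed by another identifier character, so `$env`
  // does not match inside `$environment`.
  return new RegExp(`\\$${escaped}(?!\\w)`).test(text);
}
// textContainsVariableReferenceSketch("$environment = 'prod'", 'env') -> false
// textContainsVariableReferenceSketch("$env = 'prod'", 'env')         -> true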

View File

@@ -10,13 +10,12 @@ import {
GetQueryResultsProps,
} from 'lib/dashboard/getQueryResults';
import getStartEndRangeTime from 'lib/getStartEndRangeTime';
import { SuccessResponse, Warning } from 'types/api';
import APIError from 'types/api/error';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';
import { DataSource } from 'types/common/queryBuilder';
type UseGetQueryRangeOptions = UseQueryOptions<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
MetricQueryRangeSuccessResponse,
APIError | Error
>;
@@ -30,10 +29,7 @@ type UseGetQueryRange = (
widgetIndex: number;
publicDashboardId: string;
},
) => UseQueryResult<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
Error
>;
) => UseQueryResult<MetricQueryRangeSuccessResponse, Error>;
export const useGetQueryRange: UseGetQueryRange = (
requestData,
@@ -145,10 +141,7 @@ export const useGetQueryRange: UseGetQueryRange = (
};
}, [options?.retry]);
return useQuery<
SuccessResponse<MetricRangePayloadProps> & { warning?: Warning },
APIError | Error
>({
return useQuery<MetricQueryRangeSuccessResponse, APIError | Error>({
queryFn: async ({ signal }) =>
GetMetricQueryRange(
modifiedRequestData,

View File

@@ -19,7 +19,11 @@ import { Pagination } from 'hooks/queryPagination';
import { convertNewDataToOld } from 'lib/newQueryBuilder/convertNewDataToOld';
import { isEmpty } from 'lodash-es';
import { SuccessResponse, SuccessResponseV2, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import {
MetricQueryRangeSuccessResponse,
MetricRangePayloadProps,
} from 'types/api/metrics/getQueryRange';
import { ExecStats, MetricRangePayloadV5 } from 'types/api/v5/queryRange';
import { IBuilderQuery, Query } from 'types/api/queryBuilder/queryBuilderData';
import { DataSource } from 'types/common/queryBuilder';
@@ -205,13 +209,13 @@ export async function GetMetricQueryRange(
widgetIndex: number;
publicDashboardId: string;
},
): Promise<SuccessResponse<MetricRangePayloadProps> & { warning?: Warning }> {
): Promise<MetricQueryRangeSuccessResponse> {
let legendMap: Record<string, string>;
let response:
| SuccessResponse<MetricRangePayloadProps>
| SuccessResponseV2<MetricRangePayloadV5>
| (SuccessResponse<MetricRangePayloadProps> & { warning?: Warning });
| MetricQueryRangeSuccessResponse
| SuccessResponseV2<MetricRangePayloadV5>;
let warning: Warning | undefined;
let meta: ExecStats | undefined;
const panelType = props.originalGraphType || props.graphType;
@@ -299,6 +303,7 @@ export async function GetMetricQueryRange(
);
warning = response.payload.warning || undefined;
meta = response.payload.meta || undefined;
} else {
const v5Response = await getQueryRangeV5(
v5Result.queryPayload,
@@ -318,6 +323,7 @@ export async function GetMetricQueryRange(
);
warning = response.payload.warning || undefined;
meta = response.payload.meta || undefined;
}
} else {
const legacyResult = prepareQueryRangePayload(props);
@@ -384,6 +390,7 @@ export async function GetMetricQueryRange(
return {
...response,
warning,
meta,
};
}
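
MetricQueryRangeSuccessResponse is the new alias for what this function returns: the legacy success envelope plus the optional warning and the exec-stats meta added above. Its definition is not shown in this diff; approximately:

import { SuccessResponse, Warning } from 'types/api';
import { MetricRangePayloadProps } from 'types/api/metrics/getQueryRange';
import { ExecStats } from 'types/api/v5/queryRange';

// Approximate shape, inferred from the return statement above; not the real declaration.
export type MetricQueryRangeSuccessResponseSketch = SuccessResponse<MetricRangePayloadProps> & {
  warning?: Warning;
  meta?: ExecStats;
};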

View File

@@ -81,6 +81,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
barAlignment,
barMaxWidth,
barWidthFactor,
stepInterval,
} = this.props;
if (pathBuilder) {
return { paths: pathBuilder };
@@ -104,6 +105,7 @@ export class UPlotSeriesBuilder extends ConfigBuilder<SeriesProps, Series> {
barAlignment,
barMaxWidth,
barWidthFactor,
stepInterval,
});
return pathsBuilder(self, seriesIdx, idx0, idx1);
@@ -209,12 +211,14 @@ function getPathBuilder({
barAlignment = BarAlignment.Center,
barWidthFactor = 0.6,
barMaxWidth = 200,
stepInterval,
}: {
drawStyle: DrawStyle;
lineInterpolation?: LineInterpolation;
barAlignment?: BarAlignment;
barMaxWidth?: number;
barWidthFactor?: number;
stepInterval?: number;
}): Series.PathBuilder {
if (!builders) {
throw new Error('Required uPlot path builders are not available');
@@ -222,14 +226,13 @@ function getPathBuilder({
if (drawStyle === DrawStyle.Bar) {
const pathBuilders = uPlot.paths;
const barsConfigKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
if (!builders[barsConfigKey] && pathBuilders.bars) {
builders[barsConfigKey] = pathBuilders.bars({
size: [barWidthFactor, barMaxWidth],
align: barAlignment,
});
}
return builders[barsConfigKey];
return getBarPathBuilder({
pathBuilders,
barAlignment,
barWidthFactor,
barMaxWidth,
stepInterval,
});
}
if (drawStyle === DrawStyle.Line) {
@@ -247,4 +250,81 @@ function getPathBuilder({
return builders.spline;
}
// eslint-disable-next-line sonarjs/cognitive-complexity
function getBarPathBuilder({
pathBuilders,
barAlignment,
barWidthFactor,
barMaxWidth,
stepInterval,
}: {
pathBuilders: typeof uPlot.paths;
barAlignment: BarAlignment;
barWidthFactor: number;
barMaxWidth: number;
stepInterval?: number;
}): Series.PathBuilder {
if (!builders) {
throw new Error('Required uPlot path builders are not available');
}
const barsPathBuilderFactory = pathBuilders.bars;
// When a stepInterval is provided (in seconds), cap the maximum bar width
// so that a single bar never visually spans more than stepInterval worth
// of time on the x-scale.
if (
typeof stepInterval === 'number' &&
stepInterval > 0 &&
barsPathBuilderFactory
) {
return (
self: uPlot,
seriesIdx: number,
idx0: number,
idx1: number,
): Series.Paths | null => {
let effectiveBarMaxWidth = barMaxWidth;
const xScale = self.scales.x as uPlot.Scale | undefined;
if (xScale && typeof xScale.min === 'number') {
const start = xScale.min as number;
const end = start + stepInterval;
const startPx = self.valToPos(start, 'x');
const endPx = self.valToPos(end, 'x');
const intervalPx = Math.abs(endPx - startPx);
if (intervalPx > 0) {
effectiveBarMaxWidth =
typeof barMaxWidth === 'number'
? Math.min(barMaxWidth, intervalPx)
: intervalPx;
}
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${effectiveBarMaxWidth}`;
if (builders && !builders[barsCfgKey]) {
builders[barsCfgKey] = barsPathBuilderFactory({
size: [barWidthFactor, effectiveBarMaxWidth],
align: barAlignment,
});
}
return builders && builders[barsCfgKey]
? builders[barsCfgKey](self, seriesIdx, idx0, idx1)
: null;
};
}
const barsCfgKey = `bars|${barAlignment}|${barWidthFactor}|${barMaxWidth}`;
if (!builders[barsCfgKey] && barsPathBuilderFactory) {
builders[barsCfgKey] = barsPathBuilderFactory({
size: [barWidthFactor, barMaxWidth],
align: barAlignment,
});
}
return builders[barsCfgKey];
}
export type { SeriesProps };
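
A quick worked example of the cap introduced in getBarPathBuilder, with illustrative numbers: for stepInterval = 60 seconds and an x-scale where one minute maps to 24 px, valToPos(xScale.min) and valToPos(xScale.min + 60) land 24 px apart, so the cap becomes min(200, 24) = 24 and no bar is drawn wider than one step of time.

// Illustrative numbers only: stepInterval = 60 s, one minute spans 24 px on the x-scale.
const startPx = 100;                          // self.valToPos(xScale.min, 'x')
const endPx = 124;                            // self.valToPos(xScale.min + 60, 'x')
const intervalPx = Math.abs(endPx - startPx); // 24
const effectiveBarMaxWidth = Math.min(200, intervalPx); // 24, so bars never exceed one step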

View File

@@ -176,6 +176,7 @@ export interface SeriesProps extends LineConfig, PointsConfig, BarConfig {
show?: boolean;
spanGaps?: boolean;
isDarkMode?: boolean;
stepInterval?: number;
}
export interface LegendItem {

View File

@@ -84,8 +84,6 @@ const DashboardContext = createContext<IDashboardContext>({
toScrollWidgetId: '',
setToScrollWidgetId: () => {},
updateLocalStorageDashboardVariables: () => {},
variablesToGetUpdated: [],
setVariablesToGetUpdated: () => {},
dashboardQueryRangeCalled: false,
setDashboardQueryRangeCalled: () => {},
selectedRowWidgetId: '',
@@ -183,10 +181,6 @@ export function DashboardProvider({
exact: true,
});
const [variablesToGetUpdated, setVariablesToGetUpdated] = useState<string[]>(
[],
);
const [layouts, setLayouts] = useState<Layout[]>([]);
const [panelMap, setPanelMap] = useState<
@@ -517,8 +511,6 @@ export function DashboardProvider({
updatedTimeRef,
setToScrollWidgetId,
updateLocalStorageDashboardVariables,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,
@@ -541,8 +533,6 @@ export function DashboardProvider({
toScrollWidgetId,
updateLocalStorageDashboardVariables,
currentDashboard,
variablesToGetUpdated,
setVariablesToGetUpdated,
dashboardQueryRangeCalled,
setDashboardQueryRangeCalled,
selectedRowWidgetId,

View File

@@ -0,0 +1,527 @@
import * as dashboardVariablesStore from '../dashboardVariables/dashboardVariablesStore';
import { IDependencyData } from '../dashboardVariables/dashboardVariablesStoreTypes';
import {
enqueueDescendantsOfVariable,
enqueueFetchOfAllVariables,
initializeVariableFetchStore,
onVariableFetchComplete,
onVariableFetchFailure,
VariableFetchContext,
variableFetchStore,
} from '../variableFetchStore';
const getVariableDependencyContextSpy = jest.spyOn(
dashboardVariablesStore,
'getVariableDependencyContext',
);
function resetStore(): void {
variableFetchStore.set(() => ({
states: {},
lastUpdated: {},
cycleIds: {},
}));
}
function mockContext(overrides: Partial<VariableFetchContext> = {}): void {
getVariableDependencyContextSpy.mockReturnValue({
doAllVariablesHaveValuesSelected: false,
variableTypes: {},
dynamicVariableOrder: [],
dependencyData: null,
...overrides,
});
}
/**
* Helper to build a dependency data object for tests.
* Only the fields used by the store actions are required.
*/
function buildDependencyData(
overrides: Partial<IDependencyData> = {},
): IDependencyData {
return {
order: [],
graph: {},
parentDependencyGraph: {},
transitiveDescendants: {},
hasCycle: false,
...overrides,
};
}
describe('variableFetchStore', () => {
beforeEach(() => {
resetStore();
jest.clearAllMocks();
});
// ==================== initializeVariableFetchStore ====================
describe('initializeVariableFetchStore', () => {
it('should initialize new variables to idle', () => {
initializeVariableFetchStore(['a', 'b', 'c']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({ a: 'idle', b: 'idle', c: 'idle' });
});
it('should preserve existing states for known variables', () => {
// Pre-set a state
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
initializeVariableFetchStore(['a', 'b']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('idle');
});
it('should clean up stale variables that no longer exist', () => {
variableFetchStore.update((d) => {
d.states.old = 'idle';
d.lastUpdated.old = 100;
d.cycleIds.old = 3;
});
initializeVariableFetchStore(['a']);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.old).toBeUndefined();
expect(storeSnapshot.lastUpdated.old).toBeUndefined();
expect(storeSnapshot.cycleIds.old).toBeUndefined();
expect(storeSnapshot.states.a).toBe('idle');
});
it('should handle empty variable names array', () => {
variableFetchStore.update((d) => {
d.states.a = 'idle';
});
initializeVariableFetchStore([]);
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states).toEqual({});
});
});
// ==================== enqueueFetchOfAllVariables ====================
describe('enqueueFetchOfAllVariables', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
expect(variableFetchStore.getSnapshot().states.a).toBe('idle');
});
it('should set root query variables to loading and dependent ones to waiting', () => {
// a is root (no parents), b depends on a
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('loading');
expect(storeSnapshot.states.b).toBe('waiting');
});
it('should set root query variables to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a'],
parentDependencyGraph: { a: [] },
}),
variableTypes: { a: 'QUERY' },
});
// Pre-set lastUpdated so it appears previously fetched
variableFetchStore.update((d) => {
d.lastUpdated.a = 1000;
});
initializeVariableFetchStore(['a']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('revalidating');
});
it('should bump cycle IDs for all enqueued variables', () => {
mockContext({
dependencyData: buildDependencyData({
order: ['a', 'b'],
parentDependencyGraph: { a: [], b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
initializeVariableFetchStore(['a', 'b']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.cycleIds.a).toBe(1);
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set dynamic variables to waiting when not all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: false,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should set dynamic variables to loading when all variables have values', () => {
mockContext({
doAllVariablesHaveValuesSelected: true,
dependencyData: buildDependencyData({ order: [] }),
variableTypes: { dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
initializeVariableFetchStore(['dyn1']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should not treat non-QUERY parents as query parents', () => {
// b has a CUSTOM parent — shouldn't cause waiting
mockContext({
dependencyData: buildDependencyData({
order: ['b'],
parentDependencyGraph: { b: ['customVar'] },
}),
variableTypes: { b: 'QUERY', customVar: 'CUSTOM' },
});
initializeVariableFetchStore(['b', 'customVar']);
enqueueFetchOfAllVariables();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
});
});
// ==================== onVariableFetchComplete ====================
describe('onVariableFetchComplete', () => {
it('should set the completed variable to idle with a lastUpdated timestamp', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
const before = Date.now();
onVariableFetchComplete('a');
const after = Date.now();
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.lastUpdated.a).toBeGreaterThanOrEqual(before);
expect(storeSnapshot.lastUpdated.a).toBeLessThanOrEqual(after);
});
it('should unblock waiting query-type children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('idle');
expect(storeSnapshot.states.b).toBe('loading');
});
it('should not unblock non-QUERY children', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// dyn1 is DYNAMIC, not QUERY, so it should remain waiting
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables are settled', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: [] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('loading');
});
it('should NOT unlock dynamic variables if a query variable is still in-flight', () => {
mockContext({
dependencyData: buildDependencyData({
graph: { a: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.dyn1 = 'waiting';
});
onVariableFetchComplete('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.dyn1).toBe('waiting');
});
});
// ==================== onVariableFetchFailure ====================
describe('onVariableFetchFailure', () => {
it('should set the failed variable to error', () => {
mockContext();
variableFetchStore.update((d) => {
d.states.a = 'loading';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
});
it('should set query-type transitive descendants to idle', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.b = 'waiting';
d.states.c = 'waiting';
});
onVariableFetchFailure('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.a).toBe('error');
expect(storeSnapshot.states.b).toBe('idle');
expect(storeSnapshot.states.c).toBe('idle');
});
it('should not touch non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('waiting');
});
it('should unlock waiting dynamic variables when all query variables settle via error', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
dynamicVariableOrder: ['dyn1'],
});
variableFetchStore.update((d) => {
d.states.a = 'loading';
d.states.dyn1 = 'waiting';
});
onVariableFetchFailure('a');
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('loading');
});
});
// ==================== enqueueDescendantsOfVariable ====================
describe('enqueueDescendantsOfVariable', () => {
it('should no-op when dependencyData is null', () => {
mockContext({ dependencyData: null });
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('idle');
});
it('should enqueue query-type descendants with all parents settled', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
expect(storeSnapshot.states.b).toBe('loading');
expect(storeSnapshot.cycleIds.b).toBe(1);
});
it('should set descendants to waiting when some parents are not settled', () => {
// b depends on both a and c; c is still loading
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a', 'c'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'loading';
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('waiting');
});
it('should skip non-QUERY descendants', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['dyn1'] },
parentDependencyGraph: {},
}),
variableTypes: { a: 'QUERY', dyn1: 'DYNAMIC' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.dyn1 = 'idle';
});
enqueueDescendantsOfVariable('a');
// dyn1 is DYNAMIC, so it should not be touched
expect(variableFetchStore.getSnapshot().states.dyn1).toBe('idle');
});
it('should handle chain of descendants: a -> b -> c', () => {
// a -> b -> c, all QUERY
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b', 'c'] },
parentDependencyGraph: { b: ['a'], c: ['b'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY', c: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.states.c = 'idle';
});
enqueueDescendantsOfVariable('a');
const storeSnapshot = variableFetchStore.getSnapshot();
// b's parent (a) is idle/settled → loading
expect(storeSnapshot.states.b).toBe('loading');
// c's parent (b) just moved to loading (not settled) → waiting
expect(storeSnapshot.states.c).toBe('waiting');
});
it('should set descendants to revalidating when previously fetched', () => {
mockContext({
dependencyData: buildDependencyData({
transitiveDescendants: { a: ['b'] },
parentDependencyGraph: { b: ['a'] },
}),
variableTypes: { a: 'QUERY', b: 'QUERY' },
});
variableFetchStore.update((d) => {
d.states.a = 'idle';
d.states.b = 'idle';
d.lastUpdated.b = 1000;
});
enqueueDescendantsOfVariable('a');
expect(variableFetchStore.getSnapshot().states.b).toBe('revalidating');
});
});
});

View File

@@ -0,0 +1,196 @@
import {
IVariableFetchStoreState,
VariableFetchState,
} from '../variableFetchStore';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from '../variableFetchStoreUtils';
describe('variableFetchStoreUtils', () => {
describe('isSettled', () => {
it('should return true for idle state', () => {
expect(isSettled('idle')).toBe(true);
});
it('should return true for error state', () => {
expect(isSettled('error')).toBe(true);
});
it('should return false for loading state', () => {
expect(isSettled('loading')).toBe(false);
});
it('should return false for revalidating state', () => {
expect(isSettled('revalidating')).toBe(false);
});
it('should return false for waiting state', () => {
expect(isSettled('waiting')).toBe(false);
});
it('should return false for undefined', () => {
expect(isSettled(undefined)).toBe(false);
});
});
describe('resolveFetchState', () => {
it('should return "loading" when variable has never been fetched', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: {},
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "loading" when lastUpdated is 0', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 0 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('loading');
});
it('should return "revalidating" when variable has been fetched before', () => {
const draft: IVariableFetchStoreState = {
states: {},
lastUpdated: { myVar: 1000 },
cycleIds: {},
};
expect(resolveFetchState(draft, 'myVar')).toBe('revalidating');
});
});
describe('areAllQueryVariablesSettled', () => {
it('should return true when all query variables are idle', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'idle',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when all query variables are in error', () => {
const states: Record<string, VariableFetchState> = {
a: 'error',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true with a mix of idle and error query variables', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'error',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return false when any query variable is loading', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'loading',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should return false when any query variable is waiting', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
b: 'waiting',
};
const variableTypes = { a: 'QUERY' as const, b: 'QUERY' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(false);
});
it('should ignore non-QUERY variable types', () => {
const states: Record<string, VariableFetchState> = {
a: 'idle',
dynVar: 'loading',
};
const variableTypes = {
a: 'QUERY' as const,
dynVar: 'DYNAMIC' as const,
};
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
it('should return true when there are no QUERY variables', () => {
const states: Record<string, VariableFetchState> = {
dynVar: 'loading',
};
const variableTypes = { dynVar: 'DYNAMIC' as const };
expect(areAllQueryVariablesSettled(states, variableTypes)).toBe(true);
});
});
describe('unlockWaitingDynamicVariables', () => {
it('should transition waiting dynamic variables to loading when never fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting', dyn2: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('loading');
expect(draft.states.dyn2).toBe('loading');
});
it('should transition waiting dynamic variables to revalidating when previously fetched', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: { dyn1: 1000 },
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1']);
expect(draft.states.dyn1).toBe('revalidating');
});
it('should not touch dynamic variables that are not in waiting state', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'idle', dyn2: 'loading' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, ['dyn1', 'dyn2']);
expect(draft.states.dyn1).toBe('idle');
expect(draft.states.dyn2).toBe('loading');
});
it('should handle empty dynamic variable order', () => {
const draft: IVariableFetchStoreState = {
states: { dyn1: 'waiting' },
lastUpdated: {},
cycleIds: {},
};
unlockWaitingDynamicVariables(draft, []);
expect(draft.states.dyn1).toBe('waiting');
});
});
});

View File

@@ -0,0 +1,225 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
dashboardVariablesStore,
getVariableDependencyContext,
setDashboardVariablesStore,
updateDashboardVariablesStore,
} from '../dashboardVariablesStore';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
function createVariable(
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable {
return {
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
};
}
function resetStore(): void {
dashboardVariablesStore.set(() => ({
dashboardId: '',
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
}));
}
describe('dashboardVariablesStore', () => {
beforeEach(() => {
resetStore();
});
describe('setDashboardVariablesStore', () => {
it('should set the dashboard variables and compute derived values', () => {
const variables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
};
setDashboardVariablesStore({ dashboardId: 'dash-1', variables });
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-1');
expect(storeSnapshot.variables).toEqual(variables);
expect(storeSnapshot.variableTypes).toEqual({ env: 'QUERY' });
expect(storeSnapshot.sortedVariablesArray).toHaveLength(1);
});
});
describe('updateDashboardVariablesStore', () => {
it('should update variables and recompute derived values', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
},
});
const updatedVariables: IDashboardVariables = {
env: createVariable({ name: 'env', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
};
updateDashboardVariablesStore({
dashboardId: 'dash-1',
variables: updatedVariables,
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(storeSnapshot.dynamicVariableOrder).toEqual(['dyn1']);
});
it('should replace dashboardId when it does not match', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
'not-there': createVariable({ name: 'not-there', order: 0 }),
},
});
updateDashboardVariablesStore({
dashboardId: 'dash-2',
variables: {
a: createVariable({ name: 'a', order: 0 }),
},
});
const storeSnapshot = dashboardVariablesStore.getSnapshot();
expect(storeSnapshot.dashboardId).toBe('dash-2');
expect(storeSnapshot.variableTypes).toEqual({
a: 'QUERY',
});
expect(storeSnapshot.variableTypes).not.toEqual({
'not-there': 'QUERY',
});
});
});
describe('getVariableDependencyContext', () => {
it('should return context with all fields', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
},
});
const {
variableTypes,
dynamicVariableOrder,
dependencyData,
} = getVariableDependencyContext();
expect(variableTypes).toEqual({ env: 'QUERY' });
expect(dynamicVariableOrder).toEqual([]);
expect(dependencyData).not.toBeNull();
});
it('should report doAllVariablesHaveValuesSelected as true when all variables have selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: 'us-east',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report doAllVariablesHaveValuesSelected as false when some variables lack selectedValue', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
selectedValue: 'prod',
}),
region: createVariable({
name: 'region',
type: 'CUSTOM',
order: 1,
selectedValue: undefined,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
it('should treat DYNAMIC variable with allSelected=true and selectedValue=null as having a value', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: null as any,
allSelected: true,
}),
env: createVariable({
name: 'env',
type: 'QUERY',
order: 1,
selectedValue: 'prod',
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(true);
});
it('should report false when a DYNAMIC variable has empty selectedValue and allSelected is not true', () => {
setDashboardVariablesStore({
dashboardId: 'dash-1',
variables: {
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 0,
selectedValue: '',
allSelected: false,
}),
},
});
const { doAllVariablesHaveValuesSelected } = getVariableDependencyContext();
expect(doAllVariablesHaveValuesSelected).toBe(false);
});
});
});

View File

@@ -0,0 +1,369 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { IDashboardVariables } from '../dashboardVariablesStoreTypes';
import {
buildDynamicVariableOrder,
buildSortedVariablesArray,
buildVariableTypesMap,
computeDerivedValues,
} from '../dashboardVariablesStoreUtils';
const createVariable = (
overrides: Partial<IDashboardVariable> = {},
): IDashboardVariable => ({
id: 'test-id',
name: 'test-var',
description: '',
type: 'QUERY',
sort: 'DISABLED',
showALLOption: false,
multiSelect: false,
order: 0,
...overrides,
});
describe('dashboardVariablesStoreUtils', () => {
describe('buildSortedVariablesArray', () => {
it('should sort variables by order property', () => {
const variables: IDashboardVariables = {
c: createVariable({ name: 'c', order: 3 }),
a: createVariable({ name: 'a', order: 1 }),
b: createVariable({ name: 'b', order: 2 }),
};
const result = buildSortedVariablesArray(variables);
expect(result.map((v) => v.name)).toEqual(['a', 'b', 'c']);
});
it('should return empty array for empty variables', () => {
const result = buildSortedVariablesArray({});
expect(result).toEqual([]);
});
it('should create copies of variables (not references)', () => {
const original = createVariable({ name: 'a', order: 0 });
const variables: IDashboardVariables = { a: original };
const result = buildSortedVariablesArray(variables);
expect(result[0]).not.toBe(original);
expect(result[0]).toEqual(original);
});
});
describe('buildVariableTypesMap', () => {
it('should create a name-to-type mapping', () => {
const sorted = [
createVariable({ name: 'env', type: 'QUERY' }),
createVariable({ name: 'region', type: 'CUSTOM' }),
createVariable({ name: 'dynVar', type: 'DYNAMIC' }),
createVariable({ name: 'text', type: 'TEXTBOX' }),
];
const result = buildVariableTypesMap(sorted);
expect(result).toEqual({
env: 'QUERY',
region: 'CUSTOM',
dynVar: 'DYNAMIC',
text: 'TEXTBOX',
});
});
it('should return empty object for empty array', () => {
expect(buildVariableTypesMap([])).toEqual({});
});
});
describe('buildDynamicVariableOrder', () => {
it('should return only DYNAMIC variable names in order', () => {
const sorted = [
createVariable({ name: 'queryVar', type: 'QUERY', order: 0 }),
createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
createVariable({ name: 'customVar', type: 'CUSTOM', order: 2 }),
createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
];
const result = buildDynamicVariableOrder(sorted);
expect(result).toEqual(['dyn1', 'dyn2']);
});
it('should return empty array when no DYNAMIC variables exist', () => {
const sorted = [
createVariable({ name: 'a', type: 'QUERY' }),
createVariable({ name: 'b', type: 'CUSTOM' }),
];
expect(buildDynamicVariableOrder(sorted)).toEqual([]);
});
it('should return empty array for empty input', () => {
expect(buildDynamicVariableOrder([])).toEqual([]);
});
});
describe('computeDerivedValues', () => {
it('should compute all derived values from variables', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 1,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(2);
expect(result.sortedVariablesArray[0].name).toBe('env');
expect(result.sortedVariablesArray[1].name).toBe('dyn1');
expect(result.variableTypes).toEqual({
env: 'QUERY',
dyn1: 'DYNAMIC',
});
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
// dependencyData should exist since there are variables
expect(result.dependencyData).not.toBeNull();
});
it('should return null dependencyData for empty variables', () => {
const result = computeDerivedValues({});
expect(result.sortedVariablesArray).toEqual([]);
expect(result.dependencyData).toBeNull();
expect(result.variableTypes).toEqual({});
expect(result.dynamicVariableOrder).toEqual([]);
});
it('should handle all four variable types together', () => {
const variables: IDashboardVariables = {
queryVar: createVariable({
name: 'queryVar',
type: 'QUERY',
order: 0,
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(4);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'queryVar',
'customVar',
'dynVar',
'textVar',
]);
expect(result.variableTypes).toEqual({
queryVar: 'QUERY',
customVar: 'CUSTOM',
dynVar: 'DYNAMIC',
textVar: 'TEXTBOX',
});
expect(result.dynamicVariableOrder).toEqual(['dynVar']);
expect(result.dependencyData).not.toBeNull();
});
it('should sort variables by order regardless of insertion order', () => {
const variables: IDashboardVariables = {
z: createVariable({ name: 'z', type: 'QUERY', order: 4 }),
a: createVariable({ name: 'a', type: 'CUSTOM', order: 0 }),
m: createVariable({ name: 'm', type: 'DYNAMIC', order: 2 }),
b: createVariable({ name: 'b', type: 'TEXTBOX', order: 1 }),
x: createVariable({ name: 'x', type: 'QUERY', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray.map((v) => v.name)).toEqual([
'a',
'b',
'm',
'x',
'z',
]);
});
it('should include multiple dynamic variables in order', () => {
const variables: IDashboardVariables = {
dyn3: createVariable({ name: 'dyn3', type: 'DYNAMIC', order: 5 }),
query1: createVariable({ name: 'query1', type: 'QUERY', order: 0 }),
dyn1: createVariable({ name: 'dyn1', type: 'DYNAMIC', order: 1 }),
custom1: createVariable({ name: 'custom1', type: 'CUSTOM', order: 2 }),
dyn2: createVariable({ name: 'dyn2', type: 'DYNAMIC', order: 3 }),
};
const result = computeDerivedValues(variables);
expect(result.dynamicVariableOrder).toEqual(['dyn1', 'dyn2', 'dyn3']);
});
it('should build dependency data with query variable order for dependent queries', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
service: createVariable({
name: 'service',
type: 'QUERY',
order: 1,
queryValue: 'SELECT DISTINCT service FROM table WHERE env={{.env}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData } = result;
expect(dependencyData).not.toBeNull();
// env should appear in the dependency order (it's a root QUERY variable)
expect(dependencyData?.order).toContain('env');
// service depends on env, so it should also be in the order
expect(dependencyData?.order).toContain('service');
// env comes before service in topological order
const envIdx = dependencyData?.order.indexOf('env') ?? -1;
const svcIdx = dependencyData?.order.indexOf('service') ?? -1;
expect(envIdx).toBeLessThan(svcIdx);
});
it('should not include non-QUERY variables in dependency order', () => {
const variables: IDashboardVariables = {
env: createVariable({
name: 'env',
type: 'QUERY',
order: 0,
queryValue: 'SELECT DISTINCT env FROM table',
}),
customVar: createVariable({
name: 'customVar',
type: 'CUSTOM',
order: 1,
}),
dynVar: createVariable({
name: 'dynVar',
type: 'DYNAMIC',
order: 2,
}),
textVar: createVariable({
name: 'textVar',
type: 'TEXTBOX',
order: 3,
}),
};
const result = computeDerivedValues(variables);
expect(result.dependencyData).not.toBeNull();
// Only QUERY variables should be in the dependency order
result.dependencyData?.order.forEach((name) => {
expect(result.variableTypes[name]).toBe('QUERY');
});
});
it('should produce transitive descendants in dependency data', () => {
const variables: IDashboardVariables = {
region: createVariable({
name: 'region',
type: 'QUERY',
order: 0,
queryValue: 'SELECT region FROM table',
}),
cluster: createVariable({
name: 'cluster',
type: 'QUERY',
order: 1,
queryValue: 'SELECT cluster FROM table WHERE region={{.region}}',
}),
host: createVariable({
name: 'host',
type: 'QUERY',
order: 2,
queryValue: 'SELECT host FROM table WHERE cluster={{.cluster}}',
}),
};
const result = computeDerivedValues(variables);
const { dependencyData: depData } = result;
expect(depData).not.toBeNull();
expect(depData?.transitiveDescendants).toBeDefined();
// region's transitive descendants should include cluster and host
expect(depData?.transitiveDescendants['region']).toEqual(
expect.arrayContaining(['cluster', 'host']),
);
});
it('should handle a single variable', () => {
const variables: IDashboardVariables = {
solo: createVariable({
name: 'solo',
type: 'QUERY',
order: 0,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(1);
expect(result.variableTypes).toEqual({ solo: 'QUERY' });
expect(result.dynamicVariableOrder).toEqual([]);
expect(result.dependencyData).not.toBeNull();
expect(result.dependencyData?.order).toEqual(['solo']);
});
it('should handle only non-QUERY variables', () => {
const variables: IDashboardVariables = {
custom1: createVariable({
name: 'custom1',
type: 'CUSTOM',
order: 0,
}),
text1: createVariable({
name: 'text1',
type: 'TEXTBOX',
order: 1,
}),
dyn1: createVariable({
name: 'dyn1',
type: 'DYNAMIC',
order: 2,
}),
};
const result = computeDerivedValues(variables);
expect(result.sortedVariablesArray).toHaveLength(3);
// No QUERY variables, so dependency order should be empty
expect(result.dependencyData?.order).toEqual([]);
expect(result.dynamicVariableOrder).toEqual(['dyn1']);
});
});
});

View File

@@ -1,4 +1,7 @@
import { isEmpty, isUndefined } from 'lodash-es';
import createStore from '../store';
import { VariableFetchContext } from '../variableFetchStore';
import { IDashboardVariablesStoreState } from './dashboardVariablesStoreTypes';
import {
computeDerivedValues,
@@ -10,6 +13,8 @@ const initialState: IDashboardVariablesStoreState = {
variables: {},
sortedVariablesArray: [],
dependencyData: null,
variableTypes: {},
dynamicVariableOrder: [],
};
export const dashboardVariablesStore = createStore<IDashboardVariablesStoreState>(
@@ -55,3 +60,38 @@ export function updateDashboardVariablesStore({
updateDerivedValues(draft);
});
}
/**
* Read current store snapshot as VariableFetchContext.
* Called by the variableFetchStore actions to read dependency context
* without creating a circular import.
*/
export function getVariableDependencyContext(): VariableFetchContext {
const state = dashboardVariablesStore.getSnapshot();
// If every variable already has a selectedValue (e.g. persisted from
// localStorage/URL), dynamic variables can start in parallel.
// Otherwise they wait for query vars to settle first.
const doAllVariablesHaveValuesSelected = Object.values(state.variables).every(
(variable) => {
if (
variable.type === 'DYNAMIC' &&
variable.selectedValue === null &&
variable.allSelected === true
) {
return true;
}
return (
!isUndefined(variable.selectedValue) && !isEmpty(variable.selectedValue)
);
},
);
return {
doAllVariablesHaveValuesSelected,
variableTypes: state.variableTypes,
dynamicVariableOrder: state.dynamicVariableOrder,
dependencyData: state.dependencyData,
};
}

View File

@@ -1,11 +1,18 @@
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
export type VariableGraph = Record<string, string[]>;
export interface IDependencyData {
order: string[];
// Direct children for each variable
graph: VariableGraph;
// Direct parents for each variable
parentDependencyGraph: VariableGraph;
// Pre-computed transitive descendants for each node (all reachable nodes, not just direct children)
transitiveDescendants: VariableGraph;
hasCycle: boolean;
cycleNodes?: string[];
}
@@ -24,6 +31,12 @@ export interface IDashboardVariablesStoreState {
// Derived: dependency data for QUERY variables
dependencyData: IDependencyData | null;
// Derived: variable name → type mapping
variableTypes: Record<string, TVariableQueryType>;
// Derived: display-ordered list of dynamic variable names
dynamicVariableOrder: string[];
}
export interface IUseDashboardVariablesReturn {

View File

@@ -2,9 +2,11 @@ import {
buildDependencies,
buildDependencyGraph,
} from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import {
IDashboardVariable,
TVariableQueryType,
} from 'types/api/dashboard/getAll';
import { initializeVariableFetchStore } from '../variableFetchStore';
import {
IDashboardVariables,
IDashboardVariablesStoreState,
@@ -44,6 +46,7 @@ export function buildDependencyData(
order,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
} = buildDependencyGraph(dependencies);
@@ -58,49 +61,62 @@ export function buildDependencyData(
order: queryVariableOrder,
graph,
parentDependencyGraph,
transitiveDescendants,
hasCycle,
cycleNodes,
};
}
/**
* Initialize the variable fetch store with the computed dependency data
* Build a variable name → type mapping from sorted variables array
*/
function initializeFetchStore(
export function buildVariableTypesMap(
sortedVariablesArray: IDashboardVariable[],
dependencyData: IDependencyData | null,
): void {
if (dependencyData) {
const allVariableNames = sortedVariablesArray
.map((v) => v.name)
.filter((name): name is string => !!name);
): Record<string, TVariableQueryType> {
const types: Record<string, TVariableQueryType> = {};
sortedVariablesArray.forEach((v) => {
if (v.name) {
types[v.name] = v.type;
}
});
return types;
}
initializeVariableFetchStore(
allVariableNames,
dependencyData.graph,
dependencyData.parentDependencyGraph,
);
}
/**
* Build display-ordered list of dynamic variable names
*/
export function buildDynamicVariableOrder(
sortedVariablesArray: IDashboardVariable[],
): string[] {
return sortedVariablesArray
.filter((v) => v.type === 'DYNAMIC' && v.name)
.map((v) => v.name as string);
}
/**
* Compute derived values from variables
* This is a composition of buildSortedVariablesArray and buildDependencyData
* Also initializes the variable fetch store with the new dependency data
*/
export function computeDerivedValues(
variables: IDashboardVariablesStoreState['variables'],
): Pick<
IDashboardVariablesStoreState,
'sortedVariablesArray' | 'dependencyData'
| 'sortedVariablesArray'
| 'dependencyData'
| 'variableTypes'
| 'dynamicVariableOrder'
> {
const sortedVariablesArray = buildSortedVariablesArray(variables);
const dependencyData = buildDependencyData(sortedVariablesArray);
const variableTypes = buildVariableTypesMap(sortedVariablesArray);
const dynamicVariableOrder = buildDynamicVariableOrder(sortedVariablesArray);
// Initialize the variable fetch store when dependency data is computed
initializeFetchStore(sortedVariablesArray, dependencyData);
return { sortedVariablesArray, dependencyData };
return {
sortedVariablesArray,
dependencyData,
variableTypes,
dynamicVariableOrder,
};
}
/**
@@ -112,7 +128,8 @@ export function updateDerivedValues(
): void {
draft.sortedVariablesArray = buildSortedVariablesArray(draft.variables);
draft.dependencyData = buildDependencyData(draft.sortedVariablesArray);
// Initialize the variable fetch store when dependency data is updated
initializeFetchStore(draft.sortedVariablesArray, draft.dependencyData);
draft.variableTypes = buildVariableTypesMap(draft.sortedVariablesArray);
draft.dynamicVariableOrder = buildDynamicVariableOrder(
draft.sortedVariablesArray,
);
}

View File

@@ -1,6 +1,12 @@
import { VariableGraph } from 'container/DashboardContainer/DashboardVariablesSelection/util';
import { getVariableDependencyContext } from './dashboardVariables/dashboardVariablesStore';
import { IDashboardVariablesStoreState } from './dashboardVariables/dashboardVariablesStoreTypes';
import createStore from './store';
import {
areAllQueryVariablesSettled,
isSettled,
resolveFetchState,
unlockWaitingDynamicVariables,
} from './variableFetchStoreUtils';
// Fetch state for each variable
export type VariableFetchState =
@@ -14,19 +20,29 @@ export interface IVariableFetchStoreState {
// Per-variable fetch state
states: Record<string, VariableFetchState>;
// Dependency graphs (set once when variables change)
dependencyGraph: VariableGraph; // variable -> children that depend on it
parentGraph: VariableGraph; // variable -> parents it depends on
// Track last update timestamp per variable to trigger re-fetches
// Track last update timestamp per variable
lastUpdated: Record<string, number>;
// Per-variable cycle counter — bumped when a variable needs to refetch.
// Used in react-query keys to auto-cancel stale requests for that variable only.
cycleIds: Record<string, number>;
}
/**
* Context from dashboardVariablesStore needed by fetch actions.
* Read via getVariableDependencyContext() to avoid circular imports.
*/
export type VariableFetchContext = Pick<
IDashboardVariablesStoreState,
'variableTypes' | 'dynamicVariableOrder' | 'dependencyData'
> & {
doAllVariablesHaveValuesSelected: boolean;
};
const initialState: IVariableFetchStoreState = {
states: {},
dependencyGraph: {},
parentGraph: {},
lastUpdated: {},
cycleIds: {},
};
export const variableFetchStore = createStore<IVariableFetchStoreState>(
@@ -36,22 +52,183 @@ export const variableFetchStore = createStore<IVariableFetchStoreState>(
// ============== Actions ==============
/**
* Initialize the store with dependency graphs and set initial states
* Initialize the store with variable names.
* Called when dashboard variables change — sets up state entries.
*/
export function initializeVariableFetchStore(
variableNames: string[],
dependencyGraph: VariableGraph,
parentGraph: VariableGraph,
): void {
export function initializeVariableFetchStore(variableNames: string[]): void {
variableFetchStore.update((draft) => {
draft.dependencyGraph = dependencyGraph;
draft.parentGraph = parentGraph;
// Initialize all variables to idle, preserving existing ready states
// Initialize all variables to idle, preserving existing states
variableNames.forEach((name) => {
if (!draft.states[name]) {
draft.states[name] = 'idle';
}
});
// Clean up stale entries for variables that no longer exist
const nameSet = new Set(variableNames);
Object.keys(draft.states).forEach((name) => {
if (!nameSet.has(name)) {
delete draft.states[name];
delete draft.lastUpdated[name];
delete draft.cycleIds[name];
}
});
});
}
/**
* Start a full fetch cycle for all fetchable variables.
* Called on: initial load, time range change, or dependency graph change.
*
* Query variables with no query-type parents start immediately.
* Query variables with query-type parents get 'waiting'.
* Dynamic variables start immediately if all variables already have
* selectedValues (e.g. persisted from localStorage/URL). Otherwise they
* wait for all query variables to settle first.
*/
export function enqueueFetchOfAllVariables(): void {
const {
doAllVariablesHaveValuesSelected,
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { order: queryVariableOrder, parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
// Query variables: root ones start immediately, dependent ones wait
queryVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
const parents = parentDependencyGraph[name] || [];
const hasQueryParents = parents.some((p) => variableTypes[p] === 'QUERY');
if (hasQueryParents) {
draft.states[name] = 'waiting';
} else {
draft.states[name] = resolveFetchState(draft, name);
}
});
// Dynamic variables: start immediately if query variables have values,
// otherwise wait for query variables to settle first
dynamicVariableOrder.forEach((name) => {
draft.cycleIds[name] = (draft.cycleIds[name] || 0) + 1;
draft.states[name] = doAllVariablesHaveValuesSelected
? resolveFetchState(draft, name)
: 'waiting';
});
});
}
/**
* Mark a variable as completed. Unblocks waiting query-type children.
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchComplete(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'idle';
draft.lastUpdated[name] = Date.now();
if (!dependencyData) {
return;
}
const { graph } = dependencyData;
// Unblock waiting query-type children
const children = graph[name] || [];
children.forEach((child) => {
if (variableTypes[child] === 'QUERY' && draft.states[child] === 'waiting') {
draft.states[child] = resolveFetchState(draft, child);
}
});
// If all query variables are settled, unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
/**
* Mark a variable as errored. Sets query-type descendants to idle
* (they can't proceed without this parent).
* If all query variables are now settled, unlocks any waiting dynamic variables.
*/
export function onVariableFetchFailure(name: string): void {
const {
dependencyData,
variableTypes,
dynamicVariableOrder,
} = getVariableDependencyContext();
variableFetchStore.update((draft) => {
draft.states[name] = 'error';
if (!dependencyData) {
return;
}
// Set query-type descendants to idle (can't fetch without parent)
const descendants = dependencyData.transitiveDescendants[name] || [];
descendants.forEach((desc) => {
if (variableTypes[desc] === 'QUERY') {
draft.states[desc] = 'idle';
}
});
// If all query variables are settled (an error counts as settled), unlock any waiting dynamic variables
if (
variableTypes[name] === 'QUERY' &&
areAllQueryVariablesSettled(draft.states, variableTypes)
) {
unlockWaitingDynamicVariables(draft, dynamicVariableOrder);
}
});
}
/**
* Cascade a value change to query-type descendants.
* Called when a user changes a variable's value (not from a fetch cycle).
*
* Direct children whose parents are all settled start immediately.
* Deeper descendants wait until their parents complete (BFS order
* ensures parents are set before children within a single update).
*/
export function enqueueDescendantsOfVariable(name: string): void {
const { dependencyData, variableTypes } = getVariableDependencyContext();
if (!dependencyData) {
return;
}
const { parentDependencyGraph } = dependencyData;
variableFetchStore.update((draft) => {
const descendants = dependencyData.transitiveDescendants[name] || [];
const queryDescendants = descendants.filter(
(desc) => variableTypes[desc] === 'QUERY',
);
queryDescendants.forEach((desc) => {
draft.cycleIds[desc] = (draft.cycleIds[desc] || 0) + 1;
const parents = parentDependencyGraph[desc] || [];
const allParentsSettled = parents.every((p) => isSettled(draft.states[p]));
draft.states[desc] = allParentsSettled
? resolveFetchState(draft, desc)
: 'waiting';
});
});
}
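
For orientation, a minimal end-to-end sketch of the state machine implemented by the actions above, assuming two QUERY variables (service depends on env through the {{.env}} template, as exercised in the tests) and one DYNAMIC variable. The module paths, the makeVar helper, and the variable names are illustrative assumptions rather than part of this change.

// Sketch only: paths assume this lives next to variableFetchStore.ts.
import { IDashboardVariable } from 'types/api/dashboard/getAll';
import { setDashboardVariablesStore } from './dashboardVariables/dashboardVariablesStore';
import {
  enqueueDescendantsOfVariable,
  enqueueFetchOfAllVariables,
  initializeVariableFetchStore,
  onVariableFetchComplete,
  variableFetchStore,
} from './variableFetchStore';

// Hypothetical helper mirroring the createVariable helper used in the tests above.
const makeVar = (overrides: Partial<IDashboardVariable>): IDashboardVariable => ({
  id: 'test-id',
  name: 'test-var',
  description: '',
  type: 'QUERY',
  sort: 'DISABLED',
  showALLOption: false,
  multiSelect: false,
  order: 0,
  ...overrides,
});

// 1. Seed the dashboard store; derived dependency data is computed here.
setDashboardVariablesStore({
  dashboardId: 'dash-1',
  variables: {
    env: makeVar({ name: 'env', order: 0, queryValue: 'SELECT env FROM table' }),
    service: makeVar({
      name: 'service',
      order: 1,
      queryValue: 'SELECT service FROM table WHERE env={{.env}}',
    }),
    dyn1: makeVar({ name: 'dyn1', type: 'DYNAMIC', order: 2 }),
  },
});

// 2. Register fetch-state entries and start a full cycle.
initializeVariableFetchStore(['env', 'service', 'dyn1']);
enqueueFetchOfAllVariables();
// env: 'loading' (root), service: 'waiting' (depends on env),
// dyn1: 'waiting' (no variable has a selectedValue yet); all cycleIds bumped to 1.

// 3. Completions unblock children, then unlock dynamic variables.
onVariableFetchComplete('env'); // service -> 'loading'
onVariableFetchComplete('service'); // all QUERY vars settled, so dyn1 -> 'loading'

// 4. A user edit to env re-enqueues only its query-type descendants.
enqueueDescendantsOfVariable('env');
// service -> 'revalidating' (fetched before) and cycleIds.service is bumped again.

console.log(variableFetchStore.getSnapshot().states);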

View File

@@ -0,0 +1,46 @@
import { TVariableQueryType } from 'types/api/dashboard/getAll';
import {
IVariableFetchStoreState,
VariableFetchState,
} from './variableFetchStore';
export function isSettled(state: VariableFetchState | undefined): boolean {
return state === 'idle' || state === 'error';
}
/**
* Resolve the next fetch state based on whether the variable has been fetched before.
*/
export function resolveFetchState(
draft: IVariableFetchStoreState,
name: string,
): VariableFetchState {
return (draft.lastUpdated[name] || 0) > 0 ? 'revalidating' : 'loading';
}
/**
* Check if all query variables are settled (idle or error).
*/
export function areAllQueryVariablesSettled(
states: Record<string, VariableFetchState>,
variableTypes: Record<string, TVariableQueryType>,
): boolean {
return Object.entries(variableTypes)
.filter(([, type]) => type === 'QUERY')
.every(([name]) => isSettled(states[name]));
}
/**
* Transition dynamic variables that are in the 'waiting' state to loading/revalidating.
*/
export function unlockWaitingDynamicVariables(
draft: IVariableFetchStoreState,
dynamicVariableOrder: string[],
): void {
dynamicVariableOrder.forEach((dynName) => {
if (draft.states[dynName] === 'waiting') {
draft.states[dynName] = resolveFetchState(draft, dynName);
}
});
}

View File

@@ -47,8 +47,6 @@ export interface IDashboardContext {
allSelected: boolean,
isDynamic?: boolean,
) => void;
variablesToGetUpdated: string[];
setVariablesToGetUpdated: React.Dispatch<React.SetStateAction<string[]>>;
dashboardQueryRangeCalled: boolean;
setDashboardQueryRangeCalled: (value: boolean) => void;
selectedRowWidgetId: string | null;

View File

@@ -1,13 +1,14 @@
import { PANEL_TYPES } from 'constants/queryBuilder';
import { EQueryType } from 'types/common/dashboard';
import { Warning } from '..';
import { SuccessResponse, Warning } from '..';
import {
IBuilderFormula,
IBuilderQuery,
IClickHouseQuery,
IPromQLQuery,
} from '../queryBuilder/queryBuilderData';
import { ExecStats } from '../v5/queryRange';
import { QueryData, QueryDataV3 } from '../widgets/getQuery';
export type QueryRangePayload = {
@@ -35,8 +36,15 @@ export interface MetricRangePayloadProps {
newResult: MetricRangePayloadV3;
warnings?: string[];
};
meta?: ExecStats;
}
/** Query range success response including optional warning and meta */
export type MetricQueryRangeSuccessResponse = SuccessResponse<
MetricRangePayloadProps,
unknown
> & { warning?: Warning; meta?: ExecStats };
export interface MetricRangePayloadV3 {
data: {
result: QueryDataV3[];
@@ -44,4 +52,5 @@ export interface MetricRangePayloadV3 {
warnings?: string[];
};
warning?: Warning;
meta?: ExecStats;
}

View File

@@ -334,6 +334,7 @@ export interface ExecStats {
rowsScanned: number;
bytesScanned: number;
durationMs: number;
stepIntervals: Record<string, number>;
}
export interface Label {
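
A small consumer-side sketch tying together the two type additions above: the MetricQueryRangeSuccessResponse alias and the new stepIntervals map on ExecStats. The import path and the helper name are assumptions for illustration; the diff itself does not show where such a helper would live.

// Sketch only; the import path is an assumption, not taken from this diff.
import { MetricQueryRangeSuccessResponse } from 'types/api/metrics/getQueryRange';

// Look up the step interval the backend reported for a given query name via
// the new ExecStats.stepIntervals map, falling back when meta has no entry.
export function getReportedStepInterval(
  response: MetricQueryRangeSuccessResponse,
  queryName: string,
  fallback: number,
): number {
  return response.meta?.stepIntervals[queryName] ?? fallback;
}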

View File

@@ -10,6 +10,27 @@ type Config struct {
type Templates struct {
Directory string `mapstructure:"directory"`
Format Format `mapstructure:"format"`
}
type Format struct {
Header Header `mapstructure:"header"`
Help Help `mapstructure:"help"`
Footer Footer `mapstructure:"footer"`
}
type Header struct {
Enabled bool `mapstructure:"enabled"`
LogoURL string `mapstructure:"logo_url"`
}
type Help struct {
Enabled bool `mapstructure:"enabled"`
Email string `mapstructure:"email"`
}
type Footer struct {
Enabled bool `mapstructure:"enabled"`
}
type SMTP struct {
@@ -45,6 +66,19 @@ func newConfig() factory.Config {
Enabled: false,
Templates: Templates{
Directory: "/root/templates",
Format: Format{
Header: Header{
Enabled: false,
LogoURL: "",
},
Help: Help{
Enabled: false,
Email: "",
},
Footer: Footer{
Enabled: false,
},
},
},
SMTP: SMTP{
Address: "localhost:25",

View File

@@ -15,6 +15,7 @@ type provider struct {
settings factory.ScopedProviderSettings
store emailtypes.TemplateStore
client *client.Client
config emailing.Config
}
func NewFactory() factory.ProviderFactory[emailing.Emailing, emailing.Config] {
@@ -55,7 +56,12 @@ func New(ctx context.Context, providerSettings factory.ProviderSettings, config
return nil, err
}
return &provider{settings: settings, store: store, client: client}, nil
return &provider{
settings: settings,
store: store,
client: client,
config: config,
}, nil
}
func (provider *provider) SendHTML(ctx context.Context, to string, subject string, templateName emailtypes.TemplateName, data map[string]any) error {
@@ -69,8 +75,19 @@ func (provider *provider) SendHTML(ctx context.Context, to string, subject strin
return err
}
// if no data is provided, create an empty map to prevent a panic when we add the format, to, and subject data
if data == nil {
data = make(map[string]any)
}
// format, to, and subject are always set here and take precedence over any values already in the data map
data["format"] = provider.config.Templates.Format
data["to"] = to
data["subject"] = subject
content, err := emailtypes.NewContent(template, data)
if err != nil {
provider.settings.Logger().ErrorContext(ctx, "failed to create email content", "error", err)
return err
}

View File

@@ -30,3 +30,7 @@ func (module *getter) ListByOwnedKeyRange(ctx context.Context) ([]*types.Organiz
return module.store.ListByKeyRange(ctx, start, end)
}
func (module *getter) GetByName(ctx context.Context, name string) (*types.Organization, error) {
return module.store.GetByName(ctx, name)
}

View File

@@ -47,6 +47,22 @@ func (store *store) Get(ctx context.Context, id valuer.UUID) (*types.Organizatio
return organization, nil
}
func (store *store) GetByName(ctx context.Context, name string) (*types.Organization, error) {
organization := new(types.Organization)
err := store.
sqlstore.
BunDB().
NewSelect().
Model(organization).
Where("name = ?", name).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrOrganizationNotFound, "organization with name %s does not exist", name)
}
return organization, nil
}
func (store *store) GetAll(ctx context.Context) ([]*types.Organization, error) {
organizations := make([]*types.Organization, 0)
err := store.

View File

@@ -14,6 +14,9 @@ type Getter interface {
// ListByOwnedKeyRange gets all the organizations owned by the instance
ListByOwnedKeyRange(context.Context) ([]*types.Organization, error)
// Gets the organization by name
GetByName(context.Context, string) (*types.Organization, error)
}
type Setter interface {

View File

@@ -151,6 +151,10 @@ func (module *module) CreateCallbackAuthNSession(ctx context.Context, authNProvi
return "", err
}
if err := user.ErrIfRoot(); err != nil {
return "", errors.WithAdditionalf(err, "root user can only authenticate via password")
}
token, err := module.tokenizer.CreateToken(ctx, authtypes.NewIdentity(user.ID, user.OrgID, user.Email, user.Role), map[string]string{})
if err != nil {
return "", err

View File

@@ -5,11 +5,22 @@ import (
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/valuer"
)
type Config struct {
Password PasswordConfig `mapstructure:"password"`
Root RootConfig `mapstructure:"root"`
}
type RootConfig struct {
Enabled bool `mapstructure:"enabled"`
Email valuer.Email `mapstructure:"email"`
Password string `mapstructure:"password"`
OrgName string `mapstructure:"org_name"`
}
type PasswordConfig struct {
Reset ResetConfig `mapstructure:"reset"`
}
@@ -31,6 +42,10 @@ func newConfig() factory.Config {
MaxTokenLifetime: 6 * time.Hour,
},
},
Root: RootConfig{
Enabled: false,
OrgName: "default",
},
}
}
@@ -39,5 +54,17 @@ func (c Config) Validate() error {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::password::reset::max_token_lifetime must be positive")
}
if c.Root.Enabled {
if c.Root.Email.IsZero() {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::email is required when root user is enabled")
}
if c.Root.Password == "" {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password is required when root user is enabled")
}
if !types.IsPasswordValid(c.Root.Password) {
return errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "user::root::password does not meet password requirements")
}
}
return nil
}

View File

@@ -16,6 +16,10 @@ func NewGetter(store types.UserStore) user.Getter {
return &getter{store: store}
}
func (module *getter) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
return module.store.GetRootUserByOrgID(ctx, orgID)
}
func (module *getter) ListByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.User, error) {
users, err := module.store.ListUsersByOrgID(ctx, orgID)
if err != nil {

View File

@@ -22,8 +22,6 @@ import (
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
"github.com/dustin/go-humanize"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
type Module struct {
@@ -105,6 +103,12 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
return nil, err
}
if existingUser != nil {
if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot send invite to root user")
}
}
if existingUser != nil {
return nil, errors.New(errors.TypeAlreadyExists, errors.CodeAlreadyExists, "User already exists with the same email")
}
@@ -146,11 +150,9 @@ func (m *Module) CreateBulkInvite(ctx context.Context, orgID valuer.UUID, userID
continue
}
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You are invited to join a team in SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"CustomerName": invites[i].Name,
"InviterName": creator.DisplayName,
"InviterEmail": creator.Email,
"Link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
if err := m.emailing.SendHTML(ctx, invites[i].Email.String(), "You're Invited to Join SigNoz", emailtypes.TemplateNameInvitationEmail, map[string]any{
"inviter_email": creator.Email,
"link": fmt.Sprintf("%s/signup?token=%s", bulkInvites.Invites[i].FrontendBaseUrl, invites[i].Token),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
@@ -206,27 +208,21 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
return nil, err
}
if err := existingUser.ErrIfRoot(); err != nil {
return nil, errors.WithAdditionalf(err, "cannot update root user")
}
requestor, err := m.store.GetUser(ctx, valuer.MustNewUUID(updatedBy))
if err != nil {
return nil, err
}
// only displayName, role can be updated
if user.DisplayName == "" {
user.DisplayName = existingUser.DisplayName
}
if user.Role == "" {
user.Role = existingUser.Role
}
if user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
if user.Role != "" && user.Role != existingUser.Role && requestor.Role != types.RoleAdmin {
return nil, errors.New(errors.TypeForbidden, errors.CodeForbidden, "only admins can change roles")
}
// Make sure that th e request is not demoting the last admin user.
// also an admin user can only change role of their own or other user
if user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
// Make sure that the request is not demoting the last admin user.
if user.Role != "" && user.Role != existingUser.Role && existingUser.Role == types.RoleAdmin {
adminUsers, err := m.store.GetUsersByRoleAndOrgID(ctx, types.RoleAdmin, orgID)
if err != nil {
return nil, err
@@ -237,7 +233,7 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
}
if user.Role != existingUser.Role {
if user.Role != "" && user.Role != existingUser.Role {
err = m.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(existingUser.Role),
@@ -249,35 +245,28 @@ func (m *Module) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, u
}
}
user.UpdatedAt = time.Now()
updatedUser, err := m.store.UpdateUser(ctx, orgID, id, user)
if err != nil {
existingUser.Update(user.DisplayName, user.Role)
if err := m.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return nil, err
}
traits := types.NewTraitsFromUser(updatedUser)
m.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
return existingUser, nil
}
traits["updated_by"] = updatedBy
m.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
// if the role is updated then send an email
if existingUser.Role != updatedUser.Role {
if err := m.emailing.SendHTML(ctx, existingUser.Email.String(), "Your Role Has Been Updated in SigNoz", emailtypes.TemplateNameUpdateRole, map[string]any{
"CustomerName": existingUser.DisplayName,
"UpdatedByEmail": requestor.Email,
"OldRole": cases.Title(language.English).String(strings.ToLower(existingUser.Role.String())),
"NewRole": cases.Title(language.English).String(strings.ToLower(updatedUser.Role.String())),
}); err != nil {
m.settings.Logger().ErrorContext(ctx, "failed to send email", "error", err)
}
func (module *Module) UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
if err := module.store.UpdateUser(ctx, orgID, user); err != nil {
return err
}
if err := m.tokenizer.DeleteIdentity(ctx, valuer.MustNewUUID(id)); err != nil {
return nil, err
traits := types.NewTraitsFromUser(user)
module.analytics.IdentifyUser(ctx, user.OrgID.String(), user.ID.String(), traits)
module.analytics.TrackUser(ctx, user.OrgID.String(), user.ID.String(), "User Updated", traits)
if err := module.tokenizer.DeleteIdentity(ctx, user.ID); err != nil {
return err
}
return updatedUser, nil
return nil
}
func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error {
@@ -286,6 +275,10 @@ func (module *Module) DeleteUser(ctx context.Context, orgID valuer.UUID, id stri
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot delete root user")
}
if slices.Contains(types.AllIntegrationUserEmails, types.IntegrationUserEmail(user.Email.String())) {
return errors.New(errors.TypeForbidden, errors.CodeForbidden, "integration user cannot be deleted")
}
@@ -380,6 +373,10 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot reset password for root user")
}
token, err := module.GetOrCreateResetPasswordToken(ctx, user.ID)
if err != nil {
module.settings.Logger().ErrorContext(ctx, "failed to create reset password token", "error", err)
@@ -394,10 +391,9 @@ func (module *Module) ForgotPassword(ctx context.Context, orgID valuer.UUID, ema
if err := module.emailing.SendHTML(
ctx,
user.Email.String(),
"Reset your SigNoz password",
"A Password Reset Was Requested for SigNoz",
emailtypes.TemplateNameResetPassword,
map[string]any{
"Name": user.DisplayName,
"Link": resetLink,
"Expiry": humanizedTokenLifetime,
},
@@ -424,6 +420,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
return err
}
user, err := module.store.GetUser(ctx, valuer.MustNewUUID(password.UserID))
if err != nil {
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot reset password for root user")
}
if err := password.Update(passwd); err != nil {
return err
}
@@ -432,6 +437,15 @@ func (module *Module) UpdatePasswordByResetPasswordToken(ctx context.Context, to
}
func (module *Module) UpdatePassword(ctx context.Context, userID valuer.UUID, oldpasswd string, passwd string) error {
user, err := module.store.GetUser(ctx, userID)
if err != nil {
return err
}
if err := user.ErrIfRoot(); err != nil {
return errors.WithAdditionalf(err, "cannot change password for root user")
}
password, err := module.store.GetPasswordByUserID(ctx, userID)
if err != nil {
return err
@@ -493,7 +507,7 @@ func (m *Module) RevokeAPIKey(ctx context.Context, id, removedByUserID valuer.UU
}
func (module *Module) CreateFirstUser(ctx context.Context, organization *types.Organization, name string, email valuer.Email, passwd string) (*types.User, error) {
user, err := types.NewUser(name, email, types.RoleAdmin, organization.ID)
user, err := types.NewRootUser(name, email, organization.ID)
if err != nil {
return nil, err
}

View File

@@ -0,0 +1,187 @@
package impluser
import (
"context"
"time"
"github.com/SigNoz/signoz/pkg/authz"
"github.com/SigNoz/signoz/pkg/errors"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/modules/organization"
"github.com/SigNoz/signoz/pkg/modules/user"
"github.com/SigNoz/signoz/pkg/types"
"github.com/SigNoz/signoz/pkg/types/authtypes"
"github.com/SigNoz/signoz/pkg/types/roletypes"
"github.com/SigNoz/signoz/pkg/valuer"
)
type service struct {
settings factory.ScopedProviderSettings
store types.UserStore
module user.Module
orgGetter organization.Getter
authz authz.AuthZ
config user.RootConfig
stopC chan struct{}
}
func NewService(
providerSettings factory.ProviderSettings,
store types.UserStore,
module user.Module,
orgGetter organization.Getter,
authz authz.AuthZ,
config user.RootConfig,
) user.Service {
return &service{
settings: factory.NewScopedProviderSettings(providerSettings, "go.signoz.io/pkg/modules/user"),
store: store,
module: module,
orgGetter: orgGetter,
authz: authz,
config: config,
stopC: make(chan struct{}),
}
}
func (s *service) Start(ctx context.Context) error {
if !s.config.Enabled {
<-s.stopC
return nil
}
ticker := time.NewTicker(10 * time.Second)
defer ticker.Stop()
for {
err := s.reconcile(ctx)
if err == nil {
s.settings.Logger().InfoContext(ctx, "root user reconciliation completed successfully")
<-s.stopC
return nil
}
s.settings.Logger().WarnContext(ctx, "root user reconciliation failed, retrying", "error", err)
select {
case <-s.stopC:
return nil
case <-ticker.C:
continue
}
}
}
func (s *service) Stop(ctx context.Context) error {
close(s.stopC)
return nil
}
func (s *service) reconcile(ctx context.Context) error {
org, err := s.orgGetter.GetByName(ctx, s.config.OrgName)
if err != nil {
if errors.Ast(err, errors.TypeNotFound) {
newOrg := types.NewOrganizationWithName(s.config.OrgName)
_, err := s.module.CreateFirstUser(ctx, newOrg, s.config.Email.String(), s.config.Email, s.config.Password)
return err
}
return err
}
return s.reconcileRootUser(ctx, org.ID)
}
func (s *service) reconcileRootUser(ctx context.Context, orgID valuer.UUID) error {
existingRoot, err := s.store.GetRootUserByOrgID(ctx, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}
if existingRoot == nil {
return s.createOrPromoteRootUser(ctx, orgID)
}
return s.updateExistingRootUser(ctx, orgID, existingRoot)
}
func (s *service) createOrPromoteRootUser(ctx context.Context, orgID valuer.UUID) error {
existingUser, err := s.store.GetUserByEmailAndOrgID(ctx, s.config.Email, orgID)
if err != nil && !errors.Ast(err, errors.TypeNotFound) {
return err
}
if existingUser != nil {
oldRole := existingUser.Role
existingUser.PromoteToRoot()
if err := s.module.UpdateAnyUser(ctx, orgID, existingUser); err != nil {
return err
}
if oldRole != types.RoleAdmin {
if err := s.authz.ModifyGrant(ctx,
orgID,
roletypes.MustGetSigNozManagedRoleFromExistingRole(oldRole),
roletypes.MustGetSigNozManagedRoleFromExistingRole(types.RoleAdmin),
authtypes.MustNewSubject(authtypes.TypeableUser, existingUser.ID.StringValue(), orgID, nil),
); err != nil {
return err
}
}
return s.setPassword(ctx, existingUser.ID)
}
// Create new root user
newUser, err := types.NewRootUser(s.config.Email.String(), s.config.Email, orgID)
if err != nil {
return err
}
factorPassword, err := types.NewFactorPassword(s.config.Password, newUser.ID.StringValue())
if err != nil {
return err
}
return s.module.CreateUser(ctx, newUser, user.WithFactorPassword(factorPassword))
}
func (s *service) updateExistingRootUser(ctx context.Context, orgID valuer.UUID, existingRoot *types.User) error {
existingRoot.PromoteToRoot()
if existingRoot.Email != s.config.Email {
existingRoot.UpdateEmail(s.config.Email)
if err := s.module.UpdateAnyUser(ctx, orgID, existingRoot); err != nil {
return err
}
}
return s.setPassword(ctx, existingRoot.ID)
}
func (s *service) setPassword(ctx context.Context, userID valuer.UUID) error {
password, err := s.store.GetPasswordByUserID(ctx, userID)
if err != nil {
if !errors.Ast(err, errors.TypeNotFound) {
return err
}
factorPassword, err := types.NewFactorPassword(s.config.Password, userID.StringValue())
if err != nil {
return err
}
return s.store.CreatePassword(ctx, factorPassword)
}
if !password.Equals(s.config.Password) {
if err := password.Update(s.config.Password); err != nil {
return err
}
return s.store.UpdatePassword(ctx, password)
}
return nil
}
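For orientation, a minimal sketch of the shape user.RootConfig appears to take, inferred only from the fields the service above reads (Enabled, OrgName, Email, Password); the struct tags are assumptions and the real definition lives elsewhere in the repo.
package user
import "github.com/SigNoz/signoz/pkg/valuer"
// RootConfig (sketch): env-provisioned root user settings consumed by the
// reconciliation service. Field set inferred from usage; tags are assumed.
type RootConfig struct {
	Enabled  bool         `mapstructure:"enabled"`
	OrgName  string       `mapstructure:"org_name"`
	Email    valuer.Email `mapstructure:"email"`
	Password string       `mapstructure:"password"`
}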

View File

@@ -210,20 +210,24 @@ func (store *store) GetUsersByRoleAndOrgID(ctx context.Context, role types.Role,
return users, nil
}
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User) (*types.User, error) {
user.UpdatedAt = time.Now()
_, err := store.sqlstore.BunDB().NewUpdate().
func (store *store) UpdateUser(ctx context.Context, orgID valuer.UUID, user *types.User) error {
_, err := store.
sqlstore.
BunDBCtx(ctx).
NewUpdate().
Model(user).
Column("display_name").
Column("email").
Column("role").
Column("is_root").
Column("updated_at").
Where("id = ?", id).
Where("org_id = ?", orgID).
Where("id = ?", user.ID).
Exec(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user with id: %s does not exist in org: %s", id, orgID)
return store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "user does not exist in org: %s", orgID)
}
return user, nil
return nil
}
func (store *store) ListUsersByOrgID(ctx context.Context, orgID valuer.UUID) ([]*types.GettableUser, error) {
@@ -602,6 +606,22 @@ func (store *store) RunInTx(ctx context.Context, cb func(ctx context.Context) er
})
}
func (store *store) GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*types.User, error) {
user := new(types.User)
err := store.
sqlstore.
BunDBCtx(ctx).
NewSelect().
Model(user).
Where("org_id = ?", orgID).
Where("is_root = ?", true).
Scan(ctx)
if err != nil {
return nil, store.sqlstore.WrapNotFoundErrf(err, types.ErrCodeUserNotFound, "root user for org %s not found", orgID)
}
return user, nil
}
func (store *store) ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*types.User, error) {
users := []*types.User{}
err := store.

View File

@@ -0,0 +1,7 @@
package user
import "github.com/SigNoz/signoz/pkg/factory"
type Service interface {
factory.Service
}

View File

@@ -34,6 +34,9 @@ type Module interface {
ForgotPassword(ctx context.Context, orgID valuer.UUID, email valuer.Email, frontendBaseURL string) error
UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *types.User, updatedBy string) (*types.User, error)
// UpdateAnyUser updates a user and persists the changes to the database along with the analytics and identity deletion.
UpdateAnyUser(ctx context.Context, orgID valuer.UUID, user *types.User) error
DeleteUser(ctx context.Context, orgID valuer.UUID, id string, deletedBy string) error
// invite
@@ -54,6 +57,9 @@ type Module interface {
}
type Getter interface {
// Get root user by org id.
GetRootUserByOrgID(context.Context, valuer.UUID) (*types.User, error)
// Get gets the users based on the given id
ListByOrgID(context.Context, valuer.UUID) ([]*types.User, error)
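A hypothetical caller (names and values assumed, not part of this diff) illustrating the UpdateAnyUser contract documented above: mutate the user through its helpers, then persist in a single call; the module also refreshes the analytics traits and drops the cached identity.
package example
import (
	"context"
	"github.com/SigNoz/signoz/pkg/modules/user"
	"github.com/SigNoz/signoz/pkg/types"
	"github.com/SigNoz/signoz/pkg/valuer"
)
// renameAndPromote is an assumed helper; the display name and role below are
// example values only.
func renameAndPromote(ctx context.Context, users user.Module, orgID valuer.UUID, u *types.User) error {
	u.Update("Jane Doe", types.RoleAdmin)
	return users.UpdateAnyUser(ctx, orgID, u)
}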

View File

@@ -183,7 +183,7 @@ type APIHandlerOpts struct {
}
// NewAPIHandler returns an APIHandler
func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
func NewAPIHandler(opts APIHandlerOpts, config signoz.Config) (*APIHandler, error) {
querierOpts := querier.QuerierOptions{
Reader: opts.Reader,
Cache: opts.Signoz.Cache,
@@ -270,6 +270,11 @@ func NewAPIHandler(opts APIHandlerOpts) (*APIHandler, error) {
}
}
// If the root user is enabled, the setup is complete
if config.User.Root.Enabled {
aH.SetupCompleted = true
}
aH.Upgrader = &websocket.Upgrader{
CheckOrigin: func(r *http.Request) bool {
return true

View File

@@ -135,7 +135,7 @@ func NewServer(config signoz.Config, signoz *signoz.SigNoz) (*Server, error) {
Signoz: signoz,
QuerierAPI: querierAPI.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.Querier, signoz.Analytics),
QueryParserAPI: queryparser.NewAPI(signoz.Instrumentation.ToProviderSettings(), signoz.QueryParser),
})
}, config)
if err != nil {
return nil, err
}

View File

@@ -167,6 +167,7 @@ func NewSQLMigrationProviderFactories(
sqlmigration.NewMigrateRbacToAuthzFactory(sqlstore),
sqlmigration.NewMigratePublicDashboardsFactory(sqlstore),
sqlmigration.NewAddAnonymousPublicDashboardTransactionFactory(sqlstore),
sqlmigration.NewAddRootUserFactory(sqlstore, sqlschema),
)
}

View File

@@ -389,6 +389,8 @@ func New(
// Initialize all modules
modules := NewModules(sqlstore, tokenizer, emailing, providerSettings, orgGetter, alertmanager, analytics, querier, telemetrystore, telemetryMetadataStore, authNs, authz, cache, queryParser, config, dashboard)
userService := impluser.NewService(providerSettings, impluser.NewStore(sqlstore, providerSettings), modules.User, orgGetter, authz, config.User.Root)
// Initialize all handlers for the modules
handlers := NewHandlers(modules, providerSettings, querier, licensing, global, flagger, gateway, telemetryMetadataStore, authz)
@@ -438,6 +440,7 @@ func New(
factory.NewNamedService(factory.MustNewName("statsreporter"), statsReporter),
factory.NewNamedService(factory.MustNewName("tokenizer"), tokenizer),
factory.NewNamedService(factory.MustNewName("authz"), authz),
factory.NewNamedService(factory.MustNewName("user"), userService),
)
if err != nil {
return nil, err

View File

@@ -0,0 +1,80 @@
package sqlmigration
import (
"context"
"github.com/SigNoz/signoz/pkg/factory"
"github.com/SigNoz/signoz/pkg/sqlschema"
"github.com/SigNoz/signoz/pkg/sqlstore"
"github.com/uptrace/bun"
"github.com/uptrace/bun/migrate"
)
type addRootUser struct {
sqlstore sqlstore.SQLStore
sqlschema sqlschema.SQLSchema
}
func NewAddRootUserFactory(sqlstore sqlstore.SQLStore, sqlschema sqlschema.SQLSchema) factory.ProviderFactory[SQLMigration, Config] {
return factory.NewProviderFactory(factory.MustNewName("add_root_user"), func(ctx context.Context, providerSettings factory.ProviderSettings, config Config) (SQLMigration, error) {
return &addRootUser{
sqlstore: sqlstore,
sqlschema: sqlschema,
}, nil
})
}
func (migration *addRootUser) Register(migrations *migrate.Migrations) error {
if err := migrations.Register(migration.Up, migration.Down); err != nil {
return err
}
return nil
}
func (migration *addRootUser) Up(ctx context.Context, db *bun.DB) error {
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, false); err != nil {
return err
}
tx, err := db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer func() {
_ = tx.Rollback()
}()
table, uniqueConstraints, err := migration.sqlschema.GetTable(ctx, sqlschema.TableName("users"))
if err != nil {
return err
}
column := &sqlschema.Column{
Name: sqlschema.ColumnName("is_root"),
DataType: sqlschema.DataTypeBoolean,
Nullable: false,
}
sqls := migration.sqlschema.Operator().AddColumn(table, uniqueConstraints, column, false)
for _, sql := range sqls {
if _, err := tx.ExecContext(ctx, string(sql)); err != nil {
return err
}
}
if err := tx.Commit(); err != nil {
return err
}
if err := migration.sqlschema.ToggleFKEnforcement(ctx, db, true); err != nil {
return err
}
return nil
}
func (migration *addRootUser) Down(ctx context.Context, db *bun.DB) error {
return nil
}

View File

@@ -12,13 +12,12 @@ import (
var (
// Templates is a list of all the templates that are supported by the emailing service.
// This list should be updated whenever a new template is added.
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameUpdateRole, TemplateNameResetPassword}
Templates = []TemplateName{TemplateNameInvitationEmail, TemplateNameResetPassword}
)
var (
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation_email")}
TemplateNameUpdateRole = TemplateName{valuer.NewString("update_role")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password_email")}
TemplateNameInvitationEmail = TemplateName{valuer.NewString("invitation")}
TemplateNameResetPassword = TemplateName{valuer.NewString("reset_password")}
)
type TemplateName struct{ valuer.String }
@@ -27,8 +26,6 @@ func NewTemplateName(name string) (TemplateName, error) {
switch name {
case TemplateNameInvitationEmail.StringValue():
return TemplateNameInvitationEmail, nil
case TemplateNameUpdateRole.StringValue():
return TemplateNameUpdateRole, nil
case TemplateNameResetPassword.StringValue():
return TemplateNameResetPassword, nil
default:
@@ -40,7 +37,7 @@ func NewContent(template *template.Template, data map[string]any) ([]byte, error
buf := bytes.NewBuffer(nil)
err := template.Execute(buf, data)
if err != nil {
return nil, errors.WrapInternalf(err, errors.CodeInternal, "failed to execute template")
return nil, err
}
return buf.Bytes(), nil

View File

@@ -41,6 +41,22 @@ func NewOrganization(displayName string) *Organization {
}
}
func NewOrganizationWithName(name string) *Organization {
id := valuer.GenerateUUID()
return &Organization{
Identifiable: Identifiable{
ID: id,
},
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
Name: name,
DisplayName: name,
Key: NewOrganizationKey(id),
}
}
func NewOrganizationKey(orgID valuer.UUID) uint32 {
hasher := fnv.New32a()
@@ -74,6 +90,7 @@ type TTLSetting struct {
type OrganizationStore interface {
Create(context.Context, *Organization) error
Get(context.Context, valuer.UUID) (*Organization, error)
GetByName(context.Context, string) (*Organization, error)
GetAll(context.Context) ([]*Organization, error)
ListByKeyRange(context.Context, uint32, uint32) ([]*Organization, error)
Update(context.Context, *Organization) error

View File

@@ -11,15 +11,16 @@ import (
)
var (
ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
ErrPasswordNotFound = errors.MustNewCode("password_not_found")
ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
ErrCodeUserNotFound = errors.MustNewCode("user_not_found")
ErrCodeAmbiguousUser = errors.MustNewCode("ambiguous_user")
ErrUserAlreadyExists = errors.MustNewCode("user_already_exists")
ErrPasswordAlreadyExists = errors.MustNewCode("password_already_exists")
ErrResetPasswordTokenAlreadyExists = errors.MustNewCode("reset_password_token_already_exists")
ErrPasswordNotFound = errors.MustNewCode("password_not_found")
ErrResetPasswordTokenNotFound = errors.MustNewCode("reset_password_token_not_found")
ErrAPIKeyAlreadyExists = errors.MustNewCode("api_key_already_exists")
ErrAPIKeyNotFound = errors.MustNewCode("api_key_not_found")
ErrCodeRootUserOperationUnsupported = errors.MustNewCode("root_user_operation_unsupported")
)
type GettableUser = User
@@ -29,9 +30,10 @@ type User struct {
Identifiable
DisplayName string `bun:"display_name" json:"displayName"`
Email valuer.Email `bun:"email,type:text" json:"email"`
Role Role `bun:"role,type:text" json:"role"`
OrgID valuer.UUID `bun:"org_id,type:text" json:"orgId"`
Email valuer.Email `bun:"email" json:"email"`
Role Role `bun:"role" json:"role"`
OrgID valuer.UUID `bun:"org_id" json:"orgId"`
IsRoot bool `bun:"is_root" json:"isRoot"`
TimeAuditable
}
@@ -64,6 +66,7 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
Email: email,
Role: role,
OrgID: orgID,
IsRoot: false,
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
@@ -71,6 +74,65 @@ func NewUser(displayName string, email valuer.Email, role Role, orgID valuer.UUI
}, nil
}
func NewRootUser(displayName string, email valuer.Email, orgID valuer.UUID) (*User, error) {
if email.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "email is required")
}
if orgID.IsZero() {
return nil, errors.New(errors.TypeInvalidInput, errors.CodeInvalidInput, "orgID is required")
}
return &User{
Identifiable: Identifiable{
ID: valuer.GenerateUUID(),
},
DisplayName: displayName,
Email: email,
Role: RoleAdmin,
OrgID: orgID,
IsRoot: true,
TimeAuditable: TimeAuditable{
CreatedAt: time.Now(),
UpdatedAt: time.Now(),
},
}, nil
}
// Update applies mutable fields from the input to the user. Immutable fields
// (email, is_root, org_id, id) are preserved. Only non-zero input fields are applied.
func (u *User) Update(displayName string, role Role) {
if displayName != "" {
u.DisplayName = displayName
}
if role != "" {
u.Role = role
}
u.UpdatedAt = time.Now()
}
// PromoteToRoot promotes the user to a root user with admin role.
func (u *User) PromoteToRoot() {
u.IsRoot = true
u.Role = RoleAdmin
u.UpdatedAt = time.Now()
}
// UpdateEmail updates the email of the user.
func (u *User) UpdateEmail(email valuer.Email) {
u.Email = email
u.UpdatedAt = time.Now()
}
// ErrIfRoot returns an error if the user is a root user. The caller should
// enrich the error with the specific operation using errors.WithAdditionalf.
func (u *User) ErrIfRoot() error {
if u.IsRoot {
return errors.New(errors.TypeUnsupported, ErrCodeRootUserOperationUnsupported, "this operation is not supported for the root user")
}
return nil
}
func NewTraitsFromUser(user *User) map[string]any {
return map[string]any{
"name": user.DisplayName,
@@ -133,7 +195,7 @@ type UserStore interface {
// List users by email and org ids.
ListUsersByEmailAndOrgIDs(ctx context.Context, email valuer.Email, orgIDs []valuer.UUID) ([]*User, error)
UpdateUser(ctx context.Context, orgID valuer.UUID, id string, user *User) (*User, error)
UpdateUser(ctx context.Context, orgID valuer.UUID, user *User) error
DeleteUser(ctx context.Context, orgID string, id string) error
// Creates a password.
@@ -156,6 +218,9 @@ type UserStore interface {
CountByOrgID(ctx context.Context, orgID valuer.UUID) (int64, error)
// Get root user by org.
GetRootUserByOrgID(ctx context.Context, orgID valuer.UUID) (*User, error)
// Transaction
RunInTx(ctx context.Context, cb func(ctx context.Context) error) error
}
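Downstream callers can distinguish the new root-user guard from other failures; a minimal detection sketch (assumed helper, built on the errors.Ast check that the reconciliation service also relies on):
package example
import "github.com/SigNoz/signoz/pkg/errors"
// isRootProtected reports whether err originated from the ErrIfRoot guard,
// which is created with errors.TypeUnsupported and the
// root_user_operation_unsupported code.
func isRootProtected(err error) bool {
	return errors.Ast(err, errors.TypeUnsupported)
}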

View File

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>You're Invited to Join SigNoz</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
You've been invited by <strong>{{.inviter_email}}</strong> to join their SigNoz organization.
</p>
<p style="margin:0 0 12px;font-size:16px;color:#333;line-height:1.6">
A new account has been created for you with the following details:
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:20px;background:#f5f5f5;border-radius:6px;border-left:4px solid #4E74F8">
<p style="margin:0;font-size:15px;color:#333;line-height:1.6">
<strong>Email:</strong> {{.to}}
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Accept the invitation to get started.
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Accept Invitation
</a>
</td>
</tr>
</table>
<p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
Button not working? Copy and paste this link into your browser:
</p>
<p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
<a href="{{.link}}" style="color:#4E74F8;text-decoration:none">
{{.link}}
</a>
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
&#169; 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@@ -1,14 +0,0 @@
<!DOCTYPE html>
<html>
<body>
<p>Hi {{.CustomerName}},</p>
<p>You have been invited to join SigNoz project by {{.InviterName}} ({{.InviterEmail}}).</p>
<p>Please click on the following button to accept the invitation:</p>
<a href="{{.Link}}" style="background-color: #000000; color: white; padding: 14px 20px; text-align: center; text-decoration: none; display: inline-block;">Accept Invitation</a>
<p>Button not working? Paste the following link into your browser:</p>
<p>{{.Link}}</p>
<p>Follow docs here 👉 to <a href="https://signoz.io/docs/cloud/">Get Started with SigNoz Cloud</a></p>
<p>Thanks,</p>
<p>SigNoz Team</p>
</body>
</html>

View File

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>{{.subject}}</title>
</head>
<body style="margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,'Helvetica Neue',Arial,sans-serif;line-height:1.6;color:#333;background:#fff">
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="background:#fff">
<tr>
<td align="center" style="padding:0">
<table role="presentation" width="600" cellspacing="0" cellpadding="0" border="0" style="max-width:600px;width:100%">
{{ if .format.Header.Enabled }}
<tr>
<td align="center" style="padding:16px 20px 16px">
<img src="{{.format.Header.LogoURL}}" alt="SigNoz" width="160" height="40" style="display:block;border:0;outline:none;max-width:100%;height:auto">
</td>
</tr>
{{ end }}
<tr>
<td style="padding:16px 20px 16px">
<p style="margin:0 0 16px;font-size:16px;color:#333">
Hi there,
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
A password reset was requested for your SigNoz account.
</p>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Click the button below to reset your password:
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td align="center">
<a href="{{.Link}}" target="_blank" style="display:inline-block;padding:16px 48px;font-size:16px;font-weight:600;color:#fff;background:#4E74F8;text-decoration:none;border-radius:4px">
Reset Password
</a>
</td>
</tr>
</table>
<p style="margin:0 0 4px;font-size:13px;color:#666;text-align:center">
Button not working? Copy and paste this link into your browser:
</p>
<p style="margin:0 0 16px;font-size:13px;color:#4E74F8;word-break:break-all;text-align:center">
<a href="{{.Link}}" style="color:#4E74F8;text-decoration:none">
{{.Link}}
</a>
</p>
<table role="presentation" width="100%" cellspacing="0" cellpadding="0" border="0" style="margin:0 0 16px">
<tr>
<td style="padding:16px;background:#fff4e6;border-radius:6px;border-left:4px solid #ff9800">
<p style="margin:0;font-size:14px;color:#333;line-height:1.6">
<strong>⏱️ This link will expire in {{.Expiry}}.</strong>
</p>
</td>
</tr>
</table>
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
If you didn't request this password reset, please ignore this email. Your password will remain unchanged.
</p>
{{ if .format.Help.Enabled }}
<p style="margin:0 0 16px;font-size:16px;color:#333;line-height:1.6">
Need help? Chat with our team in the SigNoz application or email us at <a href="mailto:{{.format.Help.Email}}" style="color:#4E74F8;text-decoration:none">{{.format.Help.Email}}</a>.
</p>
{{ end }}
<p style="margin:0;font-size:16px;color:#333;line-height:1.6">
Thanks,<br><strong>The SigNoz Team</strong>
</p>
</td>
</tr>
{{ if .format.Footer.Enabled }}
<tr>
<td align="center" style="padding:8px 16px 8px">
<p style="margin:0 0 8px;font-size:12px;color:#999;line-height:1.5">
<a href="https://signoz.io/terms-of-service/" style="color:#4E74F8;text-decoration:none">Terms of Service</a> - <a href="https://signoz.io/privacy/" style="color:#4E74F8;text-decoration:none">Privacy Policy</a>
</p>
<p style="margin:0;font-size:12px;color:#999;line-height:1.5">
&#169; 2026 SigNoz Inc.
</p>
</td>
</tr>
{{ end }}
</table>
</td>
</tr>
</table>
</body>
</html>

View File

@@ -1,13 +0,0 @@
<!DOCTYPE html>
<html>
<body>
<p>Hello {{.Name}},</p>
<p>You requested a password reset for your SigNoz account.</p>
<p>Click the link below to reset your password:</p>
<a href="{{.Link}}">Reset Password</a>
<p>This link will expire in {{.Expiry}}.</p>
<p>If you didn't request this, please ignore this email. Your password will remain unchanged.</p>
<br>
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>

View File

@@ -1,21 +0,0 @@
<!DOCTYPE html>
<html>
<body>
Hi {{.CustomerName}},<br>
Your role in <strong>SigNoz</strong> has been updated by {{.UpdatedByEmail}}.
<p>
<strong>Previous Role:</strong> {{.OldRole}}<br>
<strong>New Role:</strong> {{.NewRole}}
</p>
{{if eq .OldRole "Admin"}}
<p>
If you were not expecting this change or have any questions, please contact us at <a href="mailto:support@signoz.io">support@signoz.io</a>.
</p>
{{else}}
<p>
If you were not expecting this change or have any questions, please reach out to your administrator.
</p>
{{end}}
<p>Best regards,<br>The SigNoz Team</p>
</body>
</html>

View File

@@ -112,7 +112,7 @@ def verify_webhook_alert_expectation(
break
# wait for some time before checking again
time.sleep(10)
time.sleep(1)
# We've waited but we didn't get the expected number of alerts
@@ -133,3 +133,15 @@ def verify_webhook_alert_expectation(
)
return True # should not reach here
def update_rule_channel_name(rule_data: dict, channel_name: str):
"""
Updates the channel name in the thresholds
so that alert notifications are sent to the given channel.
"""
thresholds = rule_data["condition"]["thresholds"]
if "kind" in thresholds and thresholds["kind"] == "basic":
# loop over all the specs and update the channels
for spec in thresholds["spec"]:
spec["channels"] = [channel_name]

View File

@@ -1,16 +1,18 @@
"""Fixtures for cloud integration tests."""
from typing import Callable, Optional
from http import HTTPStatus
from typing import Callable, Optional
import pytest
import requests
from fixtures import types
from fixtures.logger import setup_logger
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@pytest.fixture(scope="function")
def create_cloud_integration_account(
request: pytest.FixtureRequest,
@@ -24,9 +26,7 @@ def create_cloud_integration_account(
cloud_provider: str = "aws",
) -> dict:
nonlocal created_account_id, cloud_provider_used
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
)
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
request_payload = {
"account_config": {"regions": ["us-east-1"]},
@@ -59,9 +59,7 @@ def create_cloud_integration_account(
def _disconnect(admin_token: str, cloud_provider: str) -> requests.Response:
assert created_account_id
disconnect_endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{created_account_id}/disconnect"
)
disconnect_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{created_account_id}/disconnect"
return requests.post(
signoz.self.host_configs["8080"].get(disconnect_endpoint),
headers={"Authorization": f"Bearer {admin_token}"},

View File

@@ -1,4 +1,5 @@
"""Fixtures for cloud integration tests."""
from http import HTTPStatus
import requests

View File

@@ -43,6 +43,10 @@ class MetricsTimeSeries(ABC):
resource_attrs: dict[str, str] = {},
scope_attrs: dict[str, str] = {},
) -> None:
# Create a copy of labels to avoid mutating the caller's dictionary
labels = dict(labels)
# Add metric_name to the labels to support PromQL queries
labels["__name__"] = metric_name
self.env = env
self.metric_name = metric_name
self.temporality = temporality

View File

@@ -69,6 +69,10 @@ def signoz( # pylint: disable=too-many-arguments,too-many-positional-arguments
"SIGNOZ_GLOBAL_INGESTION__URL": "https://ingest.test.signoz.cloud",
"SIGNOZ_USER_PASSWORD_RESET_ALLOW__SELF": True,
"SIGNOZ_USER_PASSWORD_RESET_MAX__TOKEN__LIFETIME": "6h",
"RULES_EVAL_DELAY": "0s",
"SIGNOZ_ALERTMANAGER_SIGNOZ_POLL__INTERVAL": "5s",
"SIGNOZ_ALERTMANAGER_SIGNOZ_ROUTE_GROUP__WAIT": "1s",
"SIGNOZ_ALERTMANAGER_SIGNOZ_ROUTE_GROUP__INTERVAL": "5s",
}
| sqlstore.env
| clickhouse.env

View File

@@ -191,3 +191,15 @@ class AlertExpectation:
# seconds to wait for the alerts to be fired; if no
# alerts are fired in the expected time, the test will fail
wait_time_seconds: int
@dataclass(frozen=True)
class AlertTestCase:
# name of the test case
name: str
# path to the rule file in testdata directory
rule_path: str
# list of alert data that will be inserted into the database
alert_data: List[AlertData]
# list of alert expectations for the test case
alert_expectation: AlertExpectation

View File

@@ -8,6 +8,9 @@ from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingRespons
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
def test_webhook_notification_channel(
@@ -20,6 +23,7 @@ def test_webhook_notification_channel(
"""
Tests the creation and delivery of test alerts on the created notification channel
"""
logger.info("Setting up notification channel")
# Prepare notification channel name and webhook endpoint
notification_channel_name = f"notification-channel-{uuid.uuid4()}"
@@ -55,10 +59,10 @@ def test_webhook_notification_channel(
)
# TODO: @abhishekhugetech # pylint: disable=W0511
# Time required for Org to be registered
# in the alertmanager, default 1m.
# Time required for a newly created org to be registered in the alertmanager is 5 seconds (configured in signoz.py)
# this will be fixed after [https://github.com/SigNoz/engineering-pod/issues/3800]
time.sleep(65)
# 10 seconds is a safe margin for the org to be registered in the alertmanager
time.sleep(10)
# Call test API for the notification channel
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)

View File

@@ -0,0 +1,671 @@
import json
import uuid
from datetime import datetime, timedelta, timezone
from typing import Callable, List
import pytest
from wiremock.client import HttpMethods, Mapping, MappingRequest, MappingResponse
from fixtures import types
from fixtures.alertutils import (
update_rule_channel_name,
verify_webhook_alert_expectation,
)
from fixtures.logger import setup_logger
from fixtures.utils import get_testdata_file_path
# Test cases for match types and compare operators use a wait time of 30 seconds to verify the alert expectation.
# The alert data is positioned to fire the alert on the first eval by the rule manager; the eval frequency
# for most alert rules is set to 15s, so considering this delay plus some delay from the alertmanager's
# group_wait and group_interval, even in the worst case most alerts should be triggered in about 30 seconds.
TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS = [
types.AlertTestCase(
name="test_threshold_above_at_least_once",
rule_path="alerts/test_scenarios/threshold_above_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_at_least_once/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_above_at_least_once",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_above_all_the_time",
rule_path="alerts/test_scenarios/threshold_above_all_the_time/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_all_the_time/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_above_all_the_time",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_above_in_total",
rule_path="alerts/test_scenarios/threshold_above_in_total/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_above_in_total/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_above_in_total",
"threshold.name": "critical",
"service": "server",
},
),
types.FiringAlert(
labels={
"alertname": "threshold_above_in_total",
"threshold.name": "critical",
"service": "api",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_above_average",
rule_path="alerts/test_scenarios/threshold_above_average/rule.json",
alert_data=[
types.AlertData(
type="traces",
data_path="alerts/test_scenarios/threshold_above_average/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_above_average",
"threshold.name": "critical",
}
),
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last, pylint: disable=W0511
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed
# types.AlertTestCase(
# name="test_threshold_above_last",
# rule_path="alerts/test_scenarios/threshold_above_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_above_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_above_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_below_at_least_once",
rule_path="alerts/test_scenarios/threshold_below_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="logs",
data_path="alerts/test_scenarios/threshold_below_at_least_once/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_below_at_least_once",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_below_all_the_time",
rule_path="alerts/test_scenarios/threshold_below_all_the_time/rule.json",
alert_data=[
types.AlertData(
type="logs",
data_path="alerts/test_scenarios/threshold_below_all_the_time/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_below_all_the_time",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_below_in_total",
rule_path="alerts/test_scenarios/threshold_below_in_total/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_below_in_total/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_below_in_total",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_below_average",
rule_path="alerts/test_scenarios/threshold_below_average/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_below_average/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_below_average",
"threshold.name": "critical",
}
),
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last,
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
# types.AlertTestCase(
# name="test_threshold_below_last",
# rule_path="alerts/test_scenarios/threshold_below_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_below_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_below_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_equal_to_at_least_once",
rule_path="alerts/test_scenarios/threshold_equal_to_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_equal_to_at_least_once/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_equal_to_at_least_once",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_equal_to_all_the_time",
rule_path="alerts/test_scenarios/threshold_equal_to_all_the_time/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_equal_to_all_the_time/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_equal_to_all_the_time",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_equal_to_in_total",
rule_path="alerts/test_scenarios/threshold_equal_to_in_total/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_equal_to_in_total/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_equal_to_in_total",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_equal_to_average",
rule_path="alerts/test_scenarios/threshold_equal_to_average/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_equal_to_average/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_equal_to_average",
"threshold.name": "critical",
}
),
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last,
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
# types.AlertTestCase(
# name="test_threshold_equal_to_last",
# rule_path="alerts/test_scenarios/threshold_equal_to_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_equal_to_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_equal_to_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
types.AlertTestCase(
name="test_threshold_not_equal_to_at_least_once",
rule_path="alerts/test_scenarios/threshold_not_equal_to_at_least_once/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_not_equal_to_at_least_once/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_not_equal_to_at_least_once",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_not_equal_to_all_the_time",
rule_path="alerts/test_scenarios/threshold_not_equal_to_all_the_time/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_not_equal_to_all_the_time/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_not_equal_to_all_the_time",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_not_equal_to_in_total",
rule_path="alerts/test_scenarios/threshold_not_equal_to_in_total/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_not_equal_to_in_total/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_not_equal_to_in_total",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_threshold_not_equal_to_average",
rule_path="alerts/test_scenarios/threshold_not_equal_to_average/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/threshold_not_equal_to_average/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "threshold_not_equal_to_average",
"threshold.name": "critical",
}
),
],
),
),
# TODO: @abhishekhugetech enable the test for matchType last,
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3801) with matchType last is fixed, pylint: disable=W0511
# types.AlertTestCase(
# name="test_threshold_not_equal_to_last",
# rule_path="alerts/test_scenarios/threshold_not_equal_to_last/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/threshold_not_equal_to_last/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# wait_time_seconds=30,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "threshold_not_equal_to_last",
# "threshold.name": "critical",
# }
# ),
# ],
# ),
# ),
]
# test cases for unit conversion
TEST_RULES_UNIT_CONVERSION = [
types.AlertTestCase(
name="test_unit_conversion_bytes_to_mb",
rule_path="alerts/test_scenarios/unit_conversion_bytes_to_mb/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/unit_conversion_bytes_to_mb/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "unit_conversion_bytes_to_mb",
"threshold.name": "critical",
}
),
],
),
),
types.AlertTestCase(
name="test_unit_conversion_ms_to_second",
rule_path="alerts/test_scenarios/unit_conversion_ms_to_second/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/unit_conversion_ms_to_second/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "unit_conversion_ms_to_second",
"threshold.name": "critical",
}
),
],
),
),
]
# miscellaneous test cases: no data and multi threshold
TEST_RULES_MISCELLANEOUS = [
types.AlertTestCase(
name="test_no_data_rule_test",
rule_path="alerts/test_scenarios/no_data_rule_test/rule.json",
alert_data=[
types.AlertData(
type="metrics",
data_path="alerts/test_scenarios/no_data_rule_test/alert_data.jsonl",
),
],
alert_expectation=types.AlertExpectation(
should_alert=True,
wait_time_seconds=30,
expected_alerts=[
types.FiringAlert(
labels={
"alertname": "[No data] no_data_rule_test",
"nodata": "true",
}
),
],
),
),
# TODO: @abhishekhugetech enable the test for multi threshold rule, pylint: disable=W0511
# after the [issue](https://github.com/SigNoz/engineering-pod/issues/3934) with alertManager is resolved
# types.AlertTestCase(
# name="test_multi_threshold_rule_test",
# rule_path="alerts/test_scenarios/multi_threshold_rule_test/rule.json",
# alert_data=[
# types.AlertData(
# type="metrics",
# data_path="alerts/test_scenarios/multi_threshold_rule_test/alert_data.jsonl",
# ),
# ],
# alert_expectation=types.AlertExpectation(
# should_alert=True,
# # the second alert will be fired with some delay from alert manager's group_interval
# # so taking this into consideration, the wait time is 90 seconds (30s + 30s for the next alert + 30s buffer)
# wait_time_seconds=90,
# expected_alerts=[
# types.FiringAlert(
# labels={
# "alertname": "multi_threshold_rule_test",
# "threshold.name": "info",
# }
# ),
# types.FiringAlert(
# labels={
# "alertname": "multi_threshold_rule_test",
# "threshold.name": "warning",
# }
# ),
# ],
# ),
# ),
]
logger = setup_logger(__name__)
@pytest.mark.parametrize(
"alert_test_case",
TEST_RULES_MATCH_TYPE_AND_COMPARE_OPERATORS
+ TEST_RULES_UNIT_CONVERSION
+ TEST_RULES_MISCELLANEOUS,
ids=lambda alert_test_case: alert_test_case.name,
)
def test_basic_alert_rule_conditions(
# Notification channel related fixtures
notification_channel: types.TestContainerDocker,
make_http_mocks: Callable[[types.TestContainerDocker, List[Mapping]], None],
create_webhook_notification_channel: Callable[[str, str, dict, bool], str],
# Alert rule related fixtures
create_alert_rule: Callable[[dict], str],
# Alert data insertion related fixtures
insert_alert_data: Callable[[List[types.AlertData], datetime], None],
alert_test_case: types.AlertTestCase,
):
# Prepare notification channel name and webhook endpoint
notification_channel_name = str(uuid.uuid4())
webhook_endpoint_path = f"/alert/{notification_channel_name}"
notification_url = notification_channel.container_configs["8080"].get(
webhook_endpoint_path
)
logger.info("notification_url: %s", {"notification_url": notification_url})
# register the mock endpoint in notification channel
make_http_mocks(
notification_channel,
[
Mapping(
request=MappingRequest(
method=HttpMethods.POST,
url=webhook_endpoint_path,
),
response=MappingResponse(
status=200,
json_body={},
),
persistent=False,
)
],
)
# Create an alert channel using the given route
create_webhook_notification_channel(
channel_name=notification_channel_name,
webhook_url=notification_url,
http_config={},
send_resolved=False,
)
logger.info(
"alert channel created with name: %s",
{"notification_channel_name": notification_channel_name},
)
# Insert alert data
insert_alert_data(
alert_test_case.alert_data,
base_time=datetime.now(tz=timezone.utc) - timedelta(minutes=5),
)
# Create Alert Rule
rule_path = get_testdata_file_path(alert_test_case.rule_path)
with open(rule_path, "r", encoding="utf-8") as f:
rule_data = json.loads(f.read())
# Update the channel name in the rule data
update_rule_channel_name(rule_data, notification_channel_name)
rule_id = create_alert_rule(rule_data)
logger.info(
"rule created with id: %s",
{"rule_id": rule_id, "rule_name": rule_data["alert"]},
)
# Verify alert expectation
verify_webhook_alert_expectation(
notification_channel,
notification_channel_name,
alert_test_case.alert_expectation,
)

View File

@@ -1,7 +1,6 @@
from http import HTTPStatus
from typing import Callable
import pytest
import requests
from fixtures import types
@@ -21,7 +20,9 @@ def test_generate_connection_url(
# Get authentication token for admin user
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
cloud_provider = "aws"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
)
# Prepare request payload
request_payload = {
@@ -65,9 +66,7 @@ def test_generate_connection_url(
data = response_data["data"]
# Assert account_id is a valid UUID format
assert (
len(data["account_id"]) > 0
), "account_id should be a non-empty string (UUID)"
assert len(data["account_id"]) > 0, "account_id should be a non-empty string (UUID)"
# Assert connection_url contains expected CloudFormation parameters
connection_url = data["connection_url"]
@@ -111,7 +110,9 @@ def test_generate_connection_url_unsupported_provider(
# Try with GCP (unsupported)
cloud_provider = "gcp"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/generate-connection-url"
)
request_payload = {
"account_config": {"regions": ["us-central1"]},

View File

@@ -1,17 +1,13 @@
import uuid
from http import HTTPStatus
from typing import Callable
import uuid
import pytest
import requests
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
from fixtures.cloudintegrations import (
create_cloud_integration_account,
)
from fixtures.cloudintegrationsutils import simulate_agent_checkin
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@@ -40,8 +36,9 @@ def test_list_connected_accounts_empty(
data = response_data.get("data", response_data)
assert "accounts" in data, "Response should contain 'accounts' field"
assert isinstance(data["accounts"], list), "Accounts should be a list"
assert len(data["accounts"]) == 0, "Accounts list should be empty when no accounts are connected"
assert (
len(data["accounts"]) == 0
), "Accounts list should be empty when no accounts are connected"
def test_list_connected_accounts_with_account(
@@ -60,7 +57,9 @@ def test_list_connected_accounts_with_account(
# Simulate agent check-in to mark as connected
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# List accounts
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts"
@@ -87,7 +86,6 @@ def test_list_connected_accounts_with_account(
assert "status" in account, "Account should have status field"
def test_get_account_status(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -163,16 +161,16 @@ def test_update_account_config(
# Simulate agent check-in to mark as connected
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Update account configuration
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{account_id}/config"
)
updated_config = {
"config": {"regions": ["us-east-1", "us-west-2", "eu-west-1"]}
}
updated_config = {"config": {"regions": ["us-east-1", "us-west-2", "eu-west-1"]}}
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
@@ -198,7 +196,6 @@ def test_update_account_config(
timeout=10,
)
list_response_data = list_response.json()
list_data = list_response_data.get("data", list_response_data)
account = next((a for a in list_data["accounts"] if a["id"] == account_id), None)
@@ -213,7 +210,6 @@ def test_update_account_config(
}, "Regions should match updated config"
def test_disconnect_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -230,7 +226,9 @@ def test_disconnect_account(
# Simulate agent check-in to mark as connected
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Disconnect the account
endpoint = (
@@ -262,8 +260,9 @@ def test_disconnect_account(
disconnected_account = next(
(a for a in list_data["accounts"] if a["id"] == account_id), None
)
assert disconnected_account is None, f"Account {account_id} should be removed from connected accounts"
assert (
disconnected_account is None
), f"Account {account_id} should be removed from connected accounts"
def test_disconnect_account_not_found(
@@ -277,9 +276,7 @@ def test_disconnect_account_not_found(
cloud_provider = "aws"
fake_account_id = "00000000-0000-0000-0000-000000000000"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{fake_account_id}/disconnect"
)
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/accounts/{fake_account_id}/disconnect"
response = requests.post(
signoz.self.host_configs["8080"].get(endpoint),
@@ -292,7 +289,6 @@ def test_disconnect_account_not_found(
), f"Expected 404, got {response.status_code}"
def test_list_accounts_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument

View File

@@ -1,17 +1,13 @@
import uuid
from http import HTTPStatus
from typing import Callable
import uuid
import pytest
import requests
from fixtures import types
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.logger import setup_logger
from fixtures.cloudintegrations import (
create_cloud_integration_account,
)
from fixtures.cloudintegrationsutils import simulate_agent_checkin
from fixtures.logger import setup_logger
logger = setup_logger(__name__)
@@ -50,7 +46,6 @@ def test_list_services_without_account(
assert "icon" in service, "Service should have 'icon' field"
def test_list_services_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -66,7 +61,9 @@ def test_list_services_with_account(
account_id = account_data["account_id"]
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# List services for the account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services?cloud_account_id={cloud_account_id}"
@@ -94,7 +91,6 @@ def test_list_services_with_account(
assert "icon" in service, "Service should have 'icon' field"
def test_get_service_details_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -141,7 +137,6 @@ def test_get_service_details_without_account(
assert isinstance(data["assets"], dict), "Assets should be a dictionary"
def test_get_service_details_with_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -157,7 +152,9 @@ def test_get_service_details_with_account(
account_id = account_data["account_id"]
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get list of services first
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
@@ -196,7 +193,6 @@ def test_get_service_details_with_account(
assert "status" in data, "Config should have 'status' field"
def test_get_service_details_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -220,7 +216,6 @@ def test_get_service_details_invalid_service(
), f"Expected 404, got {response.status_code}"
def test_list_services_unsupported_provider(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -243,7 +238,6 @@ def test_list_services_unsupported_provider(
), f"Expected 400, got {response.status_code}"
def test_update_service_config(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -259,7 +253,9 @@ def test_update_service_config(
account_id = account_data["account_id"]
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get list of services to pick a valid service ID
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
@@ -274,7 +270,9 @@ def test_update_service_config(
service_id = list_data["services"][0]["id"]
# Update service configuration
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
)
config_payload = {
"cloud_account_id": cloud_account_id,
@@ -306,7 +304,6 @@ def test_update_service_config(
assert "logs" in data["config"], "Config should contain 'logs' field"
def test_update_service_config_without_account(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -329,7 +326,9 @@ def test_update_service_config_without_account(
service_id = list_data["services"][0]["id"]
# Try to update config with non-existent account
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
)
fake_cloud_account_id = str(uuid.uuid4())
config_payload = {
@@ -351,7 +350,6 @@ def test_update_service_config_without_account(
), f"Expected 500 for non-existent account, got {response.status_code}"
def test_update_service_config_invalid_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -367,11 +365,15 @@ def test_update_service_config_invalid_service(
account_id = account_data["account_id"]
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Try to update config for invalid service
fake_service_id = "non-existent-service"
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{fake_service_id}/config"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/services/{fake_service_id}/config"
)
config_payload = {
"cloud_account_id": cloud_account_id,
@@ -392,7 +394,6 @@ def test_update_service_config_invalid_service(
), f"Expected 404 for invalid service, got {response.status_code}"
def test_update_service_config_disable_service(
signoz: types.SigNoz,
create_user_admin: types.Operation, # pylint: disable=unused-argument
@@ -408,7 +409,9 @@ def test_update_service_config_disable_service(
account_id = account_data["account_id"]
cloud_account_id = str(uuid.uuid4())
simulate_agent_checkin(signoz, admin_token, cloud_provider, account_id, cloud_account_id)
simulate_agent_checkin(
signoz, admin_token, cloud_provider, account_id, cloud_account_id
)
# Get a valid service
list_endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services"
@@ -422,7 +425,9 @@ def test_update_service_config_disable_service(
service_id = list_data["services"][0]["id"]
# First enable the service
endpoint = f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
endpoint = (
f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
)
enable_payload = {
"cloud_account_id": cloud_account_id,

View File
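Every service-config test above builds the same /services/{service_id}/config endpoint and a payload keyed by cloud_account_id, then reads back fields such as config["logs"]. A minimal sketch of such an update, assuming the config body carries logs/metrics objects with an enabled flag and that the update is a POST; the exact schema, HTTP method, response envelope, and Authorization header format are not visible in this diff and are assumptions.

from http import HTTPStatus

import requests


def update_service_config_sketch(
    signoz, admin_token, cloud_provider, service_id, cloud_account_id, enabled=True
):
    endpoint = (
        f"/api/v1/cloud-integrations/{cloud_provider}/services/{service_id}/config"
    )
    config_payload = {
        "cloud_account_id": cloud_account_id,
        # The tests above assert the returned config contains a "logs" field;
        # the enabled-flag shape below is an illustrative assumption.
        "config": {
            "logs": {"enabled": enabled},
            "metrics": {"enabled": enabled},
        },
    }
    response = requests.post(  # HTTP method assumed
        signoz.self.host_configs["8080"].get(endpoint),
        json=config_payload,
        headers={"Authorization": f"Bearer {admin_token}"},  # header format assumed
        timeout=2,
    )
    assert response.status_code == HTTPStatus.OK  # expected status assumed
    return response.json()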

@@ -2,11 +2,11 @@
Look at the multi_temporality_counters_1h.jsonl file for the relevant data
"""
import os
import random
from datetime import datetime, timedelta, timezone
from http import HTTPStatus
import random
from typing import Any, Callable, List
from typing import Callable, List
import pytest
from fixtures import types
@@ -21,8 +21,13 @@ from fixtures.querier import (
from fixtures.utils import get_testdata_file_path
MULTI_TEMPORALITY_FILE = get_testdata_file_path("multi_temporality_counters_1h.jsonl")
MULTI_TEMPORALITY_FILE_10h = get_testdata_file_path("multi_temporality_counters_10h.jsonl")
MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path("multi_temporality_counters_24h.jsonl")
MULTI_TEMPORALITY_FILE_10h = get_testdata_file_path(
"multi_temporality_counters_10h.jsonl"
)
MULTI_TEMPORALITY_FILE_24h = get_testdata_file_path(
"multi_temporality_counters_24h.jsonl"
)
@pytest.mark.parametrize(
"time_aggregation, expected_value_at_31st_minute, expected_value_at_32nd_minute, steady_value",
@@ -39,7 +44,7 @@ def test_with_steady_values_and_reset(
time_aggregation: str,
expected_value_at_31st_minute: float,
expected_value_at_32nd_minute: float,
steady_value: float
steady_value: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
@@ -69,22 +74,21 @@ def test_with_steady_values_and_reset(
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 59
# the counter reset happened at the 31st minute
assert (
result_values[30]["value"] == expected_value_at_31st_minute
)
assert (
result_values[31]["value"] == expected_value_at_32nd_minute
)
assert result_values[30]["value"] == expected_value_at_31st_minute
assert result_values[31]["value"] == expected_value_at_32nd_minute
assert (
result_values[39]["value"] == steady_value
) # 39th minute is when cumulative shifts to delta
) # 39th minute is when cumulative shifts to delta
count_of_steady_rate = sum(1 for v in result_values if v["value"] == steady_value)
assert (
count_of_steady_rate >= 56
) # 59 - (1 reset + 1 high rate + 1 at the beginning)
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
assert v["value"] >= 0, f"{time_aggregation} should not be negative: {v['value']}"
assert (
v["value"] >= 0
), f"{time_aggregation} should not be negative: {v['value']}"
@pytest.mark.parametrize(
"time_aggregation, stable_health_value, stable_products_value, stable_checkout_value, spike_checkout_value, stable_orders_value, spike_users_value",
@@ -161,20 +165,26 @@ def test_group_by_endpoint(
assert (
len(health_values) >= 58
), f"Expected >= 58 values for /health, got {len(health_values)}"
count_steady_health = sum(1 for v in health_values if v["value"] == stable_health_value)
count_steady_health = sum(
1 for v in health_values if v["value"] == stable_health_value
)
assert (
count_steady_health >= 57
), f"Expected >= 57 steady rate values ({stable_health_value}) for /health, got {count_steady_health}"
# all /health rates should be stable except possibly first/last due to boundaries
for v in health_values[1:-1]:
assert v["value"] == stable_health_value, f"Expected /health rate {stable_health_value}, got {v['value']}"
assert (
v["value"] == stable_health_value
), f"Expected /health rate {stable_health_value}, got {v['value']}"
# /products: 51 data points with 10-minute gap (t20-t29 missing), steady +20/min
products_values = endpoint_values["/products"]
assert (
len(products_values) >= 49
), f"Expected >= 49 values for /products, got {len(products_values)}"
count_steady_products = sum(1 for v in products_values if v["value"] == stable_products_value)
count_steady_products = sum(
1 for v in products_values if v["value"] == stable_products_value
)
# most values should be stable, some boundary values differ due to 10-min gap
assert (
@@ -182,7 +192,9 @@ def test_group_by_endpoint(
), f"Expected >= 46 steady rate values ({stable_products_value}) for /products, got {count_steady_products}"
# check that non-stable values are due to gap averaging (should be lower)
gap_boundary_values = [v["value"] for v in products_values if v["value"] != stable_products_value]
gap_boundary_values = [
v["value"] for v in products_values if v["value"] != stable_products_value
]
for val in gap_boundary_values:
assert (
0 < val < stable_products_value
@@ -193,12 +205,16 @@ def test_group_by_endpoint(
assert (
len(checkout_values) >= 59
), f"Expected >= 59 values for /checkout, got {len(checkout_values)}"
count_steady_checkout = sum(1 for v in checkout_values if v["value"] == stable_checkout_value)
count_steady_checkout = sum(
1 for v in checkout_values if v["value"] == stable_checkout_value
)
assert (
count_steady_checkout >= 53
), f"Expected >= 53 steady {time_aggregation} values ({stable_checkout_value}) for /checkout, got {count_steady_checkout}"
# check that spike values exist (traffic spike +50/min at t40-t44)
count_spike_checkout = sum(1 for v in checkout_values if v["value"] == spike_checkout_value)
count_spike_checkout = sum(
1 for v in checkout_values if v["value"] == spike_checkout_value
)
assert (
count_spike_checkout >= 4
), f"Expected >= 4 spike {time_aggregation} values ({spike_checkout_value}) for /checkout, got {count_spike_checkout}"
@@ -220,12 +236,16 @@ def test_group_by_endpoint(
assert (
len(orders_values) >= 58
), f"Expected >= 58 values for /orders, got {len(orders_values)}"
count_steady_orders = sum(1 for v in orders_values if v["value"] == stable_orders_value)
count_steady_orders = sum(
1 for v in orders_values if v["value"] == stable_orders_value
)
assert (
count_steady_orders >= 55
), f"Expected >= 55 steady {time_aggregation} values ({stable_orders_value}) for /orders, got {count_steady_orders}"
# check for counter reset effects - there should be some non-standard values
non_standard_orders = [v["value"] for v in orders_values if v["value"] != stable_orders_value]
non_standard_orders = [
v["value"] for v in orders_values if v["value"] != stable_orders_value
]
assert (
len(non_standard_orders) >= 2
), f"Expected >= 2 non-standard values due to counter reset, got {non_standard_orders}"
@@ -252,6 +272,7 @@ def test_group_by_endpoint(
count_increment_rate >= 8
), f"Expected >= 8 increment {time_aggregation} values ({spike_users_value}) for /users, got {count_increment_rate}"
@pytest.mark.parametrize(
"time_aggregation, expected_value_at_30th_minute, expected_value_at_31st_minute, value_at_switch",
[
@@ -267,7 +288,7 @@ def test_for_service_with_switch(
time_aggregation: str,
expected_value_at_30th_minute: float,
expected_value_at_31st_minute: float,
value_at_switch: float
value_at_switch: float,
) -> None:
now = datetime.now(tz=timezone.utc).replace(second=0, microsecond=0)
start_ms = int((now - timedelta(minutes=65)).timestamp() * 1000)
@@ -296,21 +317,18 @@ def test_for_service_with_switch(
data = response.json()
result_values = sorted(get_series_values(data, "A"), key=lambda x: x["timestamp"])
assert len(result_values) >= 60
assert result_values[30]["value"] == expected_value_at_30th_minute # 0.183
assert result_values[31]["value"] == expected_value_at_31st_minute # 0.183
assert result_values[38]["value"] == value_at_switch # 0.25
assert (
result_values[30]["value"] == expected_value_at_30th_minute #0.183
)
assert (
result_values[31]["value"] == expected_value_at_31st_minute # 0.183
)
assert (
result_values[38]["value"] == value_at_switch # 0.25
)
assert (
result_values[39]["value"] == value_at_switch # 0.25
) # 39th minute is when cumulative shifts to delta
result_values[39]["value"] == value_at_switch # 0.25
) # 39th minute is when cumulative shifts to delta
# All rates should be non-negative (stale periods = 0 rate)
for v in result_values:
assert v["value"] >= 0, f"{time_aggregation} should not be negative: {v['value']}"
assert (
v["value"] >= 0
), f"{time_aggregation} should not be negative: {v['value']}"
@pytest.mark.parametrize(
"time_aggregation, expected_value",
@@ -355,6 +373,7 @@ def test_for_week_long_time_range(
for value in result_values[1:]:
assert value["value"] == expected_value
@pytest.mark.parametrize(
"time_aggregation, expected_value",
[

View File
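The expected values in these parametrized tests fall out of simple per-second rate arithmetic on counters sampled once a minute. A small worked sketch under the usual conventions (60-second steps, a drop treated as a counter reset, rates clamped to be non-negative); the +11/min and +15/min increments are illustrative assumptions chosen only to reproduce the 0.183 and 0.25 figures in the comments above.

def per_second_rate(prev: float, curr: float, step_seconds: float = 60.0) -> float:
    # Rate over one step; a drop is treated as a counter reset and the
    # counter is assumed to have restarted from zero.
    delta = curr - prev
    if delta < 0:
        delta = max(curr, 0.0)
    return delta / step_seconds


# Illustrative arithmetic only: an increment of 11 per minute gives ~0.183/s,
# and 15 per minute gives 0.25/s.
assert round(per_second_rate(100, 111), 3) == 0.183
assert per_second_rate(100, 115) == 0.25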

@@ -1,23 +1,23 @@
import pytest
from http import HTTPStatus
from typing import Callable
import pytest
import requests
from sqlalchemy import sql
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD
from fixtures.types import Operation, SigNoz
ANONYMOUS_USER_ID = "00000000-0000-0000-0000-000000000000"
def test_managed_roles_create_on_register(
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# get the list of all roles.
response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/roles"),
@@ -32,18 +32,22 @@ def test_managed_roles_create_on_register(
# since this check happens immediately post registration, all the managed roles should be present.
assert len(data) == 4
role_names = {role["name"] for role in data}
expected_names = {"signoz-admin", "signoz-viewer", "signoz-editor", "signoz-anonymous"}
expected_names = {
"signoz-admin",
"signoz-viewer",
"signoz-editor",
"signoz-anonymous",
}
# compare as sets since this check is order-insensitive; a direct list match would be order-sensitive.
assert set(role_names) == expected_names
def test_root_user_signoz_admin_assignment(
request: pytest.FixtureRequest,
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
):
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# Get the user from the /user/me endpoint and extract the id
@@ -64,14 +68,16 @@ def test_root_user_signoz_admin_assignment(
# this validates to some extent that the role assignment is complete under the assumption that middleware is functioning as expected.
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
# Loop over the roles and get the org_id and id for signoz-admin role
roles = response.json()["data"]
admin_role_entry = next((role for role in roles if role["name"] == "signoz-admin"), None)
admin_role_entry = next(
(role for role in roles if role["name"] == "signoz-admin"), None
)
assert admin_role_entry is not None
org_id = admin_role_entry["orgId"]
# to be super sure of authorization server, let's validate the tuples in DB as well.
# to be super sure of authorization server, let's validate the tuples in DB as well.
# todo[@vikrantgupta25]: replace this with role members handler once built.
with signoz.sqlstore.conn.connect() as conn:
# verify the entry present for role assignment
@@ -80,15 +86,14 @@ def test_root_user_signoz_admin_assignment(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
)
tuple_row = tuple_result.mappings().fetchone()
assert tuple_row is not None
# check that the tuple is for role assignment
assert tuple_row['object_type'] == "role"
assert tuple_row['relation'] == "assignee"
assert tuple_row["object_type"] == "role"
assert tuple_row["relation"] == "assignee"
if request.config.getoption("--sqlstore-provider") == 'sqlite':
if request.config.getoption("--sqlstore-provider") == "sqlite":
user_object_id = f"organization/{org_id}/user/{user_id}"
assert tuple_row["user_object_type"] == "user"
assert tuple_row["user_object_id"] == user_object_id
@@ -97,13 +102,13 @@ def test_root_user_signoz_admin_assignment(
assert tuple_row["user_type"] == "user"
assert tuple_row["_user"] == _user
def test_anonymous_user_signoz_anonymous_assignment(
request: pytest.FixtureRequest,
signoz: SigNoz,
create_user_admin: Operation, # pylint: disable=unused-argument
get_token: Callable[[str, str], str],
):
):
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
response = requests.get(
@@ -115,14 +120,16 @@ def test_anonymous_user_signoz_anonymous_assignment(
# this validates to some extent that the role assignment is complete under the assumption that middleware is functioning as expected.
assert response.status_code == HTTPStatus.OK
assert response.json()["status"] == "success"
# Loop over the roles and get the org_id and id for the signoz-anonymous role
roles = response.json()["data"]
admin_role_entry = next((role for role in roles if role["name"] == "signoz-anonymous"), None)
admin_role_entry = next(
(role for role in roles if role["name"] == "signoz-anonymous"), None
)
assert admin_role_entry is not None
org_id = admin_role_entry["orgId"]
# to be super sure of authorization server, let's validate the tuples in DB as well.
# to be super sure of authorization server, let's validate the tuples in DB as well.
# todo[@vikrantgupta25]: replace this with role members handler once built.
with signoz.sqlstore.conn.connect() as conn:
# verify the entry present for role assignment
@@ -131,15 +138,14 @@ def test_anonymous_user_signoz_anonymous_assignment(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id"),
{"object_id": tuple_object_id},
)
tuple_row = tuple_result.mappings().fetchone()
assert tuple_row is not None
# check that the tuple is for role assignment
assert tuple_row['object_type'] == "role"
assert tuple_row['relation'] == "assignee"
assert tuple_row["object_type"] == "role"
assert tuple_row["relation"] == "assignee"
if request.config.getoption("--sqlstore-provider") == 'sqlite':
if request.config.getoption("--sqlstore-provider") == "sqlite":
user_object_id = f"organization/{org_id}/anonymous/{ANONYMOUS_USER_ID}"
assert tuple_row["user_object_type"] == "anonymous"
assert tuple_row["user_object_id"] == user_object_id
@@ -147,5 +153,3 @@ def test_anonymous_user_signoz_anonymous_assignment(
_user = f"anonymous:organization/{org_id}/anonymous/{ANONYMOUS_USER_ID}"
assert tuple_row["user_type"] == "user"
assert tuple_row["_user"] == _user

View File
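Both tests above assert against the same tuple addressing scheme: the granted role is identified as organization/{org_id}/role/{role_name}, the subject as organization/{org_id}/{subject_type}/{subject_id}, and the combined _user column prefixes that subject id with its type. A tiny sketch of building those identifiers, mirroring the strings asserted above (the helper names are hypothetical):

def role_object_id(org_id: str, role_name: str) -> str:
    # e.g. organization/<org>/role/signoz-admin
    return f"organization/{org_id}/role/{role_name}"


def subject_object_id(org_id: str, subject_type: str, subject_id: str) -> str:
    # subject_type is "user" for real users and "anonymous" for the anonymous user
    return f"organization/{org_id}/{subject_type}/{subject_id}"


def combined_user(org_id: str, subject_type: str, subject_id: str) -> str:
    # matches the _user column checked on non-sqlite providers, e.g.
    # user:organization/<org>/user/<id> or anonymous:organization/<org>/anonymous/<id>
    return f"{subject_type}:{subject_object_id(org_id, subject_type, subject_id)}"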

@@ -1,11 +1,16 @@
import pytest
from http import HTTPStatus
from typing import Callable
import pytest
import requests
from sqlalchemy import sql
from fixtures.auth import USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD, USER_EDITOR_EMAIL, USER_EDITOR_PASSWORD
from fixtures.auth import (
USER_ADMIN_EMAIL,
USER_ADMIN_PASSWORD,
USER_EDITOR_EMAIL,
USER_EDITOR_PASSWORD,
)
from fixtures.types import Operation, SigNoz
@@ -16,7 +21,7 @@ def test_user_invite_accept_role_grant(
get_token: Callable[[str, str], str],
):
admin_token = get_token(USER_ADMIN_EMAIL, USER_ADMIN_PASSWORD)
# invite a user as editor
invite_payload = {
"email": USER_EDITOR_EMAIL,
@@ -30,7 +35,7 @@ def test_user_invite_accept_role_grant(
)
assert invite_response.status_code == HTTPStatus.CREATED
invite_token = invite_response.json()["data"]["token"]
# accept the invite for editor
accept_payload = {
"token": invite_token,
@@ -40,7 +45,7 @@ def test_user_invite_accept_role_grant(
signoz.self.host_configs["8080"].get("/api/v1/invite/accept"),
json=accept_payload,
timeout=2,
)
)
assert accept_response.status_code == HTTPStatus.CREATED
# Login with editor email and password
@@ -53,7 +58,6 @@ def test_user_invite_accept_role_grant(
assert user_me_response.status_code == HTTPStatus.OK
editor_id = user_me_response.json()["data"]["id"]
# check that the admin API returns forbidden for the editor user
admin_roles_response = requests.get(
signoz.self.host_configs["8080"].get("/api/v1/roles"),
@@ -79,11 +83,11 @@ def test_user_invite_accept_role_grant(
)
tuple_row = tuple_result.mappings().fetchone()
assert tuple_row is not None
assert tuple_row['object_type'] == "role"
assert tuple_row['relation'] == "assignee"
assert tuple_row["object_type"] == "role"
assert tuple_row["relation"] == "assignee"
# verify the user tuple details depending on db provider
if request.config.getoption("--sqlstore-provider") == 'sqlite':
if request.config.getoption("--sqlstore-provider") == "sqlite":
user_object_id = f"organization/{org_id}/user/{editor_id}"
assert tuple_row["user_object_type"] == "user"
assert tuple_row["user_object_id"] == user_object_id
@@ -93,7 +97,6 @@ def test_user_invite_accept_role_grant(
assert tuple_row["_user"] == _user
def test_user_update_role_grant(
request: pytest.FixtureRequest,
signoz: SigNoz,
@@ -122,9 +125,7 @@ def test_user_update_role_grant(
org_id = roles_data[0]["orgId"]
# Update the user's role to viewer
update_payload = {
"role": "VIEWER"
}
update_payload = {"role": "VIEWER"}
update_response = requests.put(
signoz.self.host_configs["8080"].get(f"/api/v1/user/{editor_id}"),
json=update_payload,
@@ -139,7 +140,9 @@ def test_user_update_role_grant(
viewer_tuple_object_id = f"organization/{org_id}/role/signoz-viewer"
# Check there is no tuple for signoz-editor assignment
editor_tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"),
sql.text(
"SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"
),
{"object_id": editor_tuple_object_id},
)
for row in editor_tuple_result.mappings().fetchall():
@@ -152,13 +155,15 @@ def test_user_update_role_grant(
# Check that a tuple exists for signoz-viewer assignment
viewer_tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"),
sql.text(
"SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"
),
{"object_id": viewer_tuple_object_id},
)
row = viewer_tuple_result.mappings().fetchone()
assert row is not None
assert row['object_type'] == "role"
assert row['relation'] == "assignee"
assert row["object_type"] == "role"
assert row["relation"] == "assignee"
if request.config.getoption("--sqlstore-provider") == "sqlite":
user_object_id = f"organization/{org_id}/user/{editor_id}"
assert row["user_object_type"] == "user"
@@ -168,6 +173,7 @@ def test_user_update_role_grant(
assert row["user_type"] == "user"
assert row["_user"] == _user
def test_user_delete_role_revoke(
request: pytest.FixtureRequest,
signoz: SigNoz,
@@ -205,10 +211,12 @@ def test_user_delete_role_revoke(
with signoz.sqlstore.conn.connect() as conn:
tuple_result = conn.execute(
sql.text("SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"),
sql.text(
"SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"
),
{"object_id": tuple_object_id},
)
# there should NOT be any tuple for the current user assignment
tuple_rows = tuple_result.mappings().fetchall()
for row in tuple_rows:
@@ -217,4 +225,4 @@ def test_user_delete_role_revoke(
assert row["user_object_id"] != user_object_id
else:
_user = f"user:organization/{org_id}/user/{editor_id}"
assert row["_user"] != _user
assert row["_user"] != _user

View File
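test_user_update_role_grant above is the densest of these flows: change the user's role through PUT /api/v1/user/{id}, then confirm no signoz-editor assignment tuple references the user while a signoz-viewer one exists. A condensed sketch of that verification, reusing the query and object-id formats shown above; the tests check user_object_id on sqlite and _user on other providers, so this sketch simply scans every column, and authentication handling is omitted.

from sqlalchemy import sql


def assert_editor_to_viewer(signoz, org_id, user_id):
    assignee_query = sql.text(
        "SELECT * FROM tuple WHERE object_id = :object_id AND relation = 'assignee'"
    )
    editor_object_id = f"organization/{org_id}/role/signoz-editor"
    viewer_object_id = f"organization/{org_id}/role/signoz-viewer"
    subject = f"organization/{org_id}/user/{user_id}"

    with signoz.sqlstore.conn.connect() as conn:
        # No editor assignment should reference this user any more ...
        for row in conn.execute(
            assignee_query, {"object_id": editor_object_id}
        ).mappings():
            values = " ".join(str(v) for v in row.values() if v is not None)
            assert subject not in values
        # ... and at least one viewer assignment tuple should exist.
        viewer_rows = (
            conn.execute(assignee_query, {"object_id": viewer_object_id})
            .mappings()
            .fetchall()
        )
        assert viewer_rows, "expected a signoz-viewer assignment tuple"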

@@ -0,0 +1,12 @@
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:01:00+00:00","value":2,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:02:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:03:00+00:00","value":14,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:04:00+00:00","value":3,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:05:00+00:00","value":6,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:06:00+00:00","value":25,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:07:00+00:00","value":25,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:08:00+00:00","value":25,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:09:00+00:00","value":10,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:10:00+00:00","value":12,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:11:00+00:00","value":8,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_multi_threshold_rule_test","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:12:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

View File
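Each line of this fixture is one self-contained metric sample in JSON Lines form. A minimal sketch of a reader for such files, using only the fields visible above (the helper name is hypothetical and not part of the test suite):

import json
from pathlib import Path
from typing import List


def load_metric_samples(path: str) -> List[dict]:
    # Parse a .jsonl metrics fixture into a list of sample dicts; each line
    # carries metric_name, labels, timestamp, value, temporality and type_.
    samples = []
    for line in Path(path).read_text().splitlines():
        if line.strip():
            samples.append(json.loads(line))
    return samples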

@@ -0,0 +1,83 @@
{
"alert": "multi_threshold_rule_test",
"ruleType": "threshold_rule",
"alertType": "METRIC_BASED_ALERT",
"condition": {
"thresholds": {
"kind": "basic",
"spec": [
{
"name": "critical",
"target": 30,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
]
},
{
"name": "warning",
"target": 20,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
]
},
{
"name": "info",
"target": 10,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
]
}
]
},
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"aggregations": [
{
"metricName": "cpu_percent_multi_threshold_rule_test",
"timeAggregation": "avg",
"spaceAggregation": "max"
}
]
}
}
]
},
"selectedQueryName": "A"
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m0s",
"frequency": "15s"
}
},
"labels": {},
"annotations": {
"description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})",
"summary": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})"
},
"notificationSettings": {
"groupBy": [],
"usePolicy": false,
"renotify": {
"enabled": false,
"interval": "30m",
"alertStates": []
}
},
"version": "v5",
"schemaVersion": "v2alpha1"
}

View File
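Given the gauge fixture in the previous file (values peaking at 25), only the info (10) and warning (20) thresholds of this rule can be crossed; critical (30) cannot. A tiny sketch of that evaluation, assuming op "1" means "greater than" and matchType "1" means "at least once" in the window; both interpretations are assumptions, not something this diff confirms.

# Gauge values from cpu_percent_multi_threshold_rule_test above.
values = [2, 15, 14, 3, 6, 25, 25, 25, 10, 12, 8, 15]
thresholds = {"critical": 30, "warning": 20, "info": 10}

# "At least once above target" interpretation (assumed for matchType/op "1").
fired = {name: any(v > target for v in values) for name, target in thresholds.items()}
assert fired == {"critical": False, "warning": True, "info": True}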

@@ -0,0 +1,12 @@
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:01:00+00:00","value":12,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:02:00+00:00","value":26,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:03:00+00:00","value":41,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:04:00+00:00","value":56,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:05:00+00:00","value":71,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:06:00+00:00","value":86,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:07:00+00:00","value":101,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:08:00+00:00","value":116,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:09:00+00:00","value":131,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:10:00+00:00","value":146,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:11:00+00:00","value":161,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"request_total_no_data_rule_test","labels":{"service":"api","endpoint":"/health","status_code":"200"},"timestamp":"2026-01-29T10:12:00+00:00","value":176,"temporality":"Cumulative","type_":"Sum","is_monotonic":true,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

View File

@@ -0,0 +1,70 @@
{
"alert": "no_data_rule_test",
"ruleType": "threshold_rule",
"alertType": "METRIC_BASED_ALERT",
"condition": {
"thresholds": {
"kind": "basic",
"spec": [
{
"name": "critical",
"target": 10,
"matchType": "1",
"op": "1",
"channels": [
"test channel"
]
}
]
},
"compositeQuery": {
"queryType": "builder",
"panelType": "graph",
"queries": [
{
"type": "builder_query",
"spec": {
"name": "A",
"signal": "metrics",
"filter": {
"expression": "service = 'server'"
},
"aggregations": [
{
"metricName": "request_total_no_data_rule_test",
"timeAggregation": "rate",
"spaceAggregation": "sum"
}
]
}
}
]
},
"selectedQueryName": "A",
"alertOnAbsent": true,
"absentFor": 1
},
"evaluation": {
"kind": "rolling",
"spec": {
"evalWindow": "5m0s",
"frequency": "15s"
}
},
"labels": {},
"annotations": {
"description": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})",
"summary": "This alert is fired when the defined metric (current value: {{$value}}) crosses the threshold ({{$threshold}})"
},
"notificationSettings": {
"groupBy": [],
"usePolicy": false,
"renotify": {
"enabled": false,
"interval": "30m",
"alertStates": []
}
},
"version": "v5",
"schemaVersion": "v2alpha1"
}

View File
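This rule filters on service = 'server' and sets alertOnAbsent to true, while the accompanying fixture only emits service="api" samples, so the query matches no series and the rule should fire for absence rather than for its 10 threshold. A sketch of that reasoning reduced to its simplest form; reading absentFor: 1 as "one window with no data" is an assumption.

# Samples from request_total_no_data_rule_test all carry service="api".
samples = [{"service": "api", "value": v} for v in (12, 26, 41, 56, 71)]

# The rule's filter selects service = 'server', so nothing matches ...
matching = [s for s in samples if s["service"] == "server"]

# ... and with alertOnAbsent enabled, an empty result is itself an alert condition.
alert_on_absent = True
fires_for_absence = alert_on_absent and not matching
assert fires_for_absence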

@@ -0,0 +1,12 @@
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:01:00+00:00","value":12,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:02:00+00:00","value":14,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:03:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:04:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:05:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:06:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:07:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:08:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:09:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:10:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:11:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}
{"metric_name":"cpu_percent_threshold_above_all_the_time","labels":{"host":"server-01","cpu":"cpu0"},"timestamp":"2026-01-29T10:12:00+00:00","value":15,"temporality":"Unspecified","type_":"Gauge","is_monotonic":false,"flags":0,"description":"","unit":"","env":"default","resource_attrs":{},"scope_attrs":{}}

Some files were not shown because too many files have changed in this diff.